Add python venv

244
utils/python-venv/Lib/site-packages/setuptools/__init__.py
Normal file
@@ -0,0 +1,244 @@
"""Extensions to the 'distutils' for large or complex distributions"""

from fnmatch import fnmatchcase
import functools
import os
import re

import _distutils_hack.override  # noqa: F401

import distutils.core
from distutils.errors import DistutilsOptionError
from distutils.util import convert_path

from ._deprecation_warning import SetuptoolsDeprecationWarning

import setuptools.version
from setuptools.extension import Extension
from setuptools.dist import Distribution
from setuptools.depends import Require
from . import monkey
from . import logging


__all__ = [
    'setup',
    'Distribution',
    'Command',
    'Extension',
    'Require',
    'SetuptoolsDeprecationWarning',
    'find_packages',
    'find_namespace_packages',
]

__version__ = setuptools.version.__version__

bootstrap_install_from = None


class PackageFinder:
    """
    Generate a list of all Python packages found within a directory
    """

    @classmethod
    def find(cls, where='.', exclude=(), include=('*',)):
        """Return a list of all Python packages found within directory 'where'

        'where' is the root directory which will be searched for packages.  It
        should be supplied as a "cross-platform" (i.e. URL-style) path; it will
        be converted to the appropriate local path syntax.

        'exclude' is a sequence of package names to exclude; '*' can be used
        as a wildcard in the names, such that 'foo.*' will exclude all
        subpackages of 'foo' (but not 'foo' itself).

        'include' is a sequence of package names to include.  If it's
        specified, only the named packages will be included.  If it's not
        specified, all found packages will be included.  'include' can contain
        shell-style wildcard patterns just like 'exclude'.
        """

        return list(
            cls._find_packages_iter(
                convert_path(where),
                cls._build_filter('ez_setup', '*__pycache__', *exclude),
                cls._build_filter(*include),
            )
        )

    @classmethod
    def _find_packages_iter(cls, where, exclude, include):
        """
        All the packages found in 'where' that pass the 'include' filter, but
        not the 'exclude' filter.
        """
        for root, dirs, files in os.walk(where, followlinks=True):
            # Copy dirs to iterate over it, then empty dirs.
            all_dirs = dirs[:]
            dirs[:] = []

            for dir in all_dirs:
                full_path = os.path.join(root, dir)
                rel_path = os.path.relpath(full_path, where)
                package = rel_path.replace(os.path.sep, '.')

                # Skip directory trees that are not valid packages
                if '.' in dir or not cls._looks_like_package(full_path):
                    continue

                # Should this package be included?
                if include(package) and not exclude(package):
                    yield package

                # Keep searching subdirectories, as there may be more packages
                # down there, even if the parent was excluded.
                dirs.append(dir)

    @staticmethod
    def _looks_like_package(path):
        """Does a directory look like a package?"""
        return os.path.isfile(os.path.join(path, '__init__.py'))

    @staticmethod
    def _build_filter(*patterns):
        """
        Given a list of patterns, return a callable that will be true only if
        the input matches at least one of the patterns.
        """
        return lambda name: any(fnmatchcase(name, pat=pat) for pat in patterns)


class PEP420PackageFinder(PackageFinder):
    @staticmethod
    def _looks_like_package(path):
        return True


find_packages = PackageFinder.find
find_namespace_packages = PEP420PackageFinder.find
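
# Illustrative usage (a minimal sketch, not part of the vendored module; the
# 'src' layout and package names are hypothetical). find_packages() walks a
# directory and keeps only directories containing an __init__.py, while
# find_namespace_packages() also accepts PEP 420 namespace packages:
#
#     import setuptools
#
#     pkgs = setuptools.find_packages(where='src', exclude=('tests', 'tests.*'))
#     # e.g. ['mypkg', 'mypkg.util'] for src/mypkg/__init__.py and
#     #      src/mypkg/util/__init__.py
#
#     ns_pkgs = setuptools.find_namespace_packages(where='src')
#     # also picks up src/mypkg/plugins/ even without an __init__.py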


def _install_setup_requires(attrs):
    # Note: do not use `setuptools.Distribution` directly, as
    # our PEP 517 backend patches `distutils.core.Distribution`.
    class MinimalDistribution(distutils.core.Distribution):
        """
        A minimal version of a distribution for supporting the
        fetch_build_eggs interface.
        """

        def __init__(self, attrs):
            _incl = 'dependency_links', 'setup_requires'
            filtered = {k: attrs[k] for k in set(_incl) & set(attrs)}
            distutils.core.Distribution.__init__(self, filtered)

        def finalize_options(self):
            """
            Disable finalize_options to avoid building the working set.
            Ref #2158.
            """

    dist = MinimalDistribution(attrs)

    # Honor setup.cfg's options.
    dist.parse_config_files(ignore_option_errors=True)
    if dist.setup_requires:
        dist.fetch_build_eggs(dist.setup_requires)


def setup(**attrs):
    # Make sure we have any requirements needed to interpret 'attrs'.
    logging.configure()
    _install_setup_requires(attrs)
    return distutils.core.setup(**attrs)


setup.__doc__ = distutils.core.setup.__doc__
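
# Illustrative usage (a sketch, not part of this module): setup() forwards to
# distutils.core.setup() after configuring logging and fetching any
# setup_requires dependencies. A minimal hypothetical setup.py:
#
#     from setuptools import setup, find_packages
#
#     setup(
#         name='mypkg',                      # hypothetical project name
#         version='0.1.0',
#         packages=find_packages(where='src'),
#         package_dir={'': 'src'},
#     )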


_Command = monkey.get_unpatched(distutils.core.Command)


class Command(_Command):
    __doc__ = _Command.__doc__

    command_consumes_arguments = False

    def __init__(self, dist, **kw):
        """
        Construct the command for dist, updating
        vars(self) with any keyword parameters.
        """
        _Command.__init__(self, dist)
        vars(self).update(kw)

    def _ensure_stringlike(self, option, what, default=None):
        val = getattr(self, option)
        if val is None:
            setattr(self, option, default)
            return default
        elif not isinstance(val, str):
            raise DistutilsOptionError(
                "'%s' must be a %s (got `%s`)" % (option, what, val)
            )
        return val

    def ensure_string_list(self, option):
        r"""Ensure that 'option' is a list of strings.  If 'option' is
        currently a string, we split it either on /,\s*/ or /\s+/, so
        "foo bar baz", "foo,bar,baz", and "foo, bar baz" all become
        ["foo", "bar", "baz"].
        """
        val = getattr(self, option)
        if val is None:
            return
        elif isinstance(val, str):
            setattr(self, option, re.split(r',\s*|\s+', val))
        else:
            if isinstance(val, list):
                ok = all(isinstance(v, str) for v in val)
            else:
                ok = False
            if not ok:
                raise DistutilsOptionError(
                    "'%s' must be a list of strings (got %r)" % (option, val)
                )

    def reinitialize_command(self, command, reinit_subcommands=0, **kw):
        cmd = _Command.reinitialize_command(self, command, reinit_subcommands)
        vars(cmd).update(kw)
        return cmd


def _find_all_simple(path):
    """
    Find all files under 'path'
    """
    results = (
        os.path.join(base, file)
        for base, dirs, files in os.walk(path, followlinks=True)
        for file in files
    )
    return filter(os.path.isfile, results)


def findall(dir=os.curdir):
    """
    Find all files under 'dir' and return the list of full filenames.
    Unless dir is '.', return full filenames with dir prepended.
    """
    files = _find_all_simple(dir)
    if dir == os.curdir:
        make_rel = functools.partial(os.path.relpath, start=dir)
        files = map(make_rel, files)
    return list(files)


class sic(str):
    """Treat this string as-is (https://en.wikipedia.org/wiki/Sic)"""


# Apply monkey patches
monkey.patch_all()
7
utils/python-venv/Lib/site-packages/setuptools/_deprecation_warning.py
Normal file
@@ -0,0 +1,7 @@
class SetuptoolsDeprecationWarning(Warning):
    """
    Base class for warning deprecations in ``setuptools``

    This class is not derived from ``DeprecationWarning``, and as such is
    visible by default.
    """
24
utils/python-venv/Lib/site-packages/setuptools/_distutils/__init__.py
Normal file
@@ -0,0 +1,24 @@
"""distutils

The main package for the Python Module Distribution Utilities.  Normally
used from a setup script as

   from distutils.core import setup

   setup (...)
"""

import sys
import importlib

__version__ = sys.version[:sys.version.index(' ')]


try:
    # Allow Debian and pkgsrc (only) to customize system
    # behavior. Ref pypa/distutils#2 and pypa/distutils#16.
    # This hook is deprecated and no other environments
    # should use it.
    importlib.import_module('_distutils_system_mod')
except ImportError:
    pass
561
utils/python-venv/Lib/site-packages/setuptools/_distutils/_msvccompiler.py
Normal file
@@ -0,0 +1,561 @@
"""distutils._msvccompiler

Contains MSVCCompiler, an implementation of the abstract CCompiler class
for Microsoft Visual Studio 2015.

The module is compatible with VS 2015 and later. You can find legacy support
for older versions in distutils.msvc9compiler and distutils.msvccompiler.
"""

# Written by Perry Stoll
# hacked by Robin Becker and Thomas Heller to do a better job of
#   finding DevStudio (through the registry)
# ported to VS 2005 and VS 2008 by Christian Heimes
# ported to VS 2015 by Steve Dower

import os
import subprocess
import contextlib
import warnings
import unittest.mock
with contextlib.suppress(ImportError):
    import winreg

from distutils.errors import DistutilsExecError, DistutilsPlatformError, \
                             CompileError, LibError, LinkError
from distutils.ccompiler import CCompiler, gen_lib_options
from distutils import log
from distutils.util import get_platform

from itertools import count


def _find_vc2015():
    try:
        key = winreg.OpenKeyEx(
            winreg.HKEY_LOCAL_MACHINE,
            r"Software\Microsoft\VisualStudio\SxS\VC7",
            access=winreg.KEY_READ | winreg.KEY_WOW64_32KEY
        )
    except OSError:
        log.debug("Visual C++ is not registered")
        return None, None

    best_version = 0
    best_dir = None
    with key:
        for i in count():
            try:
                v, vc_dir, vt = winreg.EnumValue(key, i)
            except OSError:
                break
            if v and vt == winreg.REG_SZ and os.path.isdir(vc_dir):
                try:
                    version = int(float(v))
                except (ValueError, TypeError):
                    continue
                if version >= 14 and version > best_version:
                    best_version, best_dir = version, vc_dir
    return best_version, best_dir


def _find_vc2017():
    """Returns "15, path" based on the result of invoking vswhere.exe
    If no install is found, returns "None, None"

    The version is returned to avoid unnecessarily changing the function
    result. It may be ignored when the path is not None.

    If vswhere.exe is not available, by definition, VS 2017 is not
    installed.
    """
    root = os.environ.get("ProgramFiles(x86)") or os.environ.get("ProgramFiles")
    if not root:
        return None, None

    try:
        path = subprocess.check_output([
            os.path.join(root, "Microsoft Visual Studio", "Installer", "vswhere.exe"),
            "-latest",
            "-prerelease",
            "-requires", "Microsoft.VisualStudio.Component.VC.Tools.x86.x64",
            "-property", "installationPath",
            "-products", "*",
        ], encoding="mbcs", errors="strict").strip()
    except (subprocess.CalledProcessError, OSError, UnicodeDecodeError):
        return None, None

    path = os.path.join(path, "VC", "Auxiliary", "Build")
    if os.path.isdir(path):
        return 15, path

    return None, None


PLAT_SPEC_TO_RUNTIME = {
    'x86': 'x86',
    'x86_amd64': 'x64',
    'x86_arm': 'arm',
    'x86_arm64': 'arm64'
}


def _find_vcvarsall(plat_spec):
    # bpo-38597: Removed vcruntime return value
    _, best_dir = _find_vc2017()

    if not best_dir:
        best_version, best_dir = _find_vc2015()

    if not best_dir:
        log.debug("No suitable Visual C++ version found")
        return None, None

    vcvarsall = os.path.join(best_dir, "vcvarsall.bat")
    if not os.path.isfile(vcvarsall):
        log.debug("%s cannot be found", vcvarsall)
        return None, None

    return vcvarsall, None


def _get_vc_env(plat_spec):
    if os.getenv("DISTUTILS_USE_SDK"):
        return {
            key.lower(): value
            for key, value in os.environ.items()
        }

    vcvarsall, _ = _find_vcvarsall(plat_spec)
    if not vcvarsall:
        raise DistutilsPlatformError("Unable to find vcvarsall.bat")

    try:
        out = subprocess.check_output(
            'cmd /u /c "{}" {} && set'.format(vcvarsall, plat_spec),
            stderr=subprocess.STDOUT,
        ).decode('utf-16le', errors='replace')
    except subprocess.CalledProcessError as exc:
        log.error(exc.output)
        raise DistutilsPlatformError("Error executing {}"
                                     .format(exc.cmd))

    env = {
        key.lower(): value
        for key, _, value in
        (line.partition('=') for line in out.splitlines())
        if key and value
    }

    return env
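
# Illustrative sketch (not part of this module) of the idiom _get_vc_env()
# relies on: run the .bat file and `set` in a single cmd.exe invocation, then
# parse the dumped KEY=VALUE environment. The path here is hypothetical:
#
#     out = subprocess.check_output(
#         'cmd /u /c "C:\\path\\to\\vcvarsall.bat" x86_amd64 && set',
#         stderr=subprocess.STDOUT,
#     ).decode('utf-16le', errors='replace')
#     env = dict(line.partition('=')[::2]   # (key, value) pairs
#                for line in out.splitlines() if '=' in line)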


def _find_exe(exe, paths=None):
    """Return path to an MSVC executable program.

    Tries to find the program in several places: first, one of the
    MSVC program search paths from the registry; next, the directories
    in the PATH environment variable.  If any of those work, return an
    absolute path that is known to exist.  If none of them work, just
    return the original program name, 'exe'.
    """
    if not paths:
        paths = os.getenv('path').split(os.pathsep)
    for p in paths:
        fn = os.path.join(os.path.abspath(p), exe)
        if os.path.isfile(fn):
            return fn
    return exe


# A map keyed by get_platform() return values to values accepted by
# 'vcvarsall.bat'. Always cross-compile from x86 to work with the
# lighter-weight MSVC installs that do not include native 64-bit tools.
PLAT_TO_VCVARS = {
    'win32': 'x86',
    'win-amd64': 'x86_amd64',
    'win-arm32': 'x86_arm',
    'win-arm64': 'x86_arm64'
}


class MSVCCompiler(CCompiler):
    """Concrete class that implements an interface to Microsoft Visual C++,
       as defined by the CCompiler abstract class."""

    compiler_type = 'msvc'

    # Just set this so CCompiler's constructor doesn't barf.  We currently
    # don't use the 'set_executables()' bureaucracy provided by CCompiler,
    # as it really isn't necessary for this sort of single-compiler class.
    # Would be nice to have a consistent interface with UnixCCompiler,
    # though, so it's worth thinking about.
    executables = {}

    # Private class data (need to distinguish C from C++ source for compiler)
    _c_extensions = ['.c']
    _cpp_extensions = ['.cc', '.cpp', '.cxx']
    _rc_extensions = ['.rc']
    _mc_extensions = ['.mc']

    # Needed for the filename generation methods provided by the
    # base class, CCompiler.
    src_extensions = (_c_extensions + _cpp_extensions +
                      _rc_extensions + _mc_extensions)
    res_extension = '.res'
    obj_extension = '.obj'
    static_lib_extension = '.lib'
    shared_lib_extension = '.dll'
    static_lib_format = shared_lib_format = '%s%s'
    exe_extension = '.exe'

    def __init__(self, verbose=0, dry_run=0, force=0):
        CCompiler.__init__(self, verbose, dry_run, force)
        # target platform (.plat_name is consistent with 'bdist')
        self.plat_name = None
        self.initialized = False

    def initialize(self, plat_name=None):
        # multi-init means we would need to check platform same each time...
        assert not self.initialized, "don't init multiple times"
        if plat_name is None:
            plat_name = get_platform()
        # sanity check for platforms to prevent obscure errors later.
        if plat_name not in PLAT_TO_VCVARS:
            raise DistutilsPlatformError("--plat-name must be one of {}"
                                         .format(tuple(PLAT_TO_VCVARS)))

        # Get the vcvarsall.bat spec for the requested platform.
        plat_spec = PLAT_TO_VCVARS[plat_name]

        vc_env = _get_vc_env(plat_spec)
        if not vc_env:
            raise DistutilsPlatformError("Unable to find a compatible "
                                         "Visual Studio installation.")

        self._paths = vc_env.get('path', '')
        paths = self._paths.split(os.pathsep)
        self.cc = _find_exe("cl.exe", paths)
        self.linker = _find_exe("link.exe", paths)
        self.lib = _find_exe("lib.exe", paths)
        self.rc = _find_exe("rc.exe", paths)    # resource compiler
        self.mc = _find_exe("mc.exe", paths)    # message compiler
        self.mt = _find_exe("mt.exe", paths)    # manifest tool

        for dir in vc_env.get('include', '').split(os.pathsep):
            if dir:
                self.add_include_dir(dir.rstrip(os.sep))

        for dir in vc_env.get('lib', '').split(os.pathsep):
            if dir:
                self.add_library_dir(dir.rstrip(os.sep))

        self.preprocess_options = None
        # bpo-38597: Always compile with dynamic linking
        # Future releases of Python 3.x will include all past
        # versions of vcruntime*.dll for compatibility.
        self.compile_options = [
            '/nologo', '/O2', '/W3', '/GL', '/DNDEBUG', '/MD'
        ]

        self.compile_options_debug = [
            '/nologo', '/Od', '/MDd', '/Zi', '/W3', '/D_DEBUG'
        ]

        ldflags = [
            '/nologo', '/INCREMENTAL:NO', '/LTCG'
        ]

        ldflags_debug = [
            '/nologo', '/INCREMENTAL:NO', '/LTCG', '/DEBUG:FULL'
        ]

        self.ldflags_exe = [*ldflags, '/MANIFEST:EMBED,ID=1']
        self.ldflags_exe_debug = [*ldflags_debug, '/MANIFEST:EMBED,ID=1']
        self.ldflags_shared = [*ldflags, '/DLL', '/MANIFEST:EMBED,ID=2', '/MANIFESTUAC:NO']
        self.ldflags_shared_debug = [*ldflags_debug, '/DLL', '/MANIFEST:EMBED,ID=2', '/MANIFESTUAC:NO']
        self.ldflags_static = [*ldflags]
        self.ldflags_static_debug = [*ldflags_debug]

        self._ldflags = {
            (CCompiler.EXECUTABLE, None): self.ldflags_exe,
            (CCompiler.EXECUTABLE, False): self.ldflags_exe,
            (CCompiler.EXECUTABLE, True): self.ldflags_exe_debug,
            (CCompiler.SHARED_OBJECT, None): self.ldflags_shared,
            (CCompiler.SHARED_OBJECT, False): self.ldflags_shared,
            (CCompiler.SHARED_OBJECT, True): self.ldflags_shared_debug,
            (CCompiler.SHARED_LIBRARY, None): self.ldflags_static,
            (CCompiler.SHARED_LIBRARY, False): self.ldflags_static,
            (CCompiler.SHARED_LIBRARY, True): self.ldflags_static_debug,
        }

        self.initialized = True

    # -- Worker methods ------------------------------------------------

    def object_filenames(self,
                         source_filenames,
                         strip_dir=0,
                         output_dir=''):
        ext_map = {
            **{ext: self.obj_extension for ext in self.src_extensions},
            **{ext: self.res_extension
               for ext in self._rc_extensions + self._mc_extensions},
        }

        output_dir = output_dir or ''

        def make_out_path(p):
            base, ext = os.path.splitext(p)
            if strip_dir:
                base = os.path.basename(base)
            else:
                _, base = os.path.splitdrive(base)
                if base.startswith((os.path.sep, os.path.altsep)):
                    base = base[1:]
            try:
                # XXX: This may produce absurdly long paths. We should check
                # the length of the result and trim base until we fit within
                # 260 characters.
                return os.path.join(output_dir, base + ext_map[ext])
            except LookupError:
                # Better to raise an exception instead of silently continuing
                # and later complain about sources and targets having
                # different lengths
                raise CompileError("Don't know how to compile {}".format(p))

        return list(map(make_out_path, source_filenames))
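
    # Illustrative expected mapping (a sketch; the file names are
    # hypothetical, shown with Windows path separators):
    #
    #     MSVCCompiler().object_filenames(['a.c', 'ui.rc'], output_dir='build')
    #     # -> ['build\\a.obj', 'build\\ui.res']   (.rc/.mc sources map to .res)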

    def compile(self, sources,
                output_dir=None, macros=None, include_dirs=None, debug=0,
                extra_preargs=None, extra_postargs=None, depends=None):

        if not self.initialized:
            self.initialize()
        compile_info = self._setup_compile(output_dir, macros, include_dirs,
                                           sources, depends, extra_postargs)
        macros, objects, extra_postargs, pp_opts, build = compile_info

        compile_opts = extra_preargs or []
        compile_opts.append('/c')
        if debug:
            compile_opts.extend(self.compile_options_debug)
        else:
            compile_opts.extend(self.compile_options)

        add_cpp_opts = False

        for obj in objects:
            try:
                src, ext = build[obj]
            except KeyError:
                continue
            if debug:
                # pass the full pathname to MSVC in debug mode,
                # this allows the debugger to find the source file
                # without asking the user to browse for it
                src = os.path.abspath(src)

            if ext in self._c_extensions:
                input_opt = "/Tc" + src
            elif ext in self._cpp_extensions:
                input_opt = "/Tp" + src
                add_cpp_opts = True
            elif ext in self._rc_extensions:
                # compile .RC to .RES file
                input_opt = src
                output_opt = "/fo" + obj
                try:
                    self.spawn([self.rc] + pp_opts + [output_opt, input_opt])
                except DistutilsExecError as msg:
                    raise CompileError(msg)
                continue
            elif ext in self._mc_extensions:
                # Compile .MC to .RC file to .RES file.
                #   * '-h dir' specifies the directory for the
                #     generated include file
                #   * '-r dir' specifies the target directory of the
                #     generated RC file and the binary message resource
                #     it includes
                #
                # For now (since there are no options to change this),
                # we use the source-directory for the include file and
                # the build directory for the RC file and message
                # resources. This works at least for win32all.
                h_dir = os.path.dirname(src)
                rc_dir = os.path.dirname(obj)
                try:
                    # first compile .MC to .RC and .H file
                    self.spawn([self.mc, '-h', h_dir, '-r', rc_dir, src])
                    base, _ = os.path.splitext(os.path.basename(src))
                    rc_file = os.path.join(rc_dir, base + '.rc')
                    # then compile .RC to .RES file
                    self.spawn([self.rc, "/fo" + obj, rc_file])

                except DistutilsExecError as msg:
                    raise CompileError(msg)
                continue
            else:
                # how to handle this file?
                raise CompileError("Don't know how to compile {} to {}"
                                   .format(src, obj))

            args = [self.cc] + compile_opts + pp_opts
            if add_cpp_opts:
                args.append('/EHsc')
            args.append(input_opt)
            args.append("/Fo" + obj)
            args.extend(extra_postargs)

            try:
                self.spawn(args)
            except DistutilsExecError as msg:
                raise CompileError(msg)

        return objects

    def create_static_lib(self,
                          objects,
                          output_libname,
                          output_dir=None,
                          debug=0,
                          target_lang=None):

        if not self.initialized:
            self.initialize()
        objects, output_dir = self._fix_object_args(objects, output_dir)
        output_filename = self.library_filename(output_libname,
                                                output_dir=output_dir)

        if self._need_link(objects, output_filename):
            lib_args = objects + ['/OUT:' + output_filename]
            if debug:
                pass  # XXX what goes here?
            try:
                log.debug('Executing "%s" %s', self.lib, ' '.join(lib_args))
                self.spawn([self.lib] + lib_args)
            except DistutilsExecError as msg:
                raise LibError(msg)
        else:
            log.debug("skipping %s (up-to-date)", output_filename)

    def link(self,
             target_desc,
             objects,
             output_filename,
             output_dir=None,
             libraries=None,
             library_dirs=None,
             runtime_library_dirs=None,
             export_symbols=None,
             debug=0,
             extra_preargs=None,
             extra_postargs=None,
             build_temp=None,
             target_lang=None):

        if not self.initialized:
            self.initialize()
        objects, output_dir = self._fix_object_args(objects, output_dir)
        fixed_args = self._fix_lib_args(libraries, library_dirs,
                                        runtime_library_dirs)
        libraries, library_dirs, runtime_library_dirs = fixed_args

        if runtime_library_dirs:
            self.warn("I don't know what to do with 'runtime_library_dirs': "
                      + str(runtime_library_dirs))

        lib_opts = gen_lib_options(self,
                                   library_dirs, runtime_library_dirs,
                                   libraries)
        if output_dir is not None:
            output_filename = os.path.join(output_dir, output_filename)

        if self._need_link(objects, output_filename):
            ldflags = self._ldflags[target_desc, debug]

            export_opts = ["/EXPORT:" + sym for sym in (export_symbols or [])]

            ld_args = (ldflags + lib_opts + export_opts +
                       objects + ['/OUT:' + output_filename])

            # The MSVC linker generates .lib and .exp files, which cannot be
            # suppressed by any linker switches. The .lib files may even be
            # needed! Make sure they are generated in the temporary build
            # directory. Since they have different names for debug and release
            # builds, they can go into the same directory.
            build_temp = os.path.dirname(objects[0])
            if export_symbols is not None:
                (dll_name, dll_ext) = os.path.splitext(
                    os.path.basename(output_filename))
                implib_file = os.path.join(
                    build_temp,
                    self.library_filename(dll_name))
                ld_args.append('/IMPLIB:' + implib_file)

            if extra_preargs:
                ld_args[:0] = extra_preargs
            if extra_postargs:
                ld_args.extend(extra_postargs)

            output_dir = os.path.dirname(os.path.abspath(output_filename))
            self.mkpath(output_dir)
            try:
                log.debug('Executing "%s" %s', self.linker, ' '.join(ld_args))
                self.spawn([self.linker] + ld_args)
            except DistutilsExecError as msg:
                raise LinkError(msg)
        else:
            log.debug("skipping %s (up-to-date)", output_filename)

    def spawn(self, cmd):
        env = dict(os.environ, PATH=self._paths)
        with self._fallback_spawn(cmd, env) as fallback:
            return super().spawn(cmd, env=env)
        return fallback.value

    @contextlib.contextmanager
    def _fallback_spawn(self, cmd, env):
        """
        Discovered in pypa/distutils#15, some tools monkeypatch the compiler,
        so the 'env' kwarg causes a TypeError. Detect this condition and
        restore the legacy, unsafe behavior.
        """
        bag = type('Bag', (), {})()
        try:
            yield bag
        except TypeError as exc:
            if "unexpected keyword argument 'env'" not in str(exc):
                raise
        else:
            return
        warnings.warn(
            "Fallback spawn triggered. Please update distutils monkeypatch.")
        with unittest.mock.patch.dict('os.environ', env):
            bag.value = super().spawn(cmd)
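
    # How the fallback above plays out in spawn() (explanatory sketch, not
    # part of the vendored file): if super().spawn(cmd, env=env) raises the
    # matching TypeError, the generator swallows it, so control resumes
    # *after* the `with` block in spawn() and `return fallback.value` returns
    # the result of the legacy re-spawn performed here. On success, the early
    # `return` inside the block wins and `fallback.value` is never read.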

    # -- Miscellaneous methods -----------------------------------------
    # These are all used by the 'gen_lib_options()' function in
    # ccompiler.py.

    def library_dir_option(self, dir):
        return "/LIBPATH:" + dir

    def runtime_library_dir_option(self, dir):
        raise DistutilsPlatformError(
            "don't know how to set runtime library search path for MSVC")

    def library_option(self, lib):
        return self.library_filename(lib)

    def find_library_file(self, dirs, lib, debug=0):
        # Prefer a debugging library if found (and requested), but deal
        # with it if we don't have one.
        if debug:
            try_names = [lib + "_d", lib]
        else:
            try_names = [lib]
        for dir in dirs:
            for name in try_names:
                libfile = os.path.join(dir, self.library_filename(name))
                if os.path.isfile(libfile):
                    return libfile
        else:
            # Oops, didn't find it in *any* of 'dirs'
            return None
256
utils/python-venv/Lib/site-packages/setuptools/_distutils/archive_util.py
Normal file
@@ -0,0 +1,256 @@
"""distutils.archive_util

Utility functions for creating archive files (tarballs, zip files,
that sort of thing)."""

import os
from warnings import warn
import sys

try:
    import zipfile
except ImportError:
    zipfile = None


from distutils.errors import DistutilsExecError
from distutils.spawn import spawn
from distutils.dir_util import mkpath
from distutils import log

try:
    from pwd import getpwnam
except ImportError:
    getpwnam = None

try:
    from grp import getgrnam
except ImportError:
    getgrnam = None


def _get_gid(name):
    """Returns a gid, given a group name."""
    if getgrnam is None or name is None:
        return None
    try:
        result = getgrnam(name)
    except KeyError:
        result = None
    if result is not None:
        return result[2]
    return None


def _get_uid(name):
    """Returns a uid, given a user name."""
    if getpwnam is None or name is None:
        return None
    try:
        result = getpwnam(name)
    except KeyError:
        result = None
    if result is not None:
        return result[2]
    return None


def make_tarball(base_name, base_dir, compress="gzip", verbose=0, dry_run=0,
                 owner=None, group=None):
    """Create a (possibly compressed) tar file from all the files under
    'base_dir'.

    'compress' must be "gzip" (the default), "bzip2", "xz", "compress", or
    None.  ("compress" will be deprecated in Python 3.2)

    'owner' and 'group' can be used to define an owner and a group for the
    archive that is being built. If not provided, the current owner and group
    will be used.

    The output tar file will be named 'base_name' + ".tar", possibly plus
    the appropriate compression extension (".gz", ".bz2", ".xz" or ".Z").

    Returns the output filename.
    """
    tar_compression = {'gzip': 'gz', 'bzip2': 'bz2', 'xz': 'xz', None: '',
                       'compress': ''}
    compress_ext = {'gzip': '.gz', 'bzip2': '.bz2', 'xz': '.xz',
                    'compress': '.Z'}

    # flags for compression program, each element of list will be an argument
    if compress is not None and compress not in compress_ext.keys():
        raise ValueError(
            "bad value for 'compress': must be None, 'gzip', 'bzip2', "
            "'xz' or 'compress'")

    archive_name = base_name + '.tar'
    if compress != 'compress':
        archive_name += compress_ext.get(compress, '')

    mkpath(os.path.dirname(archive_name), dry_run=dry_run)

    # creating the tarball
    import tarfile  # late import so Python build itself doesn't break

    log.info('Creating tar archive')

    uid = _get_uid(owner)
    gid = _get_gid(group)

    def _set_uid_gid(tarinfo):
        if gid is not None:
            tarinfo.gid = gid
            tarinfo.gname = group
        if uid is not None:
            tarinfo.uid = uid
            tarinfo.uname = owner
        return tarinfo

    if not dry_run:
        tar = tarfile.open(archive_name, 'w|%s' % tar_compression[compress])
        try:
            tar.add(base_dir, filter=_set_uid_gid)
        finally:
            tar.close()

    # compression using `compress`
    if compress == 'compress':
        warn("'compress' will be deprecated.", PendingDeprecationWarning)
        # the option varies depending on the platform
        compressed_name = archive_name + compress_ext[compress]
        if sys.platform == 'win32':
            cmd = [compress, archive_name, compressed_name]
        else:
            cmd = [compress, '-f', archive_name]
        spawn(cmd, dry_run=dry_run)
        return compressed_name

    return archive_name


def make_zipfile(base_name, base_dir, verbose=0, dry_run=0):
    """Create a zip file from all the files under 'base_dir'.

    The output zip file will be named 'base_name' + ".zip".  Uses either the
    "zipfile" Python module (if available) or the InfoZIP "zip" utility
    (if installed and found on the default search path).  If neither tool is
    available, raises DistutilsExecError.  Returns the name of the output zip
    file.
    """
    zip_filename = base_name + ".zip"
    mkpath(os.path.dirname(zip_filename), dry_run=dry_run)

    # If zipfile module is not available, try spawning an external
    # 'zip' command.
    if zipfile is None:
        if verbose:
            zipoptions = "-r"
        else:
            zipoptions = "-rq"

        try:
            spawn(["zip", zipoptions, zip_filename, base_dir],
                  dry_run=dry_run)
        except DistutilsExecError:
            # XXX really should distinguish between "couldn't find
            # external 'zip' command" and "zip failed".
            raise DistutilsExecError(("unable to create zip file '%s': "
                                      "could neither import the 'zipfile' module nor "
                                      "find a standalone zip utility") % zip_filename)

    else:
        log.info("creating '%s' and adding '%s' to it",
                 zip_filename, base_dir)

        if not dry_run:
            try:
                zip = zipfile.ZipFile(zip_filename, "w",
                                      compression=zipfile.ZIP_DEFLATED)
            except RuntimeError:
                zip = zipfile.ZipFile(zip_filename, "w",
                                      compression=zipfile.ZIP_STORED)

            with zip:
                if base_dir != os.curdir:
                    path = os.path.normpath(os.path.join(base_dir, ''))
                    zip.write(path, path)
                    log.info("adding '%s'", path)
                for dirpath, dirnames, filenames in os.walk(base_dir):
                    for name in dirnames:
                        path = os.path.normpath(os.path.join(dirpath, name, ''))
                        zip.write(path, path)
                        log.info("adding '%s'", path)
                    for name in filenames:
                        path = os.path.normpath(os.path.join(dirpath, name))
                        if os.path.isfile(path):
                            zip.write(path, path)
                            log.info("adding '%s'", path)

    return zip_filename


ARCHIVE_FORMATS = {
    'gztar': (make_tarball, [('compress', 'gzip')], "gzip'ed tar-file"),
    'bztar': (make_tarball, [('compress', 'bzip2')], "bzip2'ed tar-file"),
    'xztar': (make_tarball, [('compress', 'xz')], "xz'ed tar-file"),
    'ztar':  (make_tarball, [('compress', 'compress')], "compressed tar file"),
    'tar':   (make_tarball, [('compress', None)], "uncompressed tar file"),
    'zip':   (make_zipfile, [], "ZIP file")
}


def check_archive_formats(formats):
    """Returns the first format from the 'format' list that is unknown.

    If all formats are known, returns None
    """
    for format in formats:
        if format not in ARCHIVE_FORMATS:
            return format
    return None


def make_archive(base_name, format, root_dir=None, base_dir=None, verbose=0,
                 dry_run=0, owner=None, group=None):
    """Create an archive file (eg. zip or tar).

    'base_name' is the name of the file to create, minus any format-specific
    extension; 'format' is the archive format: one of "zip", "tar", "gztar",
    "bztar", "xztar", or "ztar".

    'root_dir' is a directory that will be the root directory of the
    archive; ie. we typically chdir into 'root_dir' before creating the
    archive.  'base_dir' is the directory where we start archiving from;
    ie. 'base_dir' will be the common prefix of all files and
    directories in the archive.  'root_dir' and 'base_dir' both default
    to the current directory.  Returns the name of the archive file.

    'owner' and 'group' are used when creating a tar archive. By default,
    uses the current owner and group.
    """
    save_cwd = os.getcwd()
    if root_dir is not None:
        log.debug("changing into '%s'", root_dir)
        base_name = os.path.abspath(base_name)
        if not dry_run:
            os.chdir(root_dir)

    if base_dir is None:
        base_dir = os.curdir

    kwargs = {'dry_run': dry_run}

    try:
        format_info = ARCHIVE_FORMATS[format]
    except KeyError:
        raise ValueError("unknown archive format '%s'" % format)

    func = format_info[0]
    for arg, val in format_info[1]:
        kwargs[arg] = val

    if format != 'zip':
        kwargs['owner'] = owner
        kwargs['group'] = group

    try:
        filename = func(base_name, base_dir, **kwargs)
    finally:
        if root_dir is not None:
            log.debug("changing back to '%s'", save_cwd)
            os.chdir(save_cwd)

    return filename
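
# Illustrative usage (a sketch; the paths are hypothetical): build
# dist/app.tar.gz from everything under ./app, with 'app' as the common
# prefix inside the archive:
#
#     from distutils.archive_util import make_archive
#
#     make_archive('dist/app', 'gztar', base_dir='app')
#     # -> 'dist/app.tar.gz'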
393
utils/python-venv/Lib/site-packages/setuptools/_distutils/bcppcompiler.py
Normal file
@@ -0,0 +1,393 @@
"""distutils.bcppcompiler

Contains BorlandCCompiler, an implementation of the abstract CCompiler class
for the Borland C++ compiler.
"""

# This implementation by Lyle Johnson, based on the original msvccompiler.py
# module and using the directions originally published by Gordon Williams.

# XXX looks like there's a LOT of overlap between these two classes:
# someone should sit down and factor out the common code as
# WindowsCCompiler!  --GPW


import os
from distutils.errors import \
     DistutilsExecError, \
     CompileError, LibError, LinkError, UnknownFileError
from distutils.ccompiler import \
     CCompiler, gen_preprocess_options
from distutils.file_util import write_file
from distutils.dep_util import newer
from distutils import log


class BCPPCompiler(CCompiler):
    """Concrete class that implements an interface to the Borland C/C++
    compiler, as defined by the CCompiler abstract class.
    """

    compiler_type = 'bcpp'

    # Just set this so CCompiler's constructor doesn't barf.  We currently
    # don't use the 'set_executables()' bureaucracy provided by CCompiler,
    # as it really isn't necessary for this sort of single-compiler class.
    # Would be nice to have a consistent interface with UnixCCompiler,
    # though, so it's worth thinking about.
    executables = {}

    # Private class data (need to distinguish C from C++ source for compiler)
    _c_extensions = ['.c']
    _cpp_extensions = ['.cc', '.cpp', '.cxx']

    # Needed for the filename generation methods provided by the
    # base class, CCompiler.
    src_extensions = _c_extensions + _cpp_extensions
    obj_extension = '.obj'
    static_lib_extension = '.lib'
    shared_lib_extension = '.dll'
    static_lib_format = shared_lib_format = '%s%s'
    exe_extension = '.exe'

    def __init__(self,
                 verbose=0,
                 dry_run=0,
                 force=0):

        CCompiler.__init__(self, verbose, dry_run, force)

        # These executables are assumed to all be in the path.
        # Borland doesn't seem to use any special registry settings to
        # indicate their installation locations.

        self.cc = "bcc32.exe"
        self.linker = "ilink32.exe"
        self.lib = "tlib.exe"

        self.preprocess_options = None
        self.compile_options = ['/tWM', '/O2', '/q', '/g0']
        self.compile_options_debug = ['/tWM', '/Od', '/q', '/g0']

        self.ldflags_shared = ['/Tpd', '/Gn', '/q', '/x']
        self.ldflags_shared_debug = ['/Tpd', '/Gn', '/q', '/x']
        self.ldflags_static = []
        self.ldflags_exe = ['/Gn', '/q', '/x']
        self.ldflags_exe_debug = ['/Gn', '/q', '/x', '/r']

    # -- Worker methods ------------------------------------------------

    def compile(self, sources,
                output_dir=None, macros=None, include_dirs=None, debug=0,
                extra_preargs=None, extra_postargs=None, depends=None):

        macros, objects, extra_postargs, pp_opts, build = \
            self._setup_compile(output_dir, macros, include_dirs, sources,
                                depends, extra_postargs)
        compile_opts = extra_preargs or []
        compile_opts.append('-c')
        if debug:
            compile_opts.extend(self.compile_options_debug)
        else:
            compile_opts.extend(self.compile_options)

        for obj in objects:
            try:
                src, ext = build[obj]
            except KeyError:
                continue
            # XXX why do the normpath here?
            src = os.path.normpath(src)
            obj = os.path.normpath(obj)
            # XXX _setup_compile() did a mkpath() too but before the normpath.
            # Is it possible to skip the normpath?
            self.mkpath(os.path.dirname(obj))

            if ext == '.res':
                # This is already a binary file -- skip it.
                continue  # the 'for' loop
            if ext == '.rc':
                # This needs to be compiled to a .res file -- do it now.
                try:
                    self.spawn(["brcc32", "-fo", obj, src])
                except DistutilsExecError as msg:
                    raise CompileError(msg)
                continue  # the 'for' loop

            # The next two are both for the real compiler.
            if ext in self._c_extensions:
                input_opt = ""
            elif ext in self._cpp_extensions:
                input_opt = "-P"
            else:
                # Unknown file type -- no extra options.  The compiler
                # will probably fail, but let it just in case this is a
                # file the compiler recognizes even if we don't.
                input_opt = ""

            output_opt = "-o" + obj

            # Compiler command line syntax is: "bcc32 [options] file(s)".
            # Note that the source file names must appear at the end of
            # the command line.
            try:
                self.spawn([self.cc] + compile_opts + pp_opts +
                           [input_opt, output_opt] +
                           extra_postargs + [src])
            except DistutilsExecError as msg:
                raise CompileError(msg)

        return objects

    # compile ()

    def create_static_lib(self,
                          objects,
                          output_libname,
                          output_dir=None,
                          debug=0,
                          target_lang=None):

        (objects, output_dir) = self._fix_object_args(objects, output_dir)
        output_filename = \
            self.library_filename(output_libname, output_dir=output_dir)

        if self._need_link(objects, output_filename):
            lib_args = [output_filename, '/u'] + objects
            if debug:
                pass  # XXX what goes here?
            try:
                self.spawn([self.lib] + lib_args)
            except DistutilsExecError as msg:
                raise LibError(msg)
        else:
            log.debug("skipping %s (up-to-date)", output_filename)

    # create_static_lib ()

    def link(self,
             target_desc,
             objects,
             output_filename,
             output_dir=None,
             libraries=None,
             library_dirs=None,
             runtime_library_dirs=None,
             export_symbols=None,
             debug=0,
             extra_preargs=None,
             extra_postargs=None,
             build_temp=None,
             target_lang=None):

        # XXX this ignores 'build_temp'!  should follow the lead of
        # msvccompiler.py

        (objects, output_dir) = self._fix_object_args(objects, output_dir)
        (libraries, library_dirs, runtime_library_dirs) = \
            self._fix_lib_args(libraries, library_dirs, runtime_library_dirs)

        if runtime_library_dirs:
            log.warn("I don't know what to do with 'runtime_library_dirs': %s",
                     str(runtime_library_dirs))

        if output_dir is not None:
            output_filename = os.path.join(output_dir, output_filename)

        if self._need_link(objects, output_filename):

            # Figure out linker args based on type of target.
            if target_desc == CCompiler.EXECUTABLE:
                startup_obj = 'c0w32'
                if debug:
                    ld_args = self.ldflags_exe_debug[:]
                else:
                    ld_args = self.ldflags_exe[:]
            else:
                startup_obj = 'c0d32'
                if debug:
                    ld_args = self.ldflags_shared_debug[:]
                else:
                    ld_args = self.ldflags_shared[:]

            # Create a temporary exports file for use by the linker
            if export_symbols is None:
                def_file = ''
            else:
                head, tail = os.path.split(output_filename)
                modname, ext = os.path.splitext(tail)
                temp_dir = os.path.dirname(objects[0])  # preserve tree structure
                def_file = os.path.join(temp_dir, '%s.def' % modname)
                contents = ['EXPORTS']
                for sym in (export_symbols or []):
                    contents.append('  %s=_%s' % (sym, sym))
                self.execute(write_file, (def_file, contents),
                             "writing %s" % def_file)

            # Borland C++ has problems with '/' in paths
            objects2 = map(os.path.normpath, objects)
            # split objects into .obj and .res files
            # Borland C++ needs them at different positions in the command line
            objects = [startup_obj]
            resources = []
            for file in objects2:
                (base, ext) = os.path.splitext(os.path.normcase(file))
                if ext == '.res':
                    resources.append(file)
                else:
                    objects.append(file)

            for l in library_dirs:
                ld_args.append("/L%s" % os.path.normpath(l))
            ld_args.append("/L.")  # we sometimes use relative paths

            # list of object files
            ld_args.extend(objects)

            # XXX the command-line syntax for Borland C++ is a bit wonky;
            # certain filenames are jammed together in one big string, but
            # comma-delimited.  This doesn't mesh too well with the
            # Unix-centric attitude (with a DOS/Windows quoting hack) of
            # 'spawn()', so constructing the argument list is a bit
            # awkward.  Note that doing the obvious thing and jamming all
            # the filenames and commas into one argument would be wrong,
            # because 'spawn()' would quote any filenames with spaces in
            # them.  Arghghh!.  Apparently it works fine as coded...

            # name of dll/exe file
            ld_args.extend([',', output_filename])
            # no map file and start libraries
            ld_args.append(',,')

            for lib in libraries:
                # see if we find it and if there is a bcpp specific lib
                # (xxx_bcpp.lib)
                libfile = self.find_library_file(library_dirs, lib, debug)
                if libfile is None:
                    ld_args.append(lib)
                    # probably a BCPP internal library -- don't warn
                else:
                    # full name which prefers bcpp_xxx.lib over xxx.lib
                    ld_args.append(libfile)

            # some default libraries
            ld_args.append('import32')
            ld_args.append('cw32mt')

            # def file for export symbols
            ld_args.extend([',', def_file])
            # add resource files
            ld_args.append(',')
            ld_args.extend(resources)

            if extra_preargs:
                ld_args[:0] = extra_preargs
            if extra_postargs:
                ld_args.extend(extra_postargs)

            self.mkpath(os.path.dirname(output_filename))
            try:
                self.spawn([self.linker] + ld_args)
            except DistutilsExecError as msg:
                raise LinkError(msg)

        else:
            log.debug("skipping %s (up-to-date)", output_filename)

    # link ()

    # -- Miscellaneous methods -----------------------------------------

    def find_library_file(self, dirs, lib, debug=0):
        # List of effective library names to try, in order of preference:
        # xxx_bcpp.lib is better than xxx.lib
        # and xxx_d.lib is better than xxx.lib if debug is set
        #
        # The "_bcpp" suffix is to handle a Python installation for people
        # with multiple compilers (primarily Distutils hackers, I suspect
        # ;-).  The idea is they'd have one static library for each
        # compiler they care about, since (almost?) every Windows compiler
        # seems to have a different format for static libraries.
        if debug:
            dlib = (lib + "_d")
            try_names = (dlib + "_bcpp", lib + "_bcpp", dlib, lib)
        else:
            try_names = (lib + "_bcpp", lib)

        for dir in dirs:
            for name in try_names:
                libfile = os.path.join(dir, self.library_filename(name))
                if os.path.exists(libfile):
                    return libfile
        else:
            # Oops, didn't find it in *any* of 'dirs'
            return None

    # overwrite the one from CCompiler to support rc and res-files
    def object_filenames(self,
                         source_filenames,
                         strip_dir=0,
                         output_dir=''):
        if output_dir is None:
            output_dir = ''
        obj_names = []
        for src_name in source_filenames:
            # use normcase to make sure '.rc' is really '.rc' and not '.RC'
            (base, ext) = os.path.splitext(os.path.normcase(src_name))
            if ext not in (self.src_extensions + ['.rc', '.res']):
                raise UnknownFileError("unknown file type '%s' (from '%s')" %
                                       (ext, src_name))
            if strip_dir:
                base = os.path.basename(base)
            if ext == '.res':
                # these can go unchanged
                obj_names.append(os.path.join(output_dir, base + ext))
            elif ext == '.rc':
                # these need to be compiled to .res-files
                obj_names.append(os.path.join(output_dir, base + '.res'))
            else:
                obj_names.append(os.path.join(output_dir,
                                              base + self.obj_extension))
        return obj_names

    # object_filenames ()

    def preprocess(self,
                   source,
                   output_file=None,
                   macros=None,
                   include_dirs=None,
                   extra_preargs=None,
                   extra_postargs=None):

        (_, macros, include_dirs) = \
            self._fix_compile_args(None, macros, include_dirs)
        pp_opts = gen_preprocess_options(macros, include_dirs)
        pp_args = ['cpp32.exe'] + pp_opts
        if output_file is not None:
            pp_args.append('-o' + output_file)
        if extra_preargs:
            pp_args[:0] = extra_preargs
        if extra_postargs:
            pp_args.extend(extra_postargs)
        pp_args.append(source)

        # We need to preprocess: either we're being forced to, or the
        # source file is newer than the target (or the target doesn't
        # exist).
        if self.force or output_file is None or newer(source, output_file):
            if output_file:
                self.mkpath(os.path.dirname(output_file))
            try:
                self.spawn(pp_args)
            except DistutilsExecError as msg:
                print(msg)
                raise CompileError(msg)

    # preprocess()
File diff suppressed because it is too large

403
utils/python-venv/Lib/site-packages/setuptools/_distutils/cmd.py
Normal file
@@ -0,0 +1,403 @@
"""distutils.cmd

Provides the Command class, the base class for the command classes
in the distutils.command package.
"""

import sys, os, re
from distutils.errors import DistutilsOptionError
from distutils import util, dir_util, file_util, archive_util, dep_util
from distutils import log


class Command:
    """Abstract base class for defining command classes, the "worker bees"
    of the Distutils.  A useful analogy for command classes is to think of
    them as subroutines with local variables called "options".  The options
    are "declared" in 'initialize_options()' and "defined" (given their
    final values, aka "finalized") in 'finalize_options()', both of which
    must be defined by every command class.  The distinction between the
    two is necessary because option values might come from the outside
    world (command line, config file, ...), and any options dependent on
    other options must be computed *after* these outside influences have
    been processed -- hence 'finalize_options()'.  The "body" of the
    subroutine, where it does all its work based on the values of its
    options, is the 'run()' method, which must also be implemented by every
    command class.
    """

    # 'sub_commands' formalizes the notion of a "family" of commands,
    # eg. "install" as the parent with sub-commands "install_lib",
    # "install_headers", etc.  The parent of a family of commands
    # defines 'sub_commands' as a class attribute; it's a list of
    #    (command_name : string, predicate : unbound_method | string | None)
    # tuples, where 'predicate' is a method of the parent command that
    # determines whether the corresponding command is applicable in the
    # current situation.  (Eg. "install_headers" is only applicable if
    # we have any C header files to install.)  If 'predicate' is None,
    # that command is always applicable.
    #
    # 'sub_commands' is usually defined at the *end* of a class, because
    # predicates can be unbound methods, so they must already have been
    # defined.  The canonical example is the "install" command.
    sub_commands = []

    # -- Creation/initialization methods -------------------------------

    def __init__(self, dist):
        """Create and initialize a new Command object.  Most importantly,
        invokes the 'initialize_options()' method, which is the real
        initializer and depends on the actual command being
        instantiated.
        """
        # late import because of mutual dependence between these classes
        from distutils.dist import Distribution

        if not isinstance(dist, Distribution):
            raise TypeError("dist must be a Distribution instance")
        if self.__class__ is Command:
            raise RuntimeError("Command is an abstract class")

        self.distribution = dist
        self.initialize_options()

        # Per-command versions of the global flags, so that the user can
        # customize Distutils' behaviour command-by-command and let some
        # commands fall back on the Distribution's behaviour.  None means
        # "not defined, check self.distribution's copy", while 0 or 1 mean
        # false and true (duh).  Note that this means figuring out the real
        # value of each flag is a touch complicated -- hence "self._dry_run"
        # will be handled by __getattr__, below.
        # XXX This needs to be fixed.
        self._dry_run = None

        # verbose is largely ignored, but needs to be set for
        # backwards compatibility (I think)?
        self.verbose = dist.verbose

        # Some commands define a 'self.force' option to ignore file
        # timestamps, but methods defined *here* assume that
        # 'self.force' exists for all commands.  So define it here
        # just to be safe.
        self.force = None

        # The 'help' flag is just used for command-line parsing, so
        # none of that complicated bureaucracy is needed.
        self.help = 0

        # 'finalized' records whether or not 'finalize_options()' has been
        # called.  'finalize_options()' itself should not pay attention to
        # this flag: it is the business of 'ensure_finalized()', which
        # always calls 'finalize_options()', to respect/update it.
        self.finalized = 0

    # XXX A more explicit way to customize dry_run would be better.
    def __getattr__(self, attr):
        if attr == 'dry_run':
            myval = getattr(self, "_" + attr)
            if myval is None:
                return getattr(self.distribution, attr)
            else:
                return myval
        else:
            raise AttributeError(attr)

    def ensure_finalized(self):
        if not self.finalized:
            self.finalize_options()
        self.finalized = 1

    # Subclasses must define:
    #   initialize_options()
    #     provide default values for all options; may be customized by
    #     setup script, by options from config file(s), or by command-line
    #     options
    #   finalize_options()
    #     decide on the final values for all options; this is called
    #     after all possible intervention from the outside world
    #     (command-line, option file, etc.) has been processed
    #   run()
    #     run the command: do whatever it is we're here to do,
    #     controlled by the command's various option values
|
||||
def initialize_options(self):
|
||||
"""Set default values for all the options that this command
|
||||
supports. Note that these defaults may be overridden by other
|
||||
commands, by the setup script, by config files, or by the
|
||||
command-line. Thus, this is not the place to code dependencies
|
||||
between options; generally, 'initialize_options()' implementations
|
||||
are just a bunch of "self.foo = None" assignments.
|
||||
|
||||
This method must be implemented by all command classes.
|
||||
"""
|
||||
raise RuntimeError("abstract method -- subclass %s must override"
|
||||
% self.__class__)
|
||||
|
||||
def finalize_options(self):
|
||||
"""Set final values for all the options that this command supports.
|
||||
This is always called as late as possible, ie. after any option
|
||||
assignments from the command-line or from other commands have been
|
||||
done. Thus, this is the place to code option dependencies: if
|
||||
'foo' depends on 'bar', then it is safe to set 'foo' from 'bar' as
|
||||
long as 'foo' still has the same value it was assigned in
|
||||
'initialize_options()'.
|
||||
|
||||
This method must be implemented by all command classes.
|
||||
"""
|
||||
raise RuntimeError("abstract method -- subclass %s must override"
|
||||
% self.__class__)


    def dump_options(self, header=None, indent=""):
        from distutils.fancy_getopt import longopt_xlate
        if header is None:
            header = "command options for '%s':" % self.get_command_name()
        self.announce(indent + header, level=log.INFO)
        indent = indent + "  "
        for (option, _, _) in self.user_options:
            option = option.translate(longopt_xlate)
            if option[-1] == "=":
                option = option[:-1]
            value = getattr(self, option)
            self.announce(indent + "%s = %s" % (option, value),
                          level=log.INFO)

    def run(self):
        """A command's raison d'etre: carry out the action it exists to
        perform, controlled by the options initialized in
        'initialize_options()', customized by other commands, the setup
        script, the command-line, and config files, and finalized in
        'finalize_options()'. All terminal output and filesystem
        interaction should be done by 'run()'.

        This method must be implemented by all command classes.
        """
        raise RuntimeError("abstract method -- subclass %s must override"
                           % self.__class__)

    def announce(self, msg, level=1):
        """If the current verbosity level is greater than or equal to
        'level', print 'msg' to stdout.
        """
        log.log(level, msg)

    def debug_print(self, msg):
        """Print 'msg' to stdout if the global DEBUG (taken from the
        DISTUTILS_DEBUG environment variable) flag is true.
        """
        from distutils.debug import DEBUG
        if DEBUG:
            print(msg)
            sys.stdout.flush()


    # -- Option validation methods -------------------------------------
    # (these are very handy in writing the 'finalize_options()' method)
    #
    # NB. the general philosophy here is to ensure that a particular option
    # value meets certain type and value constraints. If not, we try to
    # force it into conformance (eg. if we expect a list but have a string,
    # split the string on comma and/or whitespace). If we can't force the
    # option into conformance, raise DistutilsOptionError. Thus, command
    # classes need do nothing more than (eg.)
    #   self.ensure_string_list('foo')
    # and they can be guaranteed that thereafter, self.foo will be
    # a list of strings.

    def _ensure_stringlike(self, option, what, default=None):
        val = getattr(self, option)
        if val is None:
            setattr(self, option, default)
            return default
        elif not isinstance(val, str):
            raise DistutilsOptionError("'%s' must be a %s (got `%s`)"
                                       % (option, what, val))
        return val

    def ensure_string(self, option, default=None):
        """Ensure that 'option' is a string; if not defined, set it to
        'default'.
        """
        self._ensure_stringlike(option, "string", default)

    def ensure_string_list(self, option):
        r"""Ensure that 'option' is a list of strings. If 'option' is
        currently a string, we split it either on /,\s*/ or /\s+/, so
        "foo bar baz", "foo,bar,baz", and "foo, bar baz" all become
        ["foo", "bar", "baz"].
        """
        val = getattr(self, option)
        if val is None:
            return
        elif isinstance(val, str):
            setattr(self, option, re.split(r',\s*|\s+', val))
        else:
            if isinstance(val, list):
                ok = all(isinstance(v, str) for v in val)
            else:
                ok = False
            if not ok:
                raise DistutilsOptionError(
                    "'%s' must be a list of strings (got %r)"
                    % (option, val))

    def _ensure_tested_string(self, option, tester, what, error_fmt,
                              default=None):
        val = self._ensure_stringlike(option, what, default)
        if val is not None and not tester(val):
            raise DistutilsOptionError(("error in '%s' option: " + error_fmt)
                                       % (option, val))

    def ensure_filename(self, option):
        """Ensure that 'option' is the name of an existing file."""
        self._ensure_tested_string(option, os.path.isfile,
                                   "filename",
                                   "'%s' does not exist or is not a file")

    def ensure_dirname(self, option):
        self._ensure_tested_string(option, os.path.isdir,
                                   "directory name",
                                   "'%s' does not exist or is not a directory")


    # -- Convenience methods for commands ------------------------------

    def get_command_name(self):
        if hasattr(self, 'command_name'):
            return self.command_name
        else:
            return self.__class__.__name__

    def set_undefined_options(self, src_cmd, *option_pairs):
        """Set the values of any "undefined" options from corresponding
        option values in some other command object. "Undefined" here means
        "is None", which is the convention used to indicate that an option
        has not been changed between 'initialize_options()' and
        'finalize_options()'. Usually called from 'finalize_options()' for
        options that depend on some other command rather than another
        option of the same command. 'src_cmd' is the other command from
        which option values will be taken (a command object will be created
        for it if necessary); the remaining arguments are
        '(src_option,dst_option)' tuples which mean "take the value of
        'src_option' in the 'src_cmd' command object, and copy it to
        'dst_option' in the current command object".
        """
        # Option_pairs: list of (src_option, dst_option) tuples
        src_cmd_obj = self.distribution.get_command_obj(src_cmd)
        src_cmd_obj.ensure_finalized()
        for (src_option, dst_option) in option_pairs:
            if getattr(self, dst_option) is None:
                setattr(self, dst_option, getattr(src_cmd_obj, src_option))
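    # For illustration, the typical call shape (compare
    # bdist_dumb.finalize_options later in this commit):
    #
    #     self.set_undefined_options('bdist',
    #                                ('dist_dir', 'dist_dir'),
    #                                ('plat_name', 'plat_name'))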

    def get_finalized_command(self, command, create=1):
        """Wrapper around Distribution's 'get_command_obj()' method: find
        (create if necessary and 'create' is true) the command object for
        'command', call its 'ensure_finalized()' method, and return the
        finalized command object.
        """
        cmd_obj = self.distribution.get_command_obj(command, create)
        cmd_obj.ensure_finalized()
        return cmd_obj

    # XXX rename to 'get_reinitialized_command()'? (should do the
    # same in dist.py, if so)
    def reinitialize_command(self, command, reinit_subcommands=0):
        return self.distribution.reinitialize_command(command,
                                                      reinit_subcommands)

    def run_command(self, command):
        """Run some other command: uses the 'run_command()' method of
        Distribution, which creates and finalizes the command object if
        necessary and then invokes its 'run()' method.
        """
        self.distribution.run_command(command)

    def get_sub_commands(self):
        """Determine the sub-commands that are relevant in the current
        distribution (i.e., that need to be run). This is based on the
        'sub_commands' class attribute: each tuple in that list may include
        a method that we call to determine if the subcommand needs to be
        run for the current distribution. Return a list of command names.
        """
        commands = []
        for (cmd_name, method) in self.sub_commands:
            if method is None or method(self):
                commands.append(cmd_name)
        return commands
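    # For illustration: a parent command's run() usually just walks this
    # list and dispatches each relevant sub-command, e.g.
    #
    #     for cmd_name in self.get_sub_commands():
    #         self.run_command(cmd_name)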


    # -- External world manipulation -----------------------------------

    def warn(self, msg):
        log.warn("warning: %s: %s\n", self.get_command_name(), msg)

    def execute(self, func, args, msg=None, level=1):
        util.execute(func, args, msg, dry_run=self.dry_run)

    def mkpath(self, name, mode=0o777):
        dir_util.mkpath(name, mode, dry_run=self.dry_run)

    def copy_file(self, infile, outfile, preserve_mode=1, preserve_times=1,
                  link=None, level=1):
        """Copy a file respecting verbose, dry-run and force flags. (The
        former two default to whatever is in the Distribution object, and
        the latter defaults to false for commands that don't define it.)"""
        return file_util.copy_file(infile, outfile, preserve_mode,
                                   preserve_times, not self.force, link,
                                   dry_run=self.dry_run)

    def copy_tree(self, infile, outfile, preserve_mode=1, preserve_times=1,
                  preserve_symlinks=0, level=1):
        """Copy an entire directory tree respecting verbose, dry-run,
        and force flags.
        """
        return dir_util.copy_tree(infile, outfile, preserve_mode,
                                  preserve_times, preserve_symlinks,
                                  not self.force, dry_run=self.dry_run)

    def move_file(self, src, dst, level=1):
        """Move a file respecting the dry-run flag."""
        return file_util.move_file(src, dst, dry_run=self.dry_run)

    def spawn(self, cmd, search_path=1, level=1):
        """Spawn an external command respecting the dry-run flag."""
        from distutils.spawn import spawn
        spawn(cmd, search_path, dry_run=self.dry_run)

    def make_archive(self, base_name, format, root_dir=None, base_dir=None,
                     owner=None, group=None):
        return archive_util.make_archive(base_name, format, root_dir, base_dir,
                                         dry_run=self.dry_run,
                                         owner=owner, group=group)

    def make_file(self, infiles, outfile, func, args,
                  exec_msg=None, skip_msg=None, level=1):
        """Special case of 'execute()' for operations that process one or
        more input files and generate one output file. Works just like
        'execute()', except the operation is skipped and a different
        message printed if 'outfile' already exists and is newer than all
        files listed in 'infiles'. If the command defined 'self.force',
        and it is true, then the action is run unconditionally, with no
        timestamp checks.
        """
        if skip_msg is None:
            skip_msg = "skipping %s (inputs unchanged)" % outfile

        # Allow 'infiles' to be a single string
        if isinstance(infiles, str):
            infiles = (infiles,)
        elif not isinstance(infiles, (list, tuple)):
            raise TypeError(
                "'infiles' must be a string, or a list or tuple of strings")

        if exec_msg is None:
            exec_msg = "generating %s from %s" % (outfile, ', '.join(infiles))

        # If 'outfile' must be regenerated (either because it doesn't
        # exist, is out-of-date, or the 'force' flag is true) then
        # perform the action that presumably regenerates it
        if self.force or dep_util.newer_group(infiles, outfile):
            self.execute(func, args, exec_msg, level)
        # Otherwise, print the "skip" message
        else:
            log.debug(skip_msg)
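    # Illustrative usage (hypothetical file names and callable): rebuild
    # 'parser.c' only when 'parser.g' has changed since the last run:
    #
    #     self.make_file(['parser.g'], 'parser.c',
    #                    generate_parser, (source, target),
    #                    exec_msg="generating parser.c from parser.g")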
@ -0,0 +1,31 @@
"""distutils.command

Package containing implementation of all the standard Distutils
commands."""

__all__ = ['build',
           'build_py',
           'build_ext',
           'build_clib',
           'build_scripts',
           'clean',
           'install',
           'install_lib',
           'install_headers',
           'install_scripts',
           'install_data',
           'sdist',
           'register',
           'bdist',
           'bdist_dumb',
           'bdist_rpm',
           'bdist_wininst',
           'check',
           'upload',
           # These two are reserved for future use:
           #'bdist_sdux',
           #'bdist_pkgtool',
           # Note:
           # bdist_packager is not included because it only provides
           # an abstract base class
          ]
@ -0,0 +1,143 @@
"""distutils.command.bdist

Implements the Distutils 'bdist' command (create a built [binary]
distribution)."""

import os
from distutils.core import Command
from distutils.errors import *
from distutils.util import get_platform


def show_formats():
    """Print list of available formats (arguments to "--format" option).
    """
    from distutils.fancy_getopt import FancyGetopt
    formats = []
    for format in bdist.format_commands:
        formats.append(("formats=" + format, None,
                        bdist.format_command[format][1]))
    pretty_printer = FancyGetopt(formats)
    pretty_printer.print_help("List of available distribution formats:")


class bdist(Command):

    description = "create a built (binary) distribution"

    user_options = [('bdist-base=', 'b',
                     "temporary directory for creating built distributions"),
                    ('plat-name=', 'p',
                     "platform name to embed in generated filenames "
                     "(default: %s)" % get_platform()),
                    ('formats=', None,
                     "formats for distribution (comma-separated list)"),
                    ('dist-dir=', 'd',
                     "directory to put final built distributions in "
                     "[default: dist]"),
                    ('skip-build', None,
                     "skip rebuilding everything (for testing/debugging)"),
                    ('owner=', 'u',
                     "Owner name used when creating a tar file"
                     " [default: current user]"),
                    ('group=', 'g',
                     "Group name used when creating a tar file"
                     " [default: current group]"),
                   ]

    boolean_options = ['skip-build']

    help_options = [
        ('help-formats', None,
         "lists available distribution formats", show_formats),
        ]

    # The following commands do not take a format option from bdist
    no_format_option = ('bdist_rpm',)

    # This won't do in reality: will need to distinguish RPM-ish Linux,
    # Debian-ish Linux, Solaris, FreeBSD, ..., Windows, Mac OS.
    default_format = {'posix': 'gztar',
                      'nt': 'zip'}

    # Establish the preferred order (for the --help-formats option).
    format_commands = ['rpm', 'gztar', 'bztar', 'xztar', 'ztar', 'tar',
                       'wininst', 'zip', 'msi']

    # And the real information.
    format_command = {'rpm':     ('bdist_rpm',  "RPM distribution"),
                      'gztar':   ('bdist_dumb', "gzip'ed tar file"),
                      'bztar':   ('bdist_dumb', "bzip2'ed tar file"),
                      'xztar':   ('bdist_dumb', "xz'ed tar file"),
                      'ztar':    ('bdist_dumb', "compressed tar file"),
                      'tar':     ('bdist_dumb', "tar file"),
                      'wininst': ('bdist_wininst',
                                  "Windows executable installer"),
                      'zip':     ('bdist_dumb', "ZIP file"),
                      'msi':     ('bdist_msi',  "Microsoft Installer")
                      }


    def initialize_options(self):
        self.bdist_base = None
        self.plat_name = None
        self.formats = None
        self.dist_dir = None
        self.skip_build = 0
        self.group = None
        self.owner = None

    def finalize_options(self):
        # have to finalize 'plat_name' before 'bdist_base'
        if self.plat_name is None:
            if self.skip_build:
                self.plat_name = get_platform()
            else:
                self.plat_name = self.get_finalized_command('build').plat_name

        # 'bdist_base' -- parent of per-built-distribution-format
        # temporary directories (eg. we'll probably have
        # "build/bdist.<plat>/dumb", "build/bdist.<plat>/rpm", etc.)
        if self.bdist_base is None:
            build_base = self.get_finalized_command('build').build_base
            self.bdist_base = os.path.join(build_base,
                                           'bdist.' + self.plat_name)

        self.ensure_string_list('formats')
        if self.formats is None:
            try:
                self.formats = [self.default_format[os.name]]
            except KeyError:
                raise DistutilsPlatformError(
                    "don't know how to create built distributions "
                    "on platform %s" % os.name)

        if self.dist_dir is None:
            self.dist_dir = "dist"

    def run(self):
        # Figure out which sub-commands we need to run.
        commands = []
        for format in self.formats:
            try:
                commands.append(self.format_command[format][0])
            except KeyError:
                raise DistutilsOptionError("invalid format '%s'" % format)

        # Reinitialize and run each command.
        for i in range(len(self.formats)):
            cmd_name = commands[i]
            sub_cmd = self.reinitialize_command(cmd_name)
            if cmd_name not in self.no_format_option:
                sub_cmd.format = self.formats[i]

            # passing the owner and group names for tar archiving
            if cmd_name == 'bdist_dumb':
                sub_cmd.owner = self.owner
                sub_cmd.group = self.group

            # If we're going to need to run this command again, tell it to
            # keep its temporary files around so subsequent runs go faster.
            if cmd_name in commands[i+1:]:
                sub_cmd.keep_temp = 1
            self.run_command(cmd_name)
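
# For illustration, a typical invocation of this command:
#
#     python setup.py bdist --formats=gztar,zip
#
# maps each requested format through 'format_command' above and runs
# 'bdist_dumb' once per format.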
@ -0,0 +1,123 @@
"""distutils.command.bdist_dumb

Implements the Distutils 'bdist_dumb' command (create a "dumb" built
distribution -- i.e., just an archive to be unpacked under $prefix or
$exec_prefix)."""

import os
from distutils.core import Command
from distutils.util import get_platform
from distutils.dir_util import remove_tree, ensure_relative
from distutils.errors import *
from distutils.sysconfig import get_python_version
from distutils import log

class bdist_dumb(Command):

    description = "create a \"dumb\" built distribution"

    user_options = [('bdist-dir=', 'd',
                     "temporary directory for creating the distribution"),
                    ('plat-name=', 'p',
                     "platform name to embed in generated filenames "
                     "(default: %s)" % get_platform()),
                    ('format=', 'f',
                     "archive format to create (tar, gztar, bztar, xztar, "
                     "ztar, zip)"),
                    ('keep-temp', 'k',
                     "keep the pseudo-installation tree around after " +
                     "creating the distribution archive"),
                    ('dist-dir=', 'd',
                     "directory to put final built distributions in"),
                    ('skip-build', None,
                     "skip rebuilding everything (for testing/debugging)"),
                    ('relative', None,
                     "build the archive using relative paths "
                     "(default: false)"),
                    ('owner=', 'u',
                     "Owner name used when creating a tar file"
                     " [default: current user]"),
                    ('group=', 'g',
                     "Group name used when creating a tar file"
                     " [default: current group]"),
                   ]

    boolean_options = ['keep-temp', 'skip-build', 'relative']

    default_format = { 'posix': 'gztar',
                       'nt': 'zip' }

    def initialize_options(self):
        self.bdist_dir = None
        self.plat_name = None
        self.format = None
        self.keep_temp = 0
        self.dist_dir = None
        self.skip_build = None
        self.relative = 0
        self.owner = None
        self.group = None

    def finalize_options(self):
        if self.bdist_dir is None:
            bdist_base = self.get_finalized_command('bdist').bdist_base
            self.bdist_dir = os.path.join(bdist_base, 'dumb')

        if self.format is None:
            try:
                self.format = self.default_format[os.name]
            except KeyError:
                raise DistutilsPlatformError(
                    "don't know how to create dumb built distributions "
                    "on platform %s" % os.name)

        self.set_undefined_options('bdist',
                                   ('dist_dir', 'dist_dir'),
                                   ('plat_name', 'plat_name'),
                                   ('skip_build', 'skip_build'))

    def run(self):
        if not self.skip_build:
            self.run_command('build')

        install = self.reinitialize_command('install', reinit_subcommands=1)
        install.root = self.bdist_dir
        install.skip_build = self.skip_build
        install.warn_dir = 0

        log.info("installing to %s", self.bdist_dir)
        self.run_command('install')

        # And make an archive relative to the root of the
        # pseudo-installation tree.
        archive_basename = "%s.%s" % (self.distribution.get_fullname(),
                                      self.plat_name)

        pseudoinstall_root = os.path.join(self.dist_dir, archive_basename)
        if not self.relative:
            archive_root = self.bdist_dir
        else:
            if (self.distribution.has_ext_modules() and
                (install.install_base != install.install_platbase)):
                raise DistutilsPlatformError(
                    "can't make a dumb built distribution where "
                    "base and platbase are different (%s, %s)"
                    % (repr(install.install_base),
                       repr(install.install_platbase)))
            else:
                archive_root = os.path.join(self.bdist_dir,
                                            ensure_relative(install.install_base))

        # Make the archive
        filename = self.make_archive(pseudoinstall_root,
                                     self.format, root_dir=archive_root,
                                     owner=self.owner, group=self.group)
        if self.distribution.has_ext_modules():
            pyversion = get_python_version()
        else:
            pyversion = 'any'
        self.distribution.dist_files.append(('bdist_dumb', pyversion,
                                             filename))

        if not self.keep_temp:
            remove_tree(self.bdist_dir, dry_run=self.dry_run)
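
# For illustration: for a distribution named "foo" at version 1.0 built on
# a linux-x86_64 machine with the default posix format, the archive created
# above would typically be
#
#     dist/foo-1.0.linux-x86_64.tar.gz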
@ -0,0 +1,749 @@
# Copyright (C) 2005, 2006 Martin von Löwis
# Licensed to PSF under a Contributor Agreement.
# The bdist_msi command proper
# based on bdist_wininst
"""
Implements the bdist_msi command.
"""

import os
import sys
import warnings
from distutils.core import Command
from distutils.dir_util import remove_tree
from distutils.sysconfig import get_python_version
from distutils.version import StrictVersion
from distutils.errors import DistutilsOptionError
from distutils.util import get_platform
from distutils import log
import msilib
from msilib import schema, sequence, text
from msilib import Directory, Feature, Dialog, add_data

class PyDialog(Dialog):
    """Dialog class with a fixed layout: controls at the top, then a ruler,
    then a list of buttons: back, next, cancel. Optionally a bitmap at the
    left."""
    def __init__(self, *args, **kw):
        """Dialog(database, name, x, y, w, h, attributes, title, first,
        default, cancel, bitmap=true)"""
        Dialog.__init__(self, *args)
        ruler = self.h - 36
        bmwidth = 152*ruler/328
        #if kw.get("bitmap", True):
        #    self.bitmap("Bitmap", 0, 0, bmwidth, ruler, "PythonWin")
        self.line("BottomLine", 0, ruler, self.w, 0)

    def title(self, title):
        "Set the title text of the dialog at the top."
        # name, x, y, w, h, flags=Visible|Enabled|Transparent|NoPrefix,
        # text, in VerdanaBold10
        self.text("Title", 15, 10, 320, 60, 0x30003,
                  r"{\VerdanaBold10}%s" % title)

    def back(self, title, next, name="Back", active=1):
        """Add a back button with a given title, the tab-next button,
        its name in the Control table, possibly initially disabled.

        Return the button, so that events can be associated"""
        if active:
            flags = 3   # Visible|Enabled
        else:
            flags = 1   # Visible
        return self.pushbutton(name, 180, self.h-27, 56, 17, flags, title, next)

    def cancel(self, title, next, name="Cancel", active=1):
        """Add a cancel button with a given title, the tab-next button,
        its name in the Control table, possibly initially disabled.

        Return the button, so that events can be associated"""
        if active:
            flags = 3   # Visible|Enabled
        else:
            flags = 1   # Visible
        return self.pushbutton(name, 304, self.h-27, 56, 17, flags, title, next)

    def next(self, title, next, name="Next", active=1):
        """Add a Next button with a given title, the tab-next button,
        its name in the Control table, possibly initially disabled.

        Return the button, so that events can be associated"""
        if active:
            flags = 3   # Visible|Enabled
        else:
            flags = 1   # Visible
        return self.pushbutton(name, 236, self.h-27, 56, 17, flags, title, next)

    def xbutton(self, name, title, next, xpos):
        """Add a button with a given title, the tab-next button,
        its name in the Control table, giving its x position; the
        y-position is aligned with the other buttons.

        Return the button, so that events can be associated"""
        return self.pushbutton(name, int(self.w*xpos - 28), self.h-27, 56, 17, 3, title, next)

class bdist_msi(Command):

    description = "create a Microsoft Installer (.msi) binary distribution"

    user_options = [('bdist-dir=', None,
                     "temporary directory for creating the distribution"),
                    ('plat-name=', 'p',
                     "platform name to embed in generated filenames "
                     "(default: %s)" % get_platform()),
                    ('keep-temp', 'k',
                     "keep the pseudo-installation tree around after " +
                     "creating the distribution archive"),
                    ('target-version=', None,
                     "require a specific python version" +
                     " on the target system"),
                    ('no-target-compile', 'c',
                     "do not compile .py to .pyc on the target system"),
                    ('no-target-optimize', 'o',
                     "do not compile .py to .pyo (optimized) "
                     "on the target system"),
                    ('dist-dir=', 'd',
                     "directory to put final built distributions in"),
                    ('skip-build', None,
                     "skip rebuilding everything (for testing/debugging)"),
                    ('install-script=', None,
                     "basename of installation script to be run after "
                     "installation or before deinstallation"),
                    ('pre-install-script=', None,
                     "Fully qualified filename of a script to be run before "
                     "any files are installed. This script need not be in the "
                     "distribution"),
                   ]

    boolean_options = ['keep-temp', 'no-target-compile', 'no-target-optimize',
                       'skip-build']

    all_versions = ['2.0', '2.1', '2.2', '2.3', '2.4',
                    '2.5', '2.6', '2.7', '2.8', '2.9',
                    '3.0', '3.1', '3.2', '3.3', '3.4',
                    '3.5', '3.6', '3.7', '3.8', '3.9']
    other_version = 'X'

    def __init__(self, *args, **kw):
        super().__init__(*args, **kw)
        warnings.warn("bdist_msi command is deprecated since Python 3.9, "
                      "use bdist_wheel (wheel packages) instead",
                      DeprecationWarning, 2)

    def initialize_options(self):
        self.bdist_dir = None
        self.plat_name = None
        self.keep_temp = 0
        self.no_target_compile = 0
        self.no_target_optimize = 0
        self.target_version = None
        self.dist_dir = None
        self.skip_build = None
        self.install_script = None
        self.pre_install_script = None
        self.versions = None

    def finalize_options(self):
        self.set_undefined_options('bdist', ('skip_build', 'skip_build'))

        if self.bdist_dir is None:
            bdist_base = self.get_finalized_command('bdist').bdist_base
            self.bdist_dir = os.path.join(bdist_base, 'msi')

        short_version = get_python_version()
        if (not self.target_version) and self.distribution.has_ext_modules():
            self.target_version = short_version

        if self.target_version:
            self.versions = [self.target_version]
            if not self.skip_build and self.distribution.has_ext_modules()\
               and self.target_version != short_version:
                raise DistutilsOptionError(
                    "target version can only be %s, or the '--skip-build'"
                    " option must be specified" % (short_version,))
        else:
            self.versions = list(self.all_versions)

        self.set_undefined_options('bdist',
                                   ('dist_dir', 'dist_dir'),
                                   ('plat_name', 'plat_name'),
                                   )

        if self.pre_install_script:
            raise DistutilsOptionError(
                "the pre-install-script feature is not yet implemented")

        if self.install_script:
            for script in self.distribution.scripts:
                if self.install_script == os.path.basename(script):
                    break
            else:
                raise DistutilsOptionError(
                    "install_script '%s' not found in scripts"
                    % self.install_script)
        self.install_script_key = None

    def run(self):
        if not self.skip_build:
            self.run_command('build')

        install = self.reinitialize_command('install', reinit_subcommands=1)
        install.prefix = self.bdist_dir
        install.skip_build = self.skip_build
        install.warn_dir = 0

        install_lib = self.reinitialize_command('install_lib')
        # we do not want to include pyc or pyo files
        install_lib.compile = 0
        install_lib.optimize = 0

        if self.distribution.has_ext_modules():
            # If we are building an installer for a Python version other
            # than the one we are currently running, then we need to ensure
            # our build_lib reflects the other Python version rather than ours.
            # Note that for target_version!=sys.version, we must have skipped the
            # build step, so there is no issue with enforcing the build of this
            # version.
            target_version = self.target_version
            if not target_version:
                assert self.skip_build, "Should have already checked this"
                target_version = '%d.%d' % sys.version_info[:2]
            plat_specifier = ".%s-%s" % (self.plat_name, target_version)
            build = self.get_finalized_command('build')
            build.build_lib = os.path.join(build.build_base,
                                           'lib' + plat_specifier)

        log.info("installing to %s", self.bdist_dir)
        install.ensure_finalized()

        # avoid warning of 'install_lib' about installing
        # into a directory not in sys.path
        sys.path.insert(0, os.path.join(self.bdist_dir, 'PURELIB'))

        install.run()

        del sys.path[0]

        self.mkpath(self.dist_dir)
        fullname = self.distribution.get_fullname()
        installer_name = self.get_installer_filename(fullname)
        installer_name = os.path.abspath(installer_name)
        if os.path.exists(installer_name):
            os.unlink(installer_name)

        metadata = self.distribution.metadata
        author = metadata.author
        if not author:
            author = metadata.maintainer
        if not author:
            author = "UNKNOWN"
        version = metadata.get_version()
        # ProductVersion must be strictly numeric
        # XXX need to deal with prerelease versions
        sversion = "%d.%d.%d" % StrictVersion(version).version
        # Prefix ProductName with Python x.y, so that
        # it sorts together with the other Python packages
        # in Add-Remove-Programs (ARP)
        fullname = self.distribution.get_fullname()
        if self.target_version:
            product_name = "Python %s %s" % (self.target_version, fullname)
        else:
            product_name = "Python %s" % (fullname)
        self.db = msilib.init_database(installer_name, schema,
                                       product_name, msilib.gen_uuid(),
                                       sversion, author)
        msilib.add_tables(self.db, sequence)
        props = [('DistVersion', version)]
        email = metadata.author_email or metadata.maintainer_email
        if email:
            props.append(("ARPCONTACT", email))
        if metadata.url:
            props.append(("ARPURLINFOABOUT", metadata.url))
        if props:
            add_data(self.db, 'Property', props)

        self.add_find_python()
        self.add_files()
        self.add_scripts()
        self.add_ui()
        self.db.Commit()

        if hasattr(self.distribution, 'dist_files'):
            tup = 'bdist_msi', self.target_version or 'any', fullname
            self.distribution.dist_files.append(tup)

        if not self.keep_temp:
            remove_tree(self.bdist_dir, dry_run=self.dry_run)

    def add_files(self):
        db = self.db
        cab = msilib.CAB("distfiles")
        rootdir = os.path.abspath(self.bdist_dir)

        root = Directory(db, cab, None, rootdir, "TARGETDIR", "SourceDir")
        f = Feature(db, "Python", "Python", "Everything",
                    0, 1, directory="TARGETDIR")

        items = [(f, root, '')]
        for version in self.versions + [self.other_version]:
            target = "TARGETDIR" + version
            name = default = "Python" + version
            desc = "Everything"
            if version is self.other_version:
                title = "Python from another location"
                level = 2
            else:
                title = "Python %s from registry" % version
                level = 1
            f = Feature(db, name, title, desc, 1, level, directory=target)
            dir = Directory(db, cab, root, rootdir, target, default)
            items.append((f, dir, version))
        db.Commit()

        seen = {}
        for feature, dir, version in items:
            todo = [dir]
            while todo:
                dir = todo.pop()
                for file in os.listdir(dir.absolute):
                    afile = os.path.join(dir.absolute, file)
                    if os.path.isdir(afile):
                        short = "%s|%s" % (dir.make_short(file), file)
                        default = file + version
                        newdir = Directory(db, cab, dir, file, default, short)
                        todo.append(newdir)
                    else:
                        if not dir.component:
                            dir.start_component(dir.logical, feature, 0)
                        if afile not in seen:
                            key = seen[afile] = dir.add_file(file)
                            if file == self.install_script:
                                if self.install_script_key:
                                    raise DistutilsOptionError(
                                        "Multiple files with name %s" % file)
                                self.install_script_key = '[#%s]' % key
                        else:
                            key = seen[afile]
                            add_data(self.db, "DuplicateFile",
                                     [(key + version, dir.component, key, None, dir.logical)])
        db.Commit()
        cab.commit(db)

    def add_find_python(self):
        """Adds code to the installer to compute the location of Python.

        Properties PYTHON.MACHINE.X.Y and PYTHON.USER.X.Y will be set from the
        registry for each version of Python.

        Properties TARGETDIRX.Y will be set from PYTHON.USER.X.Y if defined,
        else from PYTHON.MACHINE.X.Y.

        Properties PYTHONX.Y will be set to TARGETDIRX.Y\\python.exe"""

        start = 402
        for ver in self.versions:
            install_path = r"SOFTWARE\Python\PythonCore\%s\InstallPath" % ver
            machine_reg = "python.machine." + ver
            user_reg = "python.user." + ver
            machine_prop = "PYTHON.MACHINE." + ver
            user_prop = "PYTHON.USER." + ver
            machine_action = "PythonFromMachine" + ver
            user_action = "PythonFromUser" + ver
            exe_action = "PythonExe" + ver
            target_dir_prop = "TARGETDIR" + ver
            exe_prop = "PYTHON" + ver
            if msilib.Win64:
                # type: msidbLocatorTypeRawValue + msidbLocatorType64bit
                Type = 2+16
            else:
                Type = 2
            add_data(self.db, "RegLocator",
                     [(machine_reg, 2, install_path, None, Type),
                      (user_reg, 1, install_path, None, Type)])
            add_data(self.db, "AppSearch",
                     [(machine_prop, machine_reg),
                      (user_prop, user_reg)])
            add_data(self.db, "CustomAction",
                     [(machine_action, 51+256, target_dir_prop, "[" + machine_prop + "]"),
                      (user_action, 51+256, target_dir_prop, "[" + user_prop + "]"),
                      (exe_action, 51+256, exe_prop, "[" + target_dir_prop + "]\\python.exe"),
                     ])
            add_data(self.db, "InstallExecuteSequence",
                     [(machine_action, machine_prop, start),
                      (user_action, user_prop, start + 1),
                      (exe_action, None, start + 2),
                     ])
            add_data(self.db, "InstallUISequence",
                     [(machine_action, machine_prop, start),
                      (user_action, user_prop, start + 1),
                      (exe_action, None, start + 2),
                     ])
            add_data(self.db, "Condition",
                     [("Python" + ver, 0, "NOT TARGETDIR" + ver)])
            start += 4
        assert start < 500

    def add_scripts(self):
        if self.install_script:
            start = 6800
            for ver in self.versions + [self.other_version]:
                install_action = "install_script." + ver
                exe_prop = "PYTHON" + ver
                add_data(self.db, "CustomAction",
                         [(install_action, 50, exe_prop, self.install_script_key)])
                add_data(self.db, "InstallExecuteSequence",
                         [(install_action, "&Python%s=3" % ver, start)])
                start += 1
        # XXX pre-install scripts are currently refused in finalize_options()
        #     but if this feature is completed, it will also need to add
        #     entries for each version as the above code does
        if self.pre_install_script:
            scriptfn = os.path.join(self.bdist_dir, "preinstall.bat")
            with open(scriptfn, "w") as f:
                # The batch file will be executed with [PYTHON], so that %1
                # is the path to the Python interpreter; %0 will be the path
                # of the batch file.
                # rem ="""
                # %1 %0
                # exit
                # """
                # <actual script>
                f.write('rem ="""\n%1 %0\nexit\n"""\n')
                with open(self.pre_install_script) as fin:
                    f.write(fin.read())
            add_data(self.db, "Binary",
                     [("PreInstall", msilib.Binary(scriptfn))
                     ])
            add_data(self.db, "CustomAction",
                     [("PreInstall", 2, "PreInstall", None)
                     ])
            add_data(self.db, "InstallExecuteSequence",
                     [("PreInstall", "NOT Installed", 450)])


    def add_ui(self):
        db = self.db
        x = y = 50
        w = 370
        h = 300
        title = "[ProductName] Setup"

        # see "Dialog Style Bits"
        modal = 3      # visible | modal
        modeless = 1   # visible
        track_disk_space = 32

        # UI customization properties
        add_data(db, "Property",
                 # See "DefaultUIFont Property"
                 [("DefaultUIFont", "DlgFont8"),
                  # See "ErrorDialog Style Bit"
                  ("ErrorDialog", "ErrorDlg"),
                  ("Progress1", "Install"),   # modified in maintenance type dlg
                  ("Progress2", "installs"),
                  ("MaintenanceForm_Action", "Repair"),
                  # possible values: ALL, JUSTME
                  ("WhichUsers", "ALL")
                 ])

        # Fonts, see "TextStyle Table"
        add_data(db, "TextStyle",
                 [("DlgFont8", "Tahoma", 9, None, 0),
                  ("DlgFontBold8", "Tahoma", 8, None, 1),   # bold
                  ("VerdanaBold10", "Verdana", 10, None, 1),
                  ("VerdanaRed9", "Verdana", 9, 255, 0),
                 ])

        # UI Sequences, see "InstallUISequence Table", "Using a Sequence Table"
        # Numbers indicate sequence; see sequence.py for how these actions integrate
        add_data(db, "InstallUISequence",
                 [("PrepareDlg", "Not Privileged or Windows9x or Installed", 140),
                  ("WhichUsersDlg", "Privileged and not Windows9x and not Installed", 141),
                  # In the user interface, assume all-users installation if privileged.
                  ("SelectFeaturesDlg", "Not Installed", 1230),
                  # XXX no support for resume installations yet
                  #("ResumeDlg", "Installed AND (RESUME OR Preselected)", 1240),
                  ("MaintenanceTypeDlg", "Installed AND NOT RESUME AND NOT Preselected", 1250),
                  ("ProgressDlg", None, 1280)])

        add_data(db, 'ActionText', text.ActionText)
        add_data(db, 'UIText', text.UIText)
        #####################################################################
        # Standard dialogs: FatalError, UserExit, ExitDialog
        fatal = PyDialog(db, "FatalError", x, y, w, h, modal, title,
                         "Finish", "Finish", "Finish")
        fatal.title("[ProductName] Installer ended prematurely")
        fatal.back("< Back", "Finish", active=0)
        fatal.cancel("Cancel", "Back", active=0)
        fatal.text("Description1", 15, 70, 320, 80, 0x30003,
                   "[ProductName] setup ended prematurely because of an error. Your system has not been modified. To install this program at a later time, please run the installation again.")
        fatal.text("Description2", 15, 155, 320, 20, 0x30003,
                   "Click the Finish button to exit the Installer.")
        c = fatal.next("Finish", "Cancel", name="Finish")
        c.event("EndDialog", "Exit")

        user_exit = PyDialog(db, "UserExit", x, y, w, h, modal, title,
                             "Finish", "Finish", "Finish")
        user_exit.title("[ProductName] Installer was interrupted")
        user_exit.back("< Back", "Finish", active=0)
        user_exit.cancel("Cancel", "Back", active=0)
        user_exit.text("Description1", 15, 70, 320, 80, 0x30003,
                       "[ProductName] setup was interrupted. Your system has not been modified. "
                       "To install this program at a later time, please run the installation again.")
        user_exit.text("Description2", 15, 155, 320, 20, 0x30003,
                       "Click the Finish button to exit the Installer.")
        c = user_exit.next("Finish", "Cancel", name="Finish")
        c.event("EndDialog", "Exit")

        exit_dialog = PyDialog(db, "ExitDialog", x, y, w, h, modal, title,
                               "Finish", "Finish", "Finish")
        exit_dialog.title("Completing the [ProductName] Installer")
        exit_dialog.back("< Back", "Finish", active=0)
        exit_dialog.cancel("Cancel", "Back", active=0)
        exit_dialog.text("Description", 15, 235, 320, 20, 0x30003,
                         "Click the Finish button to exit the Installer.")
        c = exit_dialog.next("Finish", "Cancel", name="Finish")
        c.event("EndDialog", "Return")

        #####################################################################
        # Required dialog: FilesInUse, ErrorDlg
        inuse = PyDialog(db, "FilesInUse",
                         x, y, w, h,
                         19,   # KeepModeless|Modal|Visible
                         title,
                         "Retry", "Retry", "Retry", bitmap=False)
        inuse.text("Title", 15, 6, 200, 15, 0x30003,
                   r"{\DlgFontBold8}Files in Use")
        inuse.text("Description", 20, 23, 280, 20, 0x30003,
                   "Some files that need to be updated are currently in use.")
        inuse.text("Text", 20, 55, 330, 50, 3,
                   "The following applications are using files that need to be updated by this setup. Close these applications and then click Retry to continue the installation or Cancel to exit it.")
        inuse.control("List", "ListBox", 20, 107, 330, 130, 7, "FileInUseProcess",
                      None, None, None)
        c = inuse.back("Exit", "Ignore", name="Exit")
        c.event("EndDialog", "Exit")
        c = inuse.next("Ignore", "Retry", name="Ignore")
        c.event("EndDialog", "Ignore")
        c = inuse.cancel("Retry", "Exit", name="Retry")
        c.event("EndDialog", "Retry")

        # See "Error Dialog". See "ICE20" for the required names of the controls.
        error = Dialog(db, "ErrorDlg",
                       50, 10, 330, 101,
                       65543,   # Error|Minimize|Modal|Visible
                       title,
                       "ErrorText", None, None)
        error.text("ErrorText", 50, 9, 280, 48, 3, "")
        #error.control("ErrorIcon", "Icon", 15, 9, 24, 24, 5242881, None, "py.ico", None, None)
        error.pushbutton("N", 120, 72, 81, 21, 3, "No", None).event("EndDialog", "ErrorNo")
        error.pushbutton("Y", 240, 72, 81, 21, 3, "Yes", None).event("EndDialog", "ErrorYes")
        error.pushbutton("A", 0, 72, 81, 21, 3, "Abort", None).event("EndDialog", "ErrorAbort")
        error.pushbutton("C", 42, 72, 81, 21, 3, "Cancel", None).event("EndDialog", "ErrorCancel")
        error.pushbutton("I", 81, 72, 81, 21, 3, "Ignore", None).event("EndDialog", "ErrorIgnore")
        error.pushbutton("O", 159, 72, 81, 21, 3, "Ok", None).event("EndDialog", "ErrorOk")
        error.pushbutton("R", 198, 72, 81, 21, 3, "Retry", None).event("EndDialog", "ErrorRetry")

        #####################################################################
        # Global "Query Cancel" dialog
        cancel = Dialog(db, "CancelDlg", 50, 10, 260, 85, 3, title,
                        "No", "No", "No")
        cancel.text("Text", 48, 15, 194, 30, 3,
                    "Are you sure you want to cancel [ProductName] installation?")
        #cancel.control("Icon", "Icon", 15, 15, 24, 24, 5242881, None,
        #               "py.ico", None, None)
        c = cancel.pushbutton("Yes", 72, 57, 56, 17, 3, "Yes", "No")
        c.event("EndDialog", "Exit")

        c = cancel.pushbutton("No", 132, 57, 56, 17, 3, "No", "Yes")
        c.event("EndDialog", "Return")

        #####################################################################
        # Global "Wait for costing" dialog
        costing = Dialog(db, "WaitForCostingDlg", 50, 10, 260, 85, modal, title,
                         "Return", "Return", "Return")
        costing.text("Text", 48, 15, 194, 30, 3,
                     "Please wait while the installer finishes determining your disk space requirements.")
        c = costing.pushbutton("Return", 102, 57, 56, 17, 3, "Return", None)
        c.event("EndDialog", "Exit")

        #####################################################################
        # Preparation dialog: no user input except cancellation
        prep = PyDialog(db, "PrepareDlg", x, y, w, h, modeless, title,
                        "Cancel", "Cancel", "Cancel")
        prep.text("Description", 15, 70, 320, 40, 0x30003,
                  "Please wait while the Installer prepares to guide you through the installation.")
        prep.title("Welcome to the [ProductName] Installer")
        c = prep.text("ActionText", 15, 110, 320, 20, 0x30003, "Pondering...")
        c.mapping("ActionText", "Text")
        c = prep.text("ActionData", 15, 135, 320, 30, 0x30003, None)
        c.mapping("ActionData", "Text")
        prep.back("Back", None, active=0)
        prep.next("Next", None, active=0)
        c = prep.cancel("Cancel", None)
        c.event("SpawnDialog", "CancelDlg")

        #####################################################################
        # Feature (Python directory) selection
        seldlg = PyDialog(db, "SelectFeaturesDlg", x, y, w, h, modal, title,
                          "Next", "Next", "Cancel")
        seldlg.title("Select Python Installations")

        seldlg.text("Hint", 15, 30, 300, 20, 3,
                    "Select the Python locations where %s should be installed."
                    % self.distribution.get_fullname())

        seldlg.back("< Back", None, active=0)
        c = seldlg.next("Next >", "Cancel")
        order = 1
        c.event("[TARGETDIR]", "[SourceDir]", ordering=order)
        for version in self.versions + [self.other_version]:
            order += 1
            c.event("[TARGETDIR]", "[TARGETDIR%s]" % version,
                    "FEATURE_SELECTED AND &Python%s=3" % version,
                    ordering=order)
        c.event("SpawnWaitDialog", "WaitForCostingDlg", ordering=order + 1)
        c.event("EndDialog", "Return", ordering=order + 2)
        c = seldlg.cancel("Cancel", "Features")
        c.event("SpawnDialog", "CancelDlg")

        c = seldlg.control("Features", "SelectionTree", 15, 60, 300, 120, 3,
                           "FEATURE", None, "PathEdit", None)
        c.event("[FEATURE_SELECTED]", "1")
        ver = self.other_version
        install_other_cond = "FEATURE_SELECTED AND &Python%s=3" % ver
        dont_install_other_cond = "FEATURE_SELECTED AND &Python%s<>3" % ver

        c = seldlg.text("Other", 15, 200, 300, 15, 3,
                        "Provide an alternate Python location")
        c.condition("Enable", install_other_cond)
        c.condition("Show", install_other_cond)
        c.condition("Disable", dont_install_other_cond)
        c.condition("Hide", dont_install_other_cond)

        c = seldlg.control("PathEdit", "PathEdit", 15, 215, 300, 16, 1,
                           "TARGETDIR" + ver, None, "Next", None)
        c.condition("Enable", install_other_cond)
        c.condition("Show", install_other_cond)
        c.condition("Disable", dont_install_other_cond)
        c.condition("Hide", dont_install_other_cond)

        #####################################################################
        # Disk cost
        cost = PyDialog(db, "DiskCostDlg", x, y, w, h, modal, title,
                        "OK", "OK", "OK", bitmap=False)
        cost.text("Title", 15, 6, 200, 15, 0x30003,
                  r"{\DlgFontBold8}Disk Space Requirements")
        cost.text("Description", 20, 20, 280, 20, 0x30003,
                  "The disk space required for the installation of the selected features.")
        cost.text("Text", 20, 53, 330, 60, 3,
                  "The highlighted volumes (if any) do not have enough disk space "
                  "available for the currently selected features. You can either "
                  "remove some files from the highlighted volumes, or choose to "
                  "install less features onto local drive(s), or select different "
                  "destination drive(s).")
        cost.control("VolumeList", "VolumeCostList", 20, 100, 330, 150, 393223,
                     None, "{120}{70}{70}{70}{70}", None, None)
        cost.xbutton("OK", "Ok", None, 0.5).event("EndDialog", "Return")

        #####################################################################
        # WhichUsers Dialog. Only available on NT, and for privileged users.
        # This must be run before FindRelatedProducts, because that will
        # take into account whether the previous installation was per-user
        # or per-machine. We currently don't support going back to this
        # dialog after "Next" was selected; to support this, we would need to
        # find how to reset the ALLUSERS property, and how to re-run
        # FindRelatedProducts.
        # On Windows9x, the ALLUSERS property is ignored on the command line
        # and in the Property table, but, according to the documentation, the
        # installer fails if a dialog attempts to set ALLUSERS.
        whichusers = PyDialog(db, "WhichUsersDlg", x, y, w, h, modal, title,
                              "AdminInstall", "Next", "Cancel")
        whichusers.title("Select whether to install [ProductName] for all users of this computer.")
        # A radio group with two options: allusers, justme
        g = whichusers.radiogroup("AdminInstall", 15, 60, 260, 50, 3,
                                  "WhichUsers", "", "Next")
        g.add("ALL", 0, 5, 150, 20, "Install for all users")
        g.add("JUSTME", 0, 25, 150, 20, "Install just for me")

        whichusers.back("Back", None, active=0)

        c = whichusers.next("Next >", "Cancel")
        c.event("[ALLUSERS]", "1", 'WhichUsers="ALL"', 1)
        c.event("EndDialog", "Return", ordering=2)

        c = whichusers.cancel("Cancel", "AdminInstall")
        c.event("SpawnDialog", "CancelDlg")

        #####################################################################
        # Installation Progress dialog (modeless)
        progress = PyDialog(db, "ProgressDlg", x, y, w, h, modeless, title,
                            "Cancel", "Cancel", "Cancel", bitmap=False)
        progress.text("Title", 20, 15, 200, 15, 0x30003,
                      r"{\DlgFontBold8}[Progress1] [ProductName]")
        progress.text("Text", 35, 65, 300, 30, 3,
                      "Please wait while the Installer [Progress2] [ProductName]. "
                      "This may take several minutes.")
        progress.text("StatusLabel", 35, 100, 35, 20, 3, "Status:")

        c = progress.text("ActionText", 70, 100, w-70, 20, 3, "Pondering...")
        c.mapping("ActionText", "Text")

        #c = progress.text("ActionData", 35, 140, 300, 20, 3, None)
        #c.mapping("ActionData", "Text")

        c = progress.control("ProgressBar", "ProgressBar", 35, 120, 300, 10, 65537,
                             None, "Progress done", None, None)
        c.mapping("SetProgress", "Progress")

        progress.back("< Back", "Next", active=False)
        progress.next("Next >", "Cancel", active=False)
        progress.cancel("Cancel", "Back").event("SpawnDialog", "CancelDlg")

        ###################################################################
        # Maintenance type: repair/uninstall
        maint = PyDialog(db, "MaintenanceTypeDlg", x, y, w, h, modal, title,
                         "Next", "Next", "Cancel")
        maint.title("Welcome to the [ProductName] Setup Wizard")
        maint.text("BodyText", 15, 63, 330, 42, 3,
                   "Select whether you want to repair or remove [ProductName].")
        g = maint.radiogroup("RepairRadioGroup", 15, 108, 330, 60, 3,
                             "MaintenanceForm_Action", "", "Next")
        #g.add("Change", 0, 0, 200, 17, "&Change [ProductName]")
        g.add("Repair", 0, 18, 200, 17, "&Repair [ProductName]")
        g.add("Remove", 0, 36, 200, 17, "Re&move [ProductName]")

        maint.back("< Back", None, active=False)
        c = maint.next("Finish", "Cancel")
        # Change installation: Change progress dialog to "Change", then ask
        # for feature selection
        #c.event("[Progress1]", "Change", 'MaintenanceForm_Action="Change"', 1)
        #c.event("[Progress2]", "changes", 'MaintenanceForm_Action="Change"', 2)

        # Reinstall: Change progress dialog to "Repair", then invoke reinstall
        # Also set list of reinstalled features to "ALL"
        c.event("[REINSTALL]", "ALL", 'MaintenanceForm_Action="Repair"', 5)
        c.event("[Progress1]", "Repairing", 'MaintenanceForm_Action="Repair"', 6)
        c.event("[Progress2]", "repairs", 'MaintenanceForm_Action="Repair"', 7)
        c.event("Reinstall", "ALL", 'MaintenanceForm_Action="Repair"', 8)

        # Uninstall: Change progress to "Remove", then invoke uninstall
        # Also set list of removed features to "ALL"
        c.event("[REMOVE]", "ALL", 'MaintenanceForm_Action="Remove"', 11)
        c.event("[Progress1]", "Removing", 'MaintenanceForm_Action="Remove"', 12)
        c.event("[Progress2]", "removes", 'MaintenanceForm_Action="Remove"', 13)
        c.event("Remove", "ALL", 'MaintenanceForm_Action="Remove"', 14)

        # Close dialog when maintenance action scheduled
        c.event("EndDialog", "Return", 'MaintenanceForm_Action<>"Change"', 20)
        #c.event("NewDialog", "SelectFeaturesDlg", 'MaintenanceForm_Action="Change"', 21)

        maint.cancel("Cancel", "RepairRadioGroup").event("SpawnDialog", "CancelDlg")

    def get_installer_filename(self, fullname):
        # Factored out to allow overriding in subclasses
        if self.target_version:
            base_name = "%s.%s-py%s.msi" % (fullname, self.plat_name,
                                            self.target_version)
        else:
            base_name = "%s.%s.msi" % (fullname, self.plat_name)
        installer_name = os.path.join(self.dist_dir, base_name)
        return installer_name
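    # For illustration: for a distribution "foo" at version 1.0 built for
    # win-amd64 against Python 3.9, the name computed above would be
    #
    #     dist/foo-1.0.win-amd64-py3.9.msi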
@ -0,0 +1,579 @@
|
||||
"""distutils.command.bdist_rpm
|
||||
|
||||
Implements the Distutils 'bdist_rpm' command (create RPM source and binary
|
||||
distributions)."""
|
||||
|
||||
import subprocess, sys, os
|
||||
from distutils.core import Command
|
||||
from distutils.debug import DEBUG
|
||||
from distutils.file_util import write_file
|
||||
from distutils.errors import *
|
||||
from distutils.sysconfig import get_python_version
|
||||
from distutils import log
|
||||
|
||||
class bdist_rpm(Command):
|
||||
|
||||
description = "create an RPM distribution"
|
||||
|
||||
user_options = [
|
||||
('bdist-base=', None,
|
||||
"base directory for creating built distributions"),
|
||||
('rpm-base=', None,
|
||||
"base directory for creating RPMs (defaults to \"rpm\" under "
|
||||
"--bdist-base; must be specified for RPM 2)"),
|
||||
('dist-dir=', 'd',
|
||||
"directory to put final RPM files in "
|
||||
"(and .spec files if --spec-only)"),
|
||||
('python=', None,
|
||||
"path to Python interpreter to hard-code in the .spec file "
|
||||
"(default: \"python\")"),
|
||||
('fix-python', None,
|
||||
"hard-code the exact path to the current Python interpreter in "
|
||||
"the .spec file"),
|
||||
('spec-only', None,
|
||||
"only regenerate spec file"),
|
||||
('source-only', None,
|
||||
"only generate source RPM"),
|
||||
('binary-only', None,
|
||||
"only generate binary RPM"),
|
||||
('use-bzip2', None,
|
||||
"use bzip2 instead of gzip to create source distribution"),
|
||||
|
||||
# More meta-data: too RPM-specific to put in the setup script,
|
||||
# but needs to go in the .spec file -- so we make these options
|
||||
# to "bdist_rpm". The idea is that packagers would put this
|
||||
# info in setup.cfg, although they are of course free to
|
||||
# supply it on the command line.
|
||||
('distribution-name=', None,
|
||||
"name of the (Linux) distribution to which this "
|
||||
"RPM applies (*not* the name of the module distribution!)"),
|
||||
('group=', None,
|
||||
"package classification [default: \"Development/Libraries\"]"),
|
||||
('release=', None,
|
||||
"RPM release number"),
|
||||
('serial=', None,
|
||||
"RPM serial number"),
|
||||
('vendor=', None,
|
||||
"RPM \"vendor\" (eg. \"Joe Blow <joe@example.com>\") "
|
||||
"[default: maintainer or author from setup script]"),
|
||||
('packager=', None,
|
||||
"RPM packager (eg. \"Jane Doe <jane@example.net>\") "
|
||||
"[default: vendor]"),
|
||||
('doc-files=', None,
|
||||
"list of documentation files (space or comma-separated)"),
|
||||
('changelog=', None,
|
||||
"RPM changelog"),
|
||||
('icon=', None,
|
||||
"name of icon file"),
|
||||
('provides=', None,
|
||||
"capabilities provided by this package"),
|
||||
('requires=', None,
|
||||
"capabilities required by this package"),
|
||||
('conflicts=', None,
|
||||
"capabilities which conflict with this package"),
|
||||
('build-requires=', None,
|
||||
"capabilities required to build this package"),
|
||||
('obsoletes=', None,
|
||||
"capabilities made obsolete by this package"),
|
||||
('no-autoreq', None,
|
||||
"do not automatically calculate dependencies"),
|
||||
|
||||
# Actions to take when building RPM
|
||||
('keep-temp', 'k',
|
||||
"don't clean up RPM build directory"),
|
||||
('no-keep-temp', None,
|
||||
"clean up RPM build directory [default]"),
|
||||
('use-rpm-opt-flags', None,
|
||||
"compile with RPM_OPT_FLAGS when building from source RPM"),
|
||||
('no-rpm-opt-flags', None,
|
||||
"do not pass any RPM CFLAGS to compiler"),
|
||||
('rpm3-mode', None,
|
||||
"RPM 3 compatibility mode (default)"),
|
||||
('rpm2-mode', None,
|
||||
"RPM 2 compatibility mode"),
|
||||
|
||||
# Add the hooks necessary for specifying custom scripts
|
||||
('prep-script=', None,
|
||||
"Specify a script for the PREP phase of RPM building"),
|
||||
('build-script=', None,
|
||||
"Specify a script for the BUILD phase of RPM building"),
|
||||
|
||||
('pre-install=', None,
|
||||
"Specify a script for the pre-INSTALL phase of RPM building"),
|
||||
('install-script=', None,
|
||||
"Specify a script for the INSTALL phase of RPM building"),
|
||||
('post-install=', None,
|
||||
"Specify a script for the post-INSTALL phase of RPM building"),
|
||||
|
||||
('pre-uninstall=', None,
|
||||
"Specify a script for the pre-UNINSTALL phase of RPM building"),
|
||||
('post-uninstall=', None,
|
||||
"Specify a script for the post-UNINSTALL phase of RPM building"),
|
||||
|
||||
('clean-script=', None,
|
||||
"Specify a script for the CLEAN phase of RPM building"),
|
||||
|
||||
('verify-script=', None,
|
||||
"Specify a script for the VERIFY phase of the RPM build"),
|
||||
|
||||
# Allow a packager to explicitly force an architecture
|
||||
('force-arch=', None,
|
||||
"Force an architecture onto the RPM build process"),
|
||||
|
||||
('quiet', 'q',
|
||||
"Run the INSTALL phase of RPM building in quiet mode"),
|
||||
]
|
||||
|
||||
boolean_options = ['keep-temp', 'use-rpm-opt-flags', 'rpm3-mode',
|
||||
'no-autoreq', 'quiet']
|
||||
|
||||
negative_opt = {'no-keep-temp': 'keep-temp',
|
||||
'no-rpm-opt-flags': 'use-rpm-opt-flags',
|
||||
'rpm2-mode': 'rpm3-mode'}
|
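    # Editorial sketch, not part of the original source: each negative_opt
    # key is a switch that clears the option it maps to, e.g.
    #
    #     python setup.py bdist_rpm --no-keep-temp   # sets keep_temp = 0
    #     python setup.py bdist_rpm --rpm2-mode      # sets rpm3_mode = 0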
||||
|
||||
|
||||
def initialize_options(self):
|
||||
self.bdist_base = None
|
||||
self.rpm_base = None
|
||||
self.dist_dir = None
|
||||
self.python = None
|
||||
self.fix_python = None
|
||||
self.spec_only = None
|
||||
self.binary_only = None
|
||||
self.source_only = None
|
||||
self.use_bzip2 = None
|
||||
|
||||
self.distribution_name = None
|
||||
self.group = None
|
||||
self.release = None
|
||||
self.serial = None
|
||||
self.vendor = None
|
||||
self.packager = None
|
||||
self.doc_files = None
|
||||
self.changelog = None
|
||||
self.icon = None
|
||||
|
||||
self.prep_script = None
|
||||
self.build_script = None
|
||||
self.install_script = None
|
||||
self.clean_script = None
|
||||
self.verify_script = None
|
||||
self.pre_install = None
|
||||
self.post_install = None
|
||||
self.pre_uninstall = None
|
||||
self.post_uninstall = None
|
||||
self.prep = None
|
||||
self.provides = None
|
||||
self.requires = None
|
||||
self.conflicts = None
|
||||
self.build_requires = None
|
||||
self.obsoletes = None
|
||||
|
||||
self.keep_temp = 0
|
||||
self.use_rpm_opt_flags = 1
|
||||
self.rpm3_mode = 1
|
||||
self.no_autoreq = 0
|
||||
|
||||
self.force_arch = None
|
||||
self.quiet = 0
|
||||
|
||||
def finalize_options(self):
|
||||
self.set_undefined_options('bdist', ('bdist_base', 'bdist_base'))
|
||||
if self.rpm_base is None:
|
||||
if not self.rpm3_mode:
|
||||
raise DistutilsOptionError(
|
||||
"you must specify --rpm-base in RPM 2 mode")
|
||||
self.rpm_base = os.path.join(self.bdist_base, "rpm")
|
||||
|
||||
if self.python is None:
|
||||
if self.fix_python:
|
||||
self.python = sys.executable
|
||||
else:
|
||||
self.python = "python3"
|
||||
elif self.fix_python:
|
||||
raise DistutilsOptionError(
|
||||
"--python and --fix-python are mutually exclusive options")
|
||||
|
||||
if os.name != 'posix':
|
||||
raise DistutilsPlatformError("don't know how to create RPM "
|
||||
"distributions on platform %s" % os.name)
|
||||
if self.binary_only and self.source_only:
|
||||
raise DistutilsOptionError(
|
||||
"cannot supply both '--source-only' and '--binary-only'")
|
||||
|
||||
# don't pass CFLAGS to pure python distributions
|
||||
if not self.distribution.has_ext_modules():
|
||||
self.use_rpm_opt_flags = 0
|
||||
|
||||
self.set_undefined_options('bdist', ('dist_dir', 'dist_dir'))
|
||||
self.finalize_package_data()
|
||||
|
||||
def finalize_package_data(self):
|
||||
self.ensure_string('group', "Development/Libraries")
|
||||
self.ensure_string('vendor',
|
||||
"%s <%s>" % (self.distribution.get_contact(),
|
||||
self.distribution.get_contact_email()))
|
||||
self.ensure_string('packager')
|
||||
self.ensure_string_list('doc_files')
|
||||
if isinstance(self.doc_files, list):
|
||||
for readme in ('README', 'README.txt'):
|
||||
if os.path.exists(readme) and readme not in self.doc_files:
|
||||
self.doc_files.append(readme)
|
||||
|
||||
self.ensure_string('release', "1")
|
||||
self.ensure_string('serial') # should it be an int?
|
||||
|
||||
self.ensure_string('distribution_name')
|
||||
|
||||
self.ensure_string('changelog')
|
||||
# Format changelog correctly
|
||||
self.changelog = self._format_changelog(self.changelog)
|
||||
|
||||
self.ensure_filename('icon')
|
||||
|
||||
self.ensure_filename('prep_script')
|
||||
self.ensure_filename('build_script')
|
||||
self.ensure_filename('install_script')
|
||||
self.ensure_filename('clean_script')
|
||||
self.ensure_filename('verify_script')
|
||||
self.ensure_filename('pre_install')
|
||||
self.ensure_filename('post_install')
|
||||
self.ensure_filename('pre_uninstall')
|
||||
self.ensure_filename('post_uninstall')
|
||||
|
||||
# XXX don't forget we punted on summaries and descriptions -- they
|
||||
# should be handled here eventually!
|
||||
|
||||
# Now *this* is some meta-data that belongs in the setup script...
|
||||
self.ensure_string_list('provides')
|
||||
self.ensure_string_list('requires')
|
||||
self.ensure_string_list('conflicts')
|
||||
self.ensure_string_list('build_requires')
|
||||
self.ensure_string_list('obsoletes')
|
||||
|
||||
self.ensure_string('force_arch')
|
||||
|
||||
def run(self):
|
||||
if DEBUG:
|
||||
print("before _get_package_data():")
|
||||
print("vendor =", self.vendor)
|
||||
print("packager =", self.packager)
|
||||
print("doc_files =", self.doc_files)
|
||||
print("changelog =", self.changelog)
|
||||
|
||||
# make directories
|
||||
if self.spec_only:
|
||||
spec_dir = self.dist_dir
|
||||
self.mkpath(spec_dir)
|
||||
else:
|
||||
rpm_dir = {}
|
||||
for d in ('SOURCES', 'SPECS', 'BUILD', 'RPMS', 'SRPMS'):
|
||||
rpm_dir[d] = os.path.join(self.rpm_base, d)
|
||||
self.mkpath(rpm_dir[d])
|
||||
spec_dir = rpm_dir['SPECS']
|
||||
|
||||
        # Spec file goes into 'dist_dir' if '--spec-only' specified,
|
||||
# build/rpm.<plat> otherwise.
|
||||
spec_path = os.path.join(spec_dir,
|
||||
"%s.spec" % self.distribution.get_name())
|
||||
self.execute(write_file,
|
||||
(spec_path,
|
||||
self._make_spec_file()),
|
||||
"writing '%s'" % spec_path)
|
||||
|
||||
if self.spec_only: # stop if requested
|
||||
return
|
||||
|
||||
# Make a source distribution and copy to SOURCES directory with
|
||||
# optional icon.
|
||||
saved_dist_files = self.distribution.dist_files[:]
|
||||
sdist = self.reinitialize_command('sdist')
|
||||
if self.use_bzip2:
|
||||
sdist.formats = ['bztar']
|
||||
else:
|
||||
sdist.formats = ['gztar']
|
||||
self.run_command('sdist')
|
||||
self.distribution.dist_files = saved_dist_files
|
||||
|
||||
source = sdist.get_archive_files()[0]
|
||||
source_dir = rpm_dir['SOURCES']
|
||||
self.copy_file(source, source_dir)
|
||||
|
||||
if self.icon:
|
||||
if os.path.exists(self.icon):
|
||||
self.copy_file(self.icon, source_dir)
|
||||
else:
|
||||
raise DistutilsFileError(
|
||||
"icon file '%s' does not exist" % self.icon)
|
||||
|
||||
# build package
|
||||
log.info("building RPMs")
|
||||
rpm_cmd = ['rpmbuild']
|
||||
|
||||
if self.source_only: # what kind of RPMs?
|
||||
rpm_cmd.append('-bs')
|
||||
elif self.binary_only:
|
||||
rpm_cmd.append('-bb')
|
||||
else:
|
||||
rpm_cmd.append('-ba')
|
||||
rpm_cmd.extend(['--define', '__python %s' % self.python])
|
||||
if self.rpm3_mode:
|
||||
rpm_cmd.extend(['--define',
|
||||
'_topdir %s' % os.path.abspath(self.rpm_base)])
|
||||
if not self.keep_temp:
|
||||
rpm_cmd.append('--clean')
|
||||
|
||||
if self.quiet:
|
||||
rpm_cmd.append('--quiet')
|
||||
|
||||
rpm_cmd.append(spec_path)
|
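        # Editorial sketch, not part of the original source: with default
        # options on an assumed Linux host the list above amounts to
        #
        #     rpmbuild -ba --define '__python python3' \
        #         --define '_topdir /src/build/bdist.linux-x86_64/rpm' \
        #         --clean build/bdist.linux-x86_64/rpm/SPECS/foo.spec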
||||
# Determine the binary rpm names that should be built out of this spec
|
||||
# file
|
||||
# Note that some of these may not be really built (if the file
|
||||
# list is empty)
|
||||
nvr_string = "%{name}-%{version}-%{release}"
|
||||
src_rpm = nvr_string + ".src.rpm"
|
||||
non_src_rpm = "%{arch}/" + nvr_string + ".%{arch}.rpm"
|
||||
q_cmd = r"rpm -q --qf '%s %s\n' --specfile '%s'" % (
|
||||
src_rpm, non_src_rpm, spec_path)
|
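        # Editorial sketch, not part of the original source: for an assumed
        # spec file SPECS/foo.spec the query above expands to
        #
        #     rpm -q --qf '%{name}-%{version}-%{release}.src.rpm \
        #       %{arch}/%{name}-%{version}-%{release}.%{arch}.rpm\n' \
        #       --specfile 'SPECS/foo.spec'
        #
        # and prints one "<source rpm> <binary rpm>" pair per package.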
||||
|
||||
out = os.popen(q_cmd)
|
||||
try:
|
||||
binary_rpms = []
|
||||
source_rpm = None
|
||||
while True:
|
||||
line = out.readline()
|
||||
if not line:
|
||||
break
|
||||
l = line.strip().split()
|
||||
assert(len(l) == 2)
|
||||
binary_rpms.append(l[1])
|
||||
# The source rpm is named after the first entry in the spec file
|
||||
if source_rpm is None:
|
||||
source_rpm = l[0]
|
||||
|
||||
status = out.close()
|
||||
if status:
|
||||
raise DistutilsExecError("Failed to execute: %s" % repr(q_cmd))
|
||||
|
||||
finally:
|
||||
out.close()
|
||||
|
||||
self.spawn(rpm_cmd)
|
||||
|
||||
if not self.dry_run:
|
||||
if self.distribution.has_ext_modules():
|
||||
pyversion = get_python_version()
|
||||
else:
|
||||
pyversion = 'any'
|
||||
|
||||
if not self.binary_only:
|
||||
srpm = os.path.join(rpm_dir['SRPMS'], source_rpm)
|
||||
assert(os.path.exists(srpm))
|
||||
self.move_file(srpm, self.dist_dir)
|
||||
filename = os.path.join(self.dist_dir, source_rpm)
|
||||
self.distribution.dist_files.append(
|
||||
('bdist_rpm', pyversion, filename))
|
||||
|
||||
if not self.source_only:
|
||||
for rpm in binary_rpms:
|
||||
rpm = os.path.join(rpm_dir['RPMS'], rpm)
|
||||
if os.path.exists(rpm):
|
||||
self.move_file(rpm, self.dist_dir)
|
||||
filename = os.path.join(self.dist_dir,
|
||||
os.path.basename(rpm))
|
||||
self.distribution.dist_files.append(
|
||||
('bdist_rpm', pyversion, filename))
|
||||
|
||||
def _dist_path(self, path):
|
||||
return os.path.join(self.dist_dir, os.path.basename(path))
|
||||
|
||||
def _make_spec_file(self):
|
||||
"""Generate the text of an RPM spec file and return it as a
|
||||
list of strings (one per line).
|
||||
"""
|
||||
# definitions and headers
|
||||
spec_file = [
|
||||
'%define name ' + self.distribution.get_name(),
|
||||
'%define version ' + self.distribution.get_version().replace('-','_'),
|
||||
'%define unmangled_version ' + self.distribution.get_version(),
|
||||
'%define release ' + self.release.replace('-','_'),
|
||||
'',
|
||||
'Summary: ' + self.distribution.get_description(),
|
||||
]
|
||||
|
||||
# Workaround for #14443 which affects some RPM based systems such as
|
||||
# RHEL6 (and probably derivatives)
|
||||
vendor_hook = subprocess.getoutput('rpm --eval %{__os_install_post}')
|
||||
# Generate a potential replacement value for __os_install_post (whilst
|
||||
# normalizing the whitespace to simplify the test for whether the
|
||||
# invocation of brp-python-bytecompile passes in __python):
|
||||
vendor_hook = '\n'.join([' %s \\' % line.strip()
|
||||
for line in vendor_hook.splitlines()])
|
||||
problem = "brp-python-bytecompile \\\n"
|
||||
fixed = "brp-python-bytecompile %{__python} \\\n"
|
||||
fixed_hook = vendor_hook.replace(problem, fixed)
|
||||
if fixed_hook != vendor_hook:
|
||||
spec_file.append('# Workaround for http://bugs.python.org/issue14443')
|
||||
spec_file.append('%define __os_install_post ' + fixed_hook + '\n')
|
||||
|
||||
# put locale summaries into spec file
|
||||
# XXX not supported for now (hard to put a dictionary
|
||||
# in a config file -- arg!)
|
||||
#for locale in self.summaries.keys():
|
||||
# spec_file.append('Summary(%s): %s' % (locale,
|
||||
# self.summaries[locale]))
|
||||
|
||||
spec_file.extend([
|
||||
'Name: %{name}',
|
||||
'Version: %{version}',
|
||||
'Release: %{release}',])
|
||||
|
||||
# XXX yuck! this filename is available from the "sdist" command,
|
||||
# but only after it has run: and we create the spec file before
|
||||
# running "sdist", in case of --spec-only.
|
||||
if self.use_bzip2:
|
||||
spec_file.append('Source0: %{name}-%{unmangled_version}.tar.bz2')
|
||||
else:
|
||||
spec_file.append('Source0: %{name}-%{unmangled_version}.tar.gz')
|
||||
|
||||
spec_file.extend([
|
||||
'License: ' + self.distribution.get_license(),
|
||||
'Group: ' + self.group,
|
||||
'BuildRoot: %{_tmppath}/%{name}-%{version}-%{release}-buildroot',
|
||||
'Prefix: %{_prefix}', ])
|
||||
|
||||
if not self.force_arch:
|
||||
# noarch if no extension modules
|
||||
if not self.distribution.has_ext_modules():
|
||||
spec_file.append('BuildArch: noarch')
|
||||
else:
|
||||
            spec_file.append('BuildArch: %s' % self.force_arch)
|
||||
|
||||
for field in ('Vendor',
|
||||
'Packager',
|
||||
'Provides',
|
||||
'Requires',
|
||||
'Conflicts',
|
||||
'Obsoletes',
|
||||
):
|
||||
val = getattr(self, field.lower())
|
||||
if isinstance(val, list):
|
||||
spec_file.append('%s: %s' % (field, ' '.join(val)))
|
||||
elif val is not None:
|
||||
spec_file.append('%s: %s' % (field, val))
|
||||
|
||||
|
||||
if self.distribution.get_url() != 'UNKNOWN':
|
||||
spec_file.append('Url: ' + self.distribution.get_url())
|
||||
|
||||
if self.distribution_name:
|
||||
spec_file.append('Distribution: ' + self.distribution_name)
|
||||
|
||||
if self.build_requires:
|
||||
spec_file.append('BuildRequires: ' +
|
||||
' '.join(self.build_requires))
|
||||
|
||||
if self.icon:
|
||||
spec_file.append('Icon: ' + os.path.basename(self.icon))
|
||||
|
||||
if self.no_autoreq:
|
||||
spec_file.append('AutoReq: 0')
|
||||
|
||||
spec_file.extend([
|
||||
'',
|
||||
'%description',
|
||||
self.distribution.get_long_description()
|
||||
])
|
||||
|
||||
# put locale descriptions into spec file
|
||||
# XXX again, suppressed because config file syntax doesn't
|
||||
# easily support this ;-(
|
||||
#for locale in self.descriptions.keys():
|
||||
# spec_file.extend([
|
||||
# '',
|
||||
# '%description -l ' + locale,
|
||||
# self.descriptions[locale],
|
||||
# ])
|
||||
|
||||
# rpm scripts
|
||||
# figure out default build script
|
||||
        def_setup_call = "%s %s" % (self.python, os.path.basename(sys.argv[0]))
|
||||
def_build = "%s build" % def_setup_call
|
||||
if self.use_rpm_opt_flags:
|
||||
def_build = 'env CFLAGS="$RPM_OPT_FLAGS" ' + def_build
|
||||
|
||||
# insert contents of files
|
||||
|
||||
# XXX this is kind of misleading: user-supplied options are files
|
||||
# that we open and interpolate into the spec file, but the defaults
|
||||
# are just text that we drop in as-is. Hmmm.
|
||||
|
||||
install_cmd = ('%s install -O1 --root=$RPM_BUILD_ROOT '
|
||||
'--record=INSTALLED_FILES') % def_setup_call
|
||||
|
||||
script_options = [
|
||||
('prep', 'prep_script', "%setup -n %{name}-%{unmangled_version}"),
|
||||
('build', 'build_script', def_build),
|
||||
('install', 'install_script', install_cmd),
|
||||
('clean', 'clean_script', "rm -rf $RPM_BUILD_ROOT"),
|
||||
('verifyscript', 'verify_script', None),
|
||||
('pre', 'pre_install', None),
|
||||
('post', 'post_install', None),
|
||||
('preun', 'pre_uninstall', None),
|
||||
('postun', 'post_uninstall', None),
|
||||
]
|
||||
|
||||
for (rpm_opt, attr, default) in script_options:
|
||||
            # Insert the contents of the file referred to; if no file is referred to,
|
||||
            # use 'default' as the contents of the script.
|
||||
val = getattr(self, attr)
|
||||
if val or default:
|
||||
spec_file.extend([
|
||||
'',
|
||||
'%' + rpm_opt,])
|
||||
if val:
|
||||
with open(val) as f:
|
||||
spec_file.extend(f.read().split('\n'))
|
||||
else:
|
||||
spec_file.append(default)
|
||||
|
||||
|
||||
# files section
|
||||
spec_file.extend([
|
||||
'',
|
||||
'%files -f INSTALLED_FILES',
|
||||
'%defattr(-,root,root)',
|
||||
])
|
||||
|
||||
if self.doc_files:
|
||||
spec_file.append('%doc ' + ' '.join(self.doc_files))
|
||||
|
||||
if self.changelog:
|
||||
spec_file.extend([
|
||||
'',
|
||||
'%changelog',])
|
||||
spec_file.extend(self.changelog)
|
||||
|
||||
return spec_file
|
||||
|
||||
def _format_changelog(self, changelog):
|
||||
"""Format the changelog correctly and convert it to a list of strings
|
||||
"""
|
||||
if not changelog:
|
||||
return changelog
|
||||
new_changelog = []
|
||||
for line in changelog.strip().split('\n'):
|
||||
line = line.strip()
|
||||
            if line.startswith('*'):  # startswith() avoids IndexError on blank lines
|
||||
new_changelog.extend(['', line])
|
||||
            elif line.startswith('-'):
|
||||
new_changelog.append(line)
|
||||
else:
|
||||
new_changelog.append(' ' + line)
|
||||
|
||||
# strip trailing newline inserted by first changelog entry
|
||||
if not new_changelog[0]:
|
||||
del new_changelog[0]
|
||||
|
||||
return new_changelog
|
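    # Editorial sketch, not part of the original source (input assumed):
    #
    #     >>> cmd._format_changelog(
    #     ...     "* Mon Mar 01 2021 Jane Doe <jane@example.net>\n"
    #     ...     "- initial release")
    #     ['* Mon Mar 01 2021 Jane Doe <jane@example.net>', '- initial release']
    #
    # The leading '' emitted for the first '*' entry is stripped at the end.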
@ -0,0 +1,377 @@
|
||||
"""distutils.command.bdist_wininst
|
||||
|
||||
Implements the Distutils 'bdist_wininst' command: create a windows installer
|
||||
exe-program."""
|
||||
|
||||
import os
|
||||
import sys
|
||||
import warnings
|
||||
from distutils.core import Command
|
||||
from distutils.util import get_platform
|
||||
from distutils.dir_util import remove_tree
|
||||
from distutils.errors import *
|
||||
from distutils.sysconfig import get_python_version
|
||||
from distutils import log
|
||||
|
||||
class bdist_wininst(Command):
|
||||
|
||||
description = "create an executable installer for MS Windows"
|
||||
|
||||
user_options = [('bdist-dir=', None,
|
||||
"temporary directory for creating the distribution"),
|
||||
('plat-name=', 'p',
|
||||
"platform name to embed in generated filenames "
|
||||
"(default: %s)" % get_platform()),
|
||||
('keep-temp', 'k',
|
||||
"keep the pseudo-installation tree around after " +
|
||||
"creating the distribution archive"),
|
||||
('target-version=', None,
|
||||
"require a specific python version" +
|
||||
" on the target system"),
|
||||
('no-target-compile', 'c',
|
||||
"do not compile .py to .pyc on the target system"),
|
||||
('no-target-optimize', 'o',
|
||||
"do not compile .py to .pyo (optimized) "
|
||||
"on the target system"),
|
||||
('dist-dir=', 'd',
|
||||
"directory to put final built distributions in"),
|
||||
('bitmap=', 'b',
|
||||
"bitmap to use for the installer instead of python-powered logo"),
|
||||
('title=', 't',
|
||||
"title to display on the installer background instead of default"),
|
||||
('skip-build', None,
|
||||
"skip rebuilding everything (for testing/debugging)"),
|
||||
('install-script=', None,
|
||||
"basename of installation script to be run after "
|
||||
"installation or before deinstallation"),
|
||||
('pre-install-script=', None,
|
||||
"Fully qualified filename of a script to be run before "
|
||||
"any files are installed. This script need not be in the "
|
||||
"distribution"),
|
||||
('user-access-control=', None,
|
||||
"specify Vista's UAC handling - 'none'/default=no "
|
||||
"handling, 'auto'=use UAC if target Python installed for "
|
||||
"all users, 'force'=always use UAC"),
|
||||
]
|
||||
|
||||
boolean_options = ['keep-temp', 'no-target-compile', 'no-target-optimize',
|
||||
'skip-build']
|
||||
|
||||
# bpo-10945: bdist_wininst requires mbcs encoding only available on Windows
|
||||
_unsupported = (sys.platform != "win32")
|
||||
|
||||
def __init__(self, *args, **kw):
|
||||
super().__init__(*args, **kw)
|
||||
warnings.warn("bdist_wininst command is deprecated since Python 3.8, "
|
||||
"use bdist_wheel (wheel packages) instead",
|
||||
DeprecationWarning, 2)
|
||||
|
||||
def initialize_options(self):
|
||||
self.bdist_dir = None
|
||||
self.plat_name = None
|
||||
self.keep_temp = 0
|
||||
self.no_target_compile = 0
|
||||
self.no_target_optimize = 0
|
||||
self.target_version = None
|
||||
self.dist_dir = None
|
||||
self.bitmap = None
|
||||
self.title = None
|
||||
self.skip_build = None
|
||||
self.install_script = None
|
||||
self.pre_install_script = None
|
||||
self.user_access_control = None
|
||||
|
||||
|
||||
def finalize_options(self):
|
||||
self.set_undefined_options('bdist', ('skip_build', 'skip_build'))
|
||||
|
||||
if self.bdist_dir is None:
|
||||
if self.skip_build and self.plat_name:
|
||||
# If build is skipped and plat_name is overridden, bdist will
|
||||
# not see the correct 'plat_name' - so set that up manually.
|
||||
bdist = self.distribution.get_command_obj('bdist')
|
||||
bdist.plat_name = self.plat_name
|
||||
# next the command will be initialized using that name
|
||||
bdist_base = self.get_finalized_command('bdist').bdist_base
|
||||
self.bdist_dir = os.path.join(bdist_base, 'wininst')
|
||||
|
||||
if not self.target_version:
|
||||
self.target_version = ""
|
||||
|
||||
if not self.skip_build and self.distribution.has_ext_modules():
|
||||
short_version = get_python_version()
|
||||
if self.target_version and self.target_version != short_version:
|
||||
raise DistutilsOptionError(
|
||||
"target version can only be %s, or the '--skip-build'" \
|
||||
" option must be specified" % (short_version,))
|
||||
self.target_version = short_version
|
||||
|
||||
self.set_undefined_options('bdist',
|
||||
('dist_dir', 'dist_dir'),
|
||||
('plat_name', 'plat_name'),
|
||||
)
|
||||
|
||||
if self.install_script:
|
||||
for script in self.distribution.scripts:
|
||||
if self.install_script == os.path.basename(script):
|
||||
break
|
||||
else:
|
||||
raise DistutilsOptionError(
|
||||
"install_script '%s' not found in scripts"
|
||||
% self.install_script)
|
||||
|
||||
def run(self):
|
||||
if (sys.platform != "win32" and
|
||||
(self.distribution.has_ext_modules() or
|
||||
self.distribution.has_c_libraries())):
|
||||
raise DistutilsPlatformError \
|
||||
("distribution contains extensions and/or C libraries; "
|
||||
"must be compiled on a Windows 32 platform")
|
||||
|
||||
if not self.skip_build:
|
||||
self.run_command('build')
|
||||
|
||||
install = self.reinitialize_command('install', reinit_subcommands=1)
|
||||
install.root = self.bdist_dir
|
||||
install.skip_build = self.skip_build
|
||||
install.warn_dir = 0
|
||||
install.plat_name = self.plat_name
|
||||
|
||||
install_lib = self.reinitialize_command('install_lib')
|
||||
# we do not want to include pyc or pyo files
|
||||
install_lib.compile = 0
|
||||
install_lib.optimize = 0
|
||||
|
||||
if self.distribution.has_ext_modules():
|
||||
# If we are building an installer for a Python version other
|
||||
# than the one we are currently running, then we need to ensure
|
||||
# our build_lib reflects the other Python version rather than ours.
|
||||
# Note that for target_version!=sys.version, we must have skipped the
|
||||
# build step, so there is no issue with enforcing the build of this
|
||||
# version.
|
||||
target_version = self.target_version
|
||||
if not target_version:
|
||||
assert self.skip_build, "Should have already checked this"
|
||||
target_version = '%d.%d' % sys.version_info[:2]
|
||||
plat_specifier = ".%s-%s" % (self.plat_name, target_version)
|
||||
build = self.get_finalized_command('build')
|
||||
build.build_lib = os.path.join(build.build_base,
|
||||
'lib' + plat_specifier)
|
||||
|
||||
# Use a custom scheme for the zip-file, because we have to decide
|
||||
# at installation time which scheme to use.
|
||||
for key in ('purelib', 'platlib', 'headers', 'scripts', 'data'):
|
||||
value = key.upper()
|
||||
if key == 'headers':
|
||||
value = value + '/Include/$dist_name'
|
||||
setattr(install,
|
||||
'install_' + key,
|
||||
value)
|
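        # Editorial sketch, not part of the original source: the loop above
        # redirects every install key to an all-caps marker directory inside
        # the archive, e.g.
        #
        #     install.install_purelib = 'PURELIB'
        #     install.install_headers = 'HEADERS/Include/$dist_name'
        #
        # so the installer can substitute the real target paths at install
        # time.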
||||
|
||||
log.info("installing to %s", self.bdist_dir)
|
||||
install.ensure_finalized()
|
||||
|
||||
# avoid warning of 'install_lib' about installing
|
||||
# into a directory not in sys.path
|
||||
sys.path.insert(0, os.path.join(self.bdist_dir, 'PURELIB'))
|
||||
|
||||
install.run()
|
||||
|
||||
del sys.path[0]
|
||||
|
||||
# And make an archive relative to the root of the
|
||||
# pseudo-installation tree.
|
||||
from tempfile import mktemp
|
||||
archive_basename = mktemp()
|
||||
fullname = self.distribution.get_fullname()
|
||||
arcname = self.make_archive(archive_basename, "zip",
|
||||
root_dir=self.bdist_dir)
|
||||
# create an exe containing the zip-file
|
||||
self.create_exe(arcname, fullname, self.bitmap)
|
||||
if self.distribution.has_ext_modules():
|
||||
pyversion = get_python_version()
|
||||
else:
|
||||
pyversion = 'any'
|
||||
self.distribution.dist_files.append(('bdist_wininst', pyversion,
|
||||
self.get_installer_filename(fullname)))
|
||||
# remove the zip-file again
|
||||
log.debug("removing temporary file '%s'", arcname)
|
||||
os.remove(arcname)
|
||||
|
||||
if not self.keep_temp:
|
||||
remove_tree(self.bdist_dir, dry_run=self.dry_run)
|
||||
|
||||
def get_inidata(self):
|
||||
# Return data describing the installation.
|
||||
lines = []
|
||||
metadata = self.distribution.metadata
|
||||
|
||||
# Write the [metadata] section.
|
||||
lines.append("[metadata]")
|
||||
|
||||
# 'info' will be displayed in the installer's dialog box,
|
||||
# describing the items to be installed.
|
||||
info = (metadata.long_description or '') + '\n'
|
||||
|
||||
# Escape newline characters
|
||||
def escape(s):
|
||||
return s.replace("\n", "\\n")
|
||||
|
||||
for name in ["author", "author_email", "description", "maintainer",
|
||||
"maintainer_email", "name", "url", "version"]:
|
||||
data = getattr(metadata, name, "")
|
||||
if data:
|
||||
info = info + ("\n %s: %s" % \
|
||||
(name.capitalize(), escape(data)))
|
||||
lines.append("%s=%s" % (name, escape(data)))
|
||||
|
||||
# The [setup] section contains entries controlling
|
||||
# the installer runtime.
|
||||
lines.append("\n[Setup]")
|
||||
if self.install_script:
|
||||
lines.append("install_script=%s" % self.install_script)
|
||||
lines.append("info=%s" % escape(info))
|
||||
lines.append("target_compile=%d" % (not self.no_target_compile))
|
||||
lines.append("target_optimize=%d" % (not self.no_target_optimize))
|
||||
if self.target_version:
|
||||
lines.append("target_version=%s" % self.target_version)
|
||||
if self.user_access_control:
|
||||
lines.append("user_access_control=%s" % self.user_access_control)
|
||||
|
||||
title = self.title or self.distribution.get_fullname()
|
||||
lines.append("title=%s" % escape(title))
|
||||
import time
|
||||
import distutils
|
||||
build_info = "Built %s with distutils-%s" % \
|
||||
(time.ctime(time.time()), distutils.__version__)
|
||||
lines.append("build_info=%s" % build_info)
|
||||
return "\n".join(lines)
|
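    # Editorial sketch, not part of the original source: the returned string
    # is a small INI document along the lines of (values assumed)
    #
    #     [metadata]
    #     name=foo
    #     version=1.0
    #
    #     [Setup]
    #     info=...\n Name: foo\n Version: 1.0
    #     target_compile=1
    #     target_optimize=1
    #     title=foo-1.0
    #     build_info=Built Mon Mar  1 12:00:00 2021 with distutils-3.9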
||||
|
||||
def create_exe(self, arcname, fullname, bitmap=None):
|
||||
import struct
|
||||
|
||||
self.mkpath(self.dist_dir)
|
||||
|
||||
cfgdata = self.get_inidata()
|
||||
|
||||
installer_name = self.get_installer_filename(fullname)
|
||||
self.announce("creating %s" % installer_name)
|
||||
|
||||
if bitmap:
|
||||
with open(bitmap, "rb") as f:
|
||||
bitmapdata = f.read()
|
||||
bitmaplen = len(bitmapdata)
|
||||
else:
|
||||
bitmaplen = 0
|
||||
|
||||
with open(installer_name, "wb") as file:
|
||||
file.write(self.get_exe_bytes())
|
||||
if bitmap:
|
||||
file.write(bitmapdata)
|
||||
|
||||
            # Encode cfgdata (str) to bytes with the Windows 'mbcs' codec
|
||||
if isinstance(cfgdata, str):
|
||||
cfgdata = cfgdata.encode("mbcs")
|
||||
|
||||
# Append the pre-install script
|
||||
cfgdata = cfgdata + b"\0"
|
||||
if self.pre_install_script:
|
||||
# We need to normalize newlines, so we open in text mode and
|
||||
# convert back to bytes. "latin-1" simply avoids any possible
|
||||
# failures.
|
||||
with open(self.pre_install_script, "r",
|
||||
encoding="latin-1") as script:
|
||||
script_data = script.read().encode("latin-1")
|
||||
cfgdata = cfgdata + script_data + b"\n\0"
|
||||
else:
|
||||
# empty pre-install script
|
||||
cfgdata = cfgdata + b"\0"
|
||||
file.write(cfgdata)
|
||||
|
||||
# The 'magic number' 0x1234567B is used to make sure that the
|
||||
# binary layout of 'cfgdata' is what the wininst.exe binary
|
||||
# expects. If the layout changes, increment that number, make
|
||||
# the corresponding changes to the wininst.exe sources, and
|
||||
# recompile them.
|
||||
header = struct.pack("<iii",
|
||||
0x1234567B, # tag
|
||||
len(cfgdata), # length
|
||||
bitmaplen, # number of bytes in bitmap
|
||||
)
|
||||
file.write(header)
|
||||
with open(arcname, "rb") as f:
|
||||
file.write(f.read())
|
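        # Editorial sketch, not part of the original source: the finished
        # installer written above is laid out as
        #
        #     [wininst stub .exe][bitmap][cfgdata][12-byte header][zip archive]
        #
        # with the header produced by struct.pack('<iii', 0x1234567B,
        # len(cfgdata), bitmaplen).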
||||
|
||||
def get_installer_filename(self, fullname):
|
||||
# Factored out to allow overriding in subclasses
|
||||
if self.target_version:
|
||||
# if we create an installer for a specific python version,
|
||||
# it's better to include this in the name
|
||||
installer_name = os.path.join(self.dist_dir,
|
||||
"%s.%s-py%s.exe" %
|
||||
(fullname, self.plat_name, self.target_version))
|
||||
else:
|
||||
installer_name = os.path.join(self.dist_dir,
|
||||
"%s.%s.exe" % (fullname, self.plat_name))
|
||||
return installer_name
|
||||
|
||||
def get_exe_bytes(self):
|
||||
# If a target-version other than the current version has been
|
||||
# specified, then using the MSVC version from *this* build is no good.
|
||||
# Without actually finding and executing the target version and parsing
|
||||
# its sys.version, we just hard-code our knowledge of old versions.
|
||||
# NOTE: Possible alternative is to allow "--target-version" to
|
||||
# specify a Python executable rather than a simple version string.
|
||||
# We can then execute this program to obtain any info we need, such
|
||||
# as the real sys.version string for the build.
|
||||
cur_version = get_python_version()
|
||||
|
||||
# If the target version is *later* than us, then we assume they
|
||||
# use what we use
|
||||
# string compares seem wrong, but are what sysconfig.py itself uses
|
||||
if self.target_version and self.target_version < cur_version:
|
||||
if self.target_version < "2.4":
|
||||
bv = '6.0'
|
||||
elif self.target_version == "2.4":
|
||||
bv = '7.1'
|
||||
elif self.target_version == "2.5":
|
||||
bv = '8.0'
|
||||
elif self.target_version <= "3.2":
|
||||
bv = '9.0'
|
||||
elif self.target_version <= "3.4":
|
||||
bv = '10.0'
|
||||
else:
|
||||
bv = '14.0'
|
||||
else:
|
||||
# for current version - use authoritative check.
|
||||
try:
|
||||
from msvcrt import CRT_ASSEMBLY_VERSION
|
||||
except ImportError:
|
||||
# cross-building, so assume the latest version
|
||||
bv = '14.0'
|
||||
else:
|
||||
# as far as we know, CRT is binary compatible based on
|
||||
# the first field, so assume 'x.0' until proven otherwise
|
||||
major = CRT_ASSEMBLY_VERSION.partition('.')[0]
|
||||
bv = major + '.0'
|
||||
|
||||
|
||||
# wininst-x.y.exe is in the same directory as this file
|
||||
directory = os.path.dirname(__file__)
|
||||
# we must use a wininst-x.y.exe built with the same C compiler
|
||||
# used for python. XXX What about mingw, borland, and so on?
|
||||
|
||||
# if plat_name starts with "win" but is not "win32"
|
||||
# we want to strip "win" and leave the rest (e.g. -amd64)
|
||||
# for all other cases, we don't want any suffix
|
||||
if self.plat_name != 'win32' and self.plat_name[:3] == 'win':
|
||||
sfix = self.plat_name[3:]
|
||||
else:
|
||||
sfix = ''
|
||||
|
||||
filename = os.path.join(directory, "wininst-%s%s.exe" % (bv, sfix))
|
||||
f = open(filename, "rb")
|
||||
try:
|
||||
return f.read()
|
||||
finally:
|
||||
f.close()
|
@ -0,0 +1,157 @@
|
||||
"""distutils.command.build
|
||||
|
||||
Implements the Distutils 'build' command."""
|
||||
|
||||
import sys, os
|
||||
from distutils.core import Command
|
||||
from distutils.errors import DistutilsOptionError
|
||||
from distutils.util import get_platform
|
||||
|
||||
|
||||
def show_compilers():
|
||||
from distutils.ccompiler import show_compilers
|
||||
show_compilers()
|
||||
|
||||
|
||||
class build(Command):
|
||||
|
||||
description = "build everything needed to install"
|
||||
|
||||
user_options = [
|
||||
('build-base=', 'b',
|
||||
"base directory for build library"),
|
||||
('build-purelib=', None,
|
||||
"build directory for platform-neutral distributions"),
|
||||
('build-platlib=', None,
|
||||
"build directory for platform-specific distributions"),
|
||||
('build-lib=', None,
|
||||
"build directory for all distribution (defaults to either " +
|
||||
"build-purelib or build-platlib"),
|
||||
('build-scripts=', None,
|
||||
"build directory for scripts"),
|
||||
('build-temp=', 't',
|
||||
"temporary build directory"),
|
||||
('plat-name=', 'p',
|
||||
"platform name to build for, if supported "
|
||||
"(default: %s)" % get_platform()),
|
||||
('compiler=', 'c',
|
||||
"specify the compiler type"),
|
||||
('parallel=', 'j',
|
||||
"number of parallel build jobs"),
|
||||
('debug', 'g',
|
||||
"compile extensions and libraries with debugging information"),
|
||||
('force', 'f',
|
||||
"forcibly build everything (ignore file timestamps)"),
|
||||
('executable=', 'e',
|
||||
"specify final destination interpreter path (build.py)"),
|
||||
]
|
||||
|
||||
boolean_options = ['debug', 'force']
|
||||
|
||||
help_options = [
|
||||
('help-compiler', None,
|
||||
"list available compilers", show_compilers),
|
||||
]
|
||||
|
||||
def initialize_options(self):
|
||||
self.build_base = 'build'
|
||||
# these are decided only after 'build_base' has its final value
|
||||
# (unless overridden by the user or client)
|
||||
self.build_purelib = None
|
||||
self.build_platlib = None
|
||||
self.build_lib = None
|
||||
self.build_temp = None
|
||||
self.build_scripts = None
|
||||
self.compiler = None
|
||||
self.plat_name = None
|
||||
self.debug = None
|
||||
self.force = 0
|
||||
self.executable = None
|
||||
self.parallel = None
|
||||
|
||||
def finalize_options(self):
|
||||
if self.plat_name is None:
|
||||
self.plat_name = get_platform()
|
||||
else:
|
||||
# plat-name only supported for windows (other platforms are
|
||||
# supported via ./configure flags, if at all). Avoid misleading
|
||||
# other platforms.
|
||||
if os.name != 'nt':
|
||||
raise DistutilsOptionError(
|
||||
"--plat-name only supported on Windows (try "
|
||||
"using './configure --help' on your platform)")
|
||||
|
||||
plat_specifier = ".%s-%d.%d" % (self.plat_name, *sys.version_info[:2])
|
||||
|
||||
# Make it so Python 2.x and Python 2.x with --with-pydebug don't
|
||||
# share the same build directories. Doing so confuses the build
|
||||
# process for C modules
|
||||
if hasattr(sys, 'gettotalrefcount'):
|
||||
plat_specifier += '-pydebug'
|
||||
|
||||
# 'build_purelib' and 'build_platlib' just default to 'lib' and
|
||||
# 'lib.<plat>' under the base build directory. We only use one of
|
||||
# them for a given distribution, though --
|
||||
if self.build_purelib is None:
|
||||
self.build_purelib = os.path.join(self.build_base, 'lib')
|
||||
if self.build_platlib is None:
|
||||
self.build_platlib = os.path.join(self.build_base,
|
||||
'lib' + plat_specifier)
|
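        # Editorial sketch, not part of the original source: on an assumed
        # linux-x86_64 host running a regular (non-pydebug) Python 3.10 this
        # gives
        #
        #     build_purelib = 'build/lib'
        #     build_platlib = 'build/lib.linux-x86_64-3.10'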
||||
|
||||
# 'build_lib' is the actual directory that we will use for this
|
||||
# particular module distribution -- if user didn't supply it, pick
|
||||
# one of 'build_purelib' or 'build_platlib'.
|
||||
if self.build_lib is None:
|
||||
if self.distribution.has_ext_modules():
|
||||
self.build_lib = self.build_platlib
|
||||
else:
|
||||
self.build_lib = self.build_purelib
|
||||
|
||||
# 'build_temp' -- temporary directory for compiler turds,
|
||||
# "build/temp.<plat>"
|
||||
if self.build_temp is None:
|
||||
self.build_temp = os.path.join(self.build_base,
|
||||
'temp' + plat_specifier)
|
||||
if self.build_scripts is None:
|
||||
self.build_scripts = os.path.join(self.build_base,
|
||||
'scripts-%d.%d' % sys.version_info[:2])
|
||||
|
||||
if self.executable is None and sys.executable:
|
||||
self.executable = os.path.normpath(sys.executable)
|
||||
|
||||
if isinstance(self.parallel, str):
|
||||
try:
|
||||
self.parallel = int(self.parallel)
|
||||
except ValueError:
|
||||
raise DistutilsOptionError("parallel should be an integer")
|
||||
|
||||
def run(self):
|
||||
# Run all relevant sub-commands. This will be some subset of:
|
||||
# - build_py - pure Python modules
|
||||
# - build_clib - standalone C libraries
|
||||
# - build_ext - Python extensions
|
||||
# - build_scripts - (Python) scripts
|
||||
for cmd_name in self.get_sub_commands():
|
||||
self.run_command(cmd_name)
|
||||
|
||||
|
||||
# -- Predicates for the sub-command list ---------------------------
|
||||
|
||||
def has_pure_modules(self):
|
||||
return self.distribution.has_pure_modules()
|
||||
|
||||
def has_c_libraries(self):
|
||||
return self.distribution.has_c_libraries()
|
||||
|
||||
def has_ext_modules(self):
|
||||
return self.distribution.has_ext_modules()
|
||||
|
||||
def has_scripts(self):
|
||||
return self.distribution.has_scripts()
|
||||
|
||||
|
||||
sub_commands = [('build_py', has_pure_modules),
|
||||
('build_clib', has_c_libraries),
|
||||
('build_ext', has_ext_modules),
|
||||
('build_scripts', has_scripts),
|
||||
]
|
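    # Editorial sketch, not part of the original source: run() above keeps
    # only the entries whose predicate returns true, so for a pure-Python
    # project with console scripts (assumed):
    #
    #     >>> cmd.get_sub_commands()
    #     ['build_py', 'build_scripts']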
@ -0,0 +1,209 @@
|
||||
"""distutils.command.build_clib
|
||||
|
||||
Implements the Distutils 'build_clib' command, to build a C/C++ library
|
||||
that is included in the module distribution and needed by an extension
|
||||
module."""
|
||||
|
||||
|
||||
# XXX this module has *lots* of code ripped-off quite transparently from
|
||||
# build_ext.py -- not surprisingly really, as the work required to build
|
||||
# a static library from a collection of C source files is not really all
|
||||
# that different from what's required to build a shared object file from
|
||||
# a collection of C source files. Nevertheless, I haven't done the
|
||||
# necessary refactoring to account for the overlap in code between the
|
||||
# two modules, mainly because a number of subtle details changed in the
|
||||
# cut 'n paste. Sigh.
|
||||
|
||||
import os
|
||||
from distutils.core import Command
|
||||
from distutils.errors import *
|
||||
from distutils.sysconfig import customize_compiler
|
||||
from distutils import log
|
||||
|
||||
def show_compilers():
|
||||
from distutils.ccompiler import show_compilers
|
||||
show_compilers()
|
||||
|
||||
|
||||
class build_clib(Command):
|
||||
|
||||
description = "build C/C++ libraries used by Python extensions"
|
||||
|
||||
user_options = [
|
||||
('build-clib=', 'b',
|
||||
"directory to build C/C++ libraries to"),
|
||||
('build-temp=', 't',
|
||||
"directory to put temporary build by-products"),
|
||||
('debug', 'g',
|
||||
"compile with debugging information"),
|
||||
('force', 'f',
|
||||
"forcibly build everything (ignore file timestamps)"),
|
||||
('compiler=', 'c',
|
||||
"specify the compiler type"),
|
||||
]
|
||||
|
||||
boolean_options = ['debug', 'force']
|
||||
|
||||
help_options = [
|
||||
('help-compiler', None,
|
||||
"list available compilers", show_compilers),
|
||||
]
|
||||
|
||||
def initialize_options(self):
|
||||
self.build_clib = None
|
||||
self.build_temp = None
|
||||
|
||||
# List of libraries to build
|
||||
self.libraries = None
|
||||
|
||||
# Compilation options for all libraries
|
||||
self.include_dirs = None
|
||||
self.define = None
|
||||
self.undef = None
|
||||
self.debug = None
|
||||
self.force = 0
|
||||
self.compiler = None
|
||||
|
||||
|
||||
def finalize_options(self):
|
||||
# This might be confusing: both build-clib and build-temp default
|
||||
# to build-temp as defined by the "build" command. This is because
|
||||
# I think that C libraries are really just temporary build
|
||||
# by-products, at least from the point of view of building Python
|
||||
# extensions -- but I want to keep my options open.
|
||||
self.set_undefined_options('build',
|
||||
('build_temp', 'build_clib'),
|
||||
('build_temp', 'build_temp'),
|
||||
('compiler', 'compiler'),
|
||||
('debug', 'debug'),
|
||||
('force', 'force'))
|
||||
|
||||
self.libraries = self.distribution.libraries
|
||||
if self.libraries:
|
||||
self.check_library_list(self.libraries)
|
||||
|
||||
if self.include_dirs is None:
|
||||
self.include_dirs = self.distribution.include_dirs or []
|
||||
if isinstance(self.include_dirs, str):
|
||||
self.include_dirs = self.include_dirs.split(os.pathsep)
|
||||
|
||||
# XXX same as for build_ext -- what about 'self.define' and
|
||||
# 'self.undef' ?
|
||||
|
||||
|
||||
def run(self):
|
||||
if not self.libraries:
|
||||
return
|
||||
|
||||
# Yech -- this is cut 'n pasted from build_ext.py!
|
||||
from distutils.ccompiler import new_compiler
|
||||
self.compiler = new_compiler(compiler=self.compiler,
|
||||
dry_run=self.dry_run,
|
||||
force=self.force)
|
||||
customize_compiler(self.compiler)
|
||||
|
||||
if self.include_dirs is not None:
|
||||
self.compiler.set_include_dirs(self.include_dirs)
|
||||
if self.define is not None:
|
||||
# 'define' option is a list of (name,value) tuples
|
||||
for (name,value) in self.define:
|
||||
self.compiler.define_macro(name, value)
|
||||
if self.undef is not None:
|
||||
for macro in self.undef:
|
||||
self.compiler.undefine_macro(macro)
|
||||
|
||||
self.build_libraries(self.libraries)
|
||||
|
||||
|
||||
def check_library_list(self, libraries):
|
||||
"""Ensure that the list of libraries is valid.
|
||||
|
||||
        `libraries` is presumably provided as a command option 'libraries'.
|
||||
This method checks that it is a list of 2-tuples, where the tuples
|
||||
are (library_name, build_info_dict).
|
||||
|
||||
Raise DistutilsSetupError if the structure is invalid anywhere;
|
||||
just returns otherwise.
|
||||
"""
|
||||
if not isinstance(libraries, list):
|
||||
raise DistutilsSetupError(
|
||||
"'libraries' option must be a list of tuples")
|
||||
|
||||
for lib in libraries:
|
||||
            # 'or', not 'and' -- a non-tuple or a wrong-length tuple is invalid
            if not isinstance(lib, tuple) or len(lib) != 2:
|
||||
raise DistutilsSetupError(
|
||||
"each element of 'libraries' must a 2-tuple")
|
||||
|
||||
name, build_info = lib
|
||||
|
||||
if not isinstance(name, str):
|
||||
raise DistutilsSetupError(
|
||||
"first element of each tuple in 'libraries' "
|
||||
"must be a string (the library name)")
|
||||
|
||||
if '/' in name or (os.sep != '/' and os.sep in name):
|
||||
raise DistutilsSetupError("bad library name '%s': "
|
||||
"may not contain directory separators" % lib[0])
|
||||
|
||||
if not isinstance(build_info, dict):
|
||||
raise DistutilsSetupError(
|
||||
"second element of each tuple in 'libraries' "
|
||||
"must be a dictionary (build info)")
|
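        # Editorial sketch, not part of the original source: a value that
        # passes this check looks like (names assumed)
        #
        #     libraries = [('sample', {'sources': ['src/sample.c'],
        #                              'macros': [('NDEBUG', '1')]})]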
||||
|
||||
|
||||
def get_library_names(self):
|
||||
# Assume the library list is valid -- 'check_library_list()' is
|
||||
# called from 'finalize_options()', so it should be!
|
||||
if not self.libraries:
|
||||
return None
|
||||
|
||||
lib_names = []
|
||||
for (lib_name, build_info) in self.libraries:
|
||||
lib_names.append(lib_name)
|
||||
return lib_names
|
||||
|
||||
|
||||
def get_source_files(self):
|
||||
self.check_library_list(self.libraries)
|
||||
filenames = []
|
||||
for (lib_name, build_info) in self.libraries:
|
||||
sources = build_info.get('sources')
|
||||
if sources is None or not isinstance(sources, (list, tuple)):
|
||||
raise DistutilsSetupError(
|
||||
"in 'libraries' option (library '%s'), "
|
||||
"'sources' must be present and must be "
|
||||
"a list of source filenames" % lib_name)
|
||||
|
||||
filenames.extend(sources)
|
||||
return filenames
|
||||
|
||||
|
||||
def build_libraries(self, libraries):
|
||||
for (lib_name, build_info) in libraries:
|
||||
sources = build_info.get('sources')
|
||||
if sources is None or not isinstance(sources, (list, tuple)):
|
||||
raise DistutilsSetupError(
|
||||
"in 'libraries' option (library '%s'), "
|
||||
"'sources' must be present and must be "
|
||||
"a list of source filenames" % lib_name)
|
||||
sources = list(sources)
|
||||
|
||||
log.info("building '%s' library", lib_name)
|
||||
|
||||
# First, compile the source code to object files in the library
|
||||
# directory. (This should probably change to putting object
|
||||
# files in a temporary build directory.)
|
||||
macros = build_info.get('macros')
|
||||
include_dirs = build_info.get('include_dirs')
|
||||
objects = self.compiler.compile(sources,
|
||||
output_dir=self.build_temp,
|
||||
macros=macros,
|
||||
include_dirs=include_dirs,
|
||||
debug=self.debug)
|
||||
|
||||
# Now "link" the object files together into a static library.
|
||||
# (On Unix at least, this isn't really linking -- it just
|
||||
# builds an archive. Whatever.)
|
||||
self.compiler.create_static_lib(objects, lib_name,
|
||||
output_dir=self.build_clib,
|
||||
debug=self.debug)
|
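    # Editorial sketch, not part of the original source: wired up from a
    # setup script (names assumed) as
    #
    #     setup(name='sample', version='1.0',
    #           libraries=[('sample', {'sources': ['src/sample.c']})],
    #           ext_modules=[Extension('_sample', ['src/_sample.c'],
    #                                  libraries=['sample'])])
    #
    # the objects land in build_temp and the static library (libsample.a on
    # Unix) in build_clib, where build_ext links against it.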
@ -0,0 +1,755 @@
|
||||
"""distutils.command.build_ext
|
||||
|
||||
Implements the Distutils 'build_ext' command, for building extension
|
||||
modules (currently limited to C extensions, should accommodate C++
|
||||
extensions ASAP)."""
|
||||
|
||||
import contextlib
|
||||
import os
|
||||
import re
|
||||
import sys
|
||||
from distutils.core import Command
|
||||
from distutils.errors import *
|
||||
from distutils.sysconfig import customize_compiler, get_python_version
|
||||
from distutils.sysconfig import get_config_h_filename
|
||||
from distutils.dep_util import newer_group
|
||||
from distutils.extension import Extension
|
||||
from distutils.util import get_platform
|
||||
from distutils import log
|
||||
from . import py37compat
|
||||
|
||||
from site import USER_BASE
|
||||
|
||||
# An extension name is just a dot-separated list of Python NAMEs (ie.
|
||||
# the same as a fully-qualified module name).
|
||||
extension_name_re = re.compile \
|
||||
(r'^[a-zA-Z_][a-zA-Z_0-9]*(\.[a-zA-Z_][a-zA-Z_0-9]*)*$')
|
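# Editorial sketch, not part of the original source:
#
#     >>> bool(extension_name_re.match('pkg.sub._mod'))
#     True
#     >>> bool(extension_name_re.match('pkg.3d'))
#     False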
||||
|
||||
|
||||
def show_compilers():
|
||||
from distutils.ccompiler import show_compilers
|
||||
show_compilers()
|
||||
|
||||
|
||||
class build_ext(Command):
|
||||
|
||||
description = "build C/C++ extensions (compile/link to build directory)"
|
||||
|
||||
# XXX thoughts on how to deal with complex command-line options like
|
||||
# these, i.e. how to make it so fancy_getopt can suck them off the
|
||||
# command line and make it look like setup.py defined the appropriate
|
||||
# lists of tuples of what-have-you.
|
||||
# - each command needs a callback to process its command-line options
|
||||
# - Command.__init__() needs access to its share of the whole
|
||||
# command line (must ultimately come from
|
||||
# Distribution.parse_command_line())
|
||||
# - it then calls the current command class' option-parsing
|
||||
# callback to deal with weird options like -D, which have to
|
||||
# parse the option text and churn out some custom data
|
||||
# structure
|
||||
# - that data structure (in this case, a list of 2-tuples)
|
||||
# will then be present in the command object by the time
|
||||
# we get to finalize_options() (i.e. the constructor
|
||||
# takes care of both command-line and client options
|
||||
# in between initialize_options() and finalize_options())
|
||||
|
||||
sep_by = " (separated by '%s')" % os.pathsep
|
||||
user_options = [
|
||||
('build-lib=', 'b',
|
||||
"directory for compiled extension modules"),
|
||||
('build-temp=', 't',
|
||||
"directory for temporary files (build by-products)"),
|
||||
('plat-name=', 'p',
|
||||
"platform name to cross-compile for, if supported "
|
||||
"(default: %s)" % get_platform()),
|
||||
('inplace', 'i',
|
||||
"ignore build-lib and put compiled extensions into the source " +
|
||||
"directory alongside your pure Python modules"),
|
||||
('include-dirs=', 'I',
|
||||
"list of directories to search for header files" + sep_by),
|
||||
('define=', 'D',
|
||||
"C preprocessor macros to define"),
|
||||
('undef=', 'U',
|
||||
"C preprocessor macros to undefine"),
|
||||
('libraries=', 'l',
|
||||
"external C libraries to link with"),
|
||||
('library-dirs=', 'L',
|
||||
"directories to search for external C libraries" + sep_by),
|
||||
('rpath=', 'R',
|
||||
"directories to search for shared C libraries at runtime"),
|
||||
('link-objects=', 'O',
|
||||
"extra explicit link objects to include in the link"),
|
||||
('debug', 'g',
|
||||
"compile/link with debugging information"),
|
||||
('force', 'f',
|
||||
"forcibly build everything (ignore file timestamps)"),
|
||||
('compiler=', 'c',
|
||||
"specify the compiler type"),
|
||||
('parallel=', 'j',
|
||||
"number of parallel build jobs"),
|
||||
('swig-cpp', None,
|
||||
"make SWIG create C++ files (default is C)"),
|
||||
('swig-opts=', None,
|
||||
"list of SWIG command line options"),
|
||||
('swig=', None,
|
||||
"path to the SWIG executable"),
|
||||
('user', None,
|
||||
"add user include, library and rpath")
|
||||
]
|
||||
|
||||
boolean_options = ['inplace', 'debug', 'force', 'swig-cpp', 'user']
|
||||
|
||||
help_options = [
|
||||
('help-compiler', None,
|
||||
"list available compilers", show_compilers),
|
||||
]
|
||||
|
||||
def initialize_options(self):
|
||||
self.extensions = None
|
||||
self.build_lib = None
|
||||
self.plat_name = None
|
||||
self.build_temp = None
|
||||
self.inplace = 0
|
||||
self.package = None
|
||||
|
||||
self.include_dirs = None
|
||||
self.define = None
|
||||
self.undef = None
|
||||
self.libraries = None
|
||||
self.library_dirs = None
|
||||
self.rpath = None
|
||||
self.link_objects = None
|
||||
self.debug = None
|
||||
self.force = None
|
||||
self.compiler = None
|
||||
self.swig = None
|
||||
self.swig_cpp = None
|
||||
self.swig_opts = None
|
||||
self.user = None
|
||||
self.parallel = None
|
||||
|
||||
def finalize_options(self):
|
||||
from distutils import sysconfig
|
||||
|
||||
self.set_undefined_options('build',
|
||||
('build_lib', 'build_lib'),
|
||||
('build_temp', 'build_temp'),
|
||||
('compiler', 'compiler'),
|
||||
('debug', 'debug'),
|
||||
('force', 'force'),
|
||||
('parallel', 'parallel'),
|
||||
('plat_name', 'plat_name'),
|
||||
)
|
||||
|
||||
if self.package is None:
|
||||
self.package = self.distribution.ext_package
|
||||
|
||||
self.extensions = self.distribution.ext_modules
|
||||
|
||||
# Make sure Python's include directories (for Python.h, pyconfig.h,
|
||||
# etc.) are in the include search path.
|
||||
py_include = sysconfig.get_python_inc()
|
||||
plat_py_include = sysconfig.get_python_inc(plat_specific=1)
|
||||
if self.include_dirs is None:
|
||||
self.include_dirs = self.distribution.include_dirs or []
|
||||
if isinstance(self.include_dirs, str):
|
||||
self.include_dirs = self.include_dirs.split(os.pathsep)
|
||||
|
||||
# If in a virtualenv, add its include directory
|
||||
# Issue 16116
|
||||
if sys.exec_prefix != sys.base_exec_prefix:
|
||||
self.include_dirs.append(os.path.join(sys.exec_prefix, 'include'))
|
||||
|
||||
# Put the Python "system" include dir at the end, so that
|
||||
# any local include dirs take precedence.
|
||||
self.include_dirs.extend(py_include.split(os.path.pathsep))
|
||||
if plat_py_include != py_include:
|
||||
self.include_dirs.extend(
|
||||
plat_py_include.split(os.path.pathsep))
|
||||
|
||||
self.ensure_string_list('libraries')
|
||||
self.ensure_string_list('link_objects')
|
||||
|
||||
# Life is easier if we're not forever checking for None, so
|
||||
# simplify these options to empty lists if unset
|
||||
if self.libraries is None:
|
||||
self.libraries = []
|
||||
if self.library_dirs is None:
|
||||
self.library_dirs = []
|
||||
elif isinstance(self.library_dirs, str):
|
||||
self.library_dirs = self.library_dirs.split(os.pathsep)
|
||||
|
||||
if self.rpath is None:
|
||||
self.rpath = []
|
||||
elif isinstance(self.rpath, str):
|
||||
self.rpath = self.rpath.split(os.pathsep)
|
||||
|
||||
# for extensions under windows use different directories
|
||||
# for Release and Debug builds.
|
||||
# also Python's library directory must be appended to library_dirs
|
||||
if os.name == 'nt':
|
||||
# the 'libs' directory is for binary installs - we assume that
|
||||
# must be the *native* platform. But we don't really support
|
||||
# cross-compiling via a binary install anyway, so we let it go.
|
||||
self.library_dirs.append(os.path.join(sys.exec_prefix, 'libs'))
|
||||
if sys.base_exec_prefix != sys.prefix: # Issue 16116
|
||||
self.library_dirs.append(os.path.join(sys.base_exec_prefix, 'libs'))
|
||||
if self.debug:
|
||||
self.build_temp = os.path.join(self.build_temp, "Debug")
|
||||
else:
|
||||
self.build_temp = os.path.join(self.build_temp, "Release")
|
||||
|
||||
# Append the source distribution include and library directories,
|
||||
# this allows distutils on windows to work in the source tree
|
||||
self.include_dirs.append(os.path.dirname(get_config_h_filename()))
|
||||
self.library_dirs.append(sys.base_exec_prefix)
|
||||
|
||||
# Use the .lib files for the correct architecture
|
||||
if self.plat_name == 'win32':
|
||||
suffix = 'win32'
|
||||
else:
|
||||
# win-amd64
|
||||
suffix = self.plat_name[4:]
|
||||
new_lib = os.path.join(sys.exec_prefix, 'PCbuild')
|
||||
if suffix:
|
||||
new_lib = os.path.join(new_lib, suffix)
|
||||
self.library_dirs.append(new_lib)
|
||||
|
||||
# For extensions under Cygwin, Python's library directory must be
|
||||
# appended to library_dirs
|
||||
if sys.platform[:6] == 'cygwin':
|
||||
if not sysconfig.python_build:
|
||||
# building third party extensions
|
||||
self.library_dirs.append(os.path.join(sys.prefix, "lib",
|
||||
"python" + get_python_version(),
|
||||
"config"))
|
||||
else:
|
||||
# building python standard extensions
|
||||
self.library_dirs.append('.')
|
||||
|
||||
# For building extensions with a shared Python library,
|
||||
# Python's library directory must be appended to library_dirs
|
||||
# See Issues: #1600860, #4366
|
||||
if (sysconfig.get_config_var('Py_ENABLE_SHARED')):
|
||||
if not sysconfig.python_build:
|
||||
# building third party extensions
|
||||
self.library_dirs.append(sysconfig.get_config_var('LIBDIR'))
|
||||
else:
|
||||
# building python standard extensions
|
||||
self.library_dirs.append('.')
|
||||
|
||||
# The argument parsing will result in self.define being a string, but
|
||||
# it has to be a list of 2-tuples. All the preprocessor symbols
|
||||
# specified by the 'define' option will be set to '1'. Multiple
|
||||
# symbols can be separated with commas.
|
||||
|
||||
if self.define:
|
||||
defines = self.define.split(',')
|
||||
self.define = [(symbol, '1') for symbol in defines]
|
||||
|
||||
# The option for macros to undefine is also a string from the
|
||||
# option parsing, but has to be a list. Multiple symbols can also
|
||||
# be separated with commas here.
|
||||
if self.undef:
|
||||
self.undef = self.undef.split(',')
|
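        # Editorial sketch, not part of the original source: the two blocks
        # above turn command-line input such as
        #
        #     --define=WITH_FOO,BAR --undef=NDEBUG,TRACE
        #
        # into self.define = [('WITH_FOO', '1'), ('BAR', '1')] and
        # self.undef = ['NDEBUG', 'TRACE'].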
||||
|
||||
if self.swig_opts is None:
|
||||
self.swig_opts = []
|
||||
else:
|
||||
self.swig_opts = self.swig_opts.split(' ')
|
||||
|
||||
# Finally add the user include and library directories if requested
|
||||
if self.user:
|
||||
user_include = os.path.join(USER_BASE, "include")
|
||||
user_lib = os.path.join(USER_BASE, "lib")
|
||||
if os.path.isdir(user_include):
|
||||
self.include_dirs.append(user_include)
|
||||
if os.path.isdir(user_lib):
|
||||
self.library_dirs.append(user_lib)
|
||||
self.rpath.append(user_lib)
|
||||
|
||||
if isinstance(self.parallel, str):
|
||||
try:
|
||||
self.parallel = int(self.parallel)
|
||||
except ValueError:
|
||||
raise DistutilsOptionError("parallel should be an integer")
|
||||
|
||||
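
    # Usage illustration (hypothetical option values, not part of the
    # upstream logic): after the parsing above, a command line such as
    #     python setup.py build_ext --define=FOO,BAR --undef=BAZ --parallel=4
    # leaves self.define == [('FOO', '1'), ('BAR', '1')],
    # self.undef == ['BAZ'], and self.parallel == 4.
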
    def run(self):
        from distutils.ccompiler import new_compiler

        # 'self.extensions', as supplied by setup.py, is a list of
        # Extension instances.  See the documentation for Extension (in
        # distutils.extension) for details.
        #
        # For backwards compatibility with Distutils 0.8.2 and earlier, we
        # also allow the 'extensions' list to be a list of tuples:
        #    (ext_name, build_info)
        # where build_info is a dictionary containing everything that
        # Extension instances do except the name, with a few things being
        # differently named.  We convert these 2-tuples to Extension
        # instances as needed.

        if not self.extensions:
            return

        # If we were asked to build any C/C++ libraries, make sure that the
        # directory where we put them is in the library search path for
        # linking extensions.
        if self.distribution.has_c_libraries():
            build_clib = self.get_finalized_command('build_clib')
            self.libraries.extend(build_clib.get_library_names() or [])
            self.library_dirs.append(build_clib.build_clib)

        # Setup the CCompiler object that we'll use to do all the
        # compiling and linking
        self.compiler = new_compiler(compiler=self.compiler,
                                     verbose=self.verbose,
                                     dry_run=self.dry_run,
                                     force=self.force)
        customize_compiler(self.compiler)
        # If we are cross-compiling, init the compiler now (if we are not
        # cross-compiling, init would not hurt, but people may rely on
        # late initialization of compiler even if they shouldn't...)
        if os.name == 'nt' and self.plat_name != get_platform():
            self.compiler.initialize(self.plat_name)

        # And make sure that any compile/link-related options (which might
        # come from the command-line or from the setup script) are set in
        # that CCompiler object -- that way, they automatically apply to
        # all compiling and linking done here.
        if self.include_dirs is not None:
            self.compiler.set_include_dirs(self.include_dirs)
        if self.define is not None:
            # 'define' option is a list of (name, value) tuples
            for (name, value) in self.define:
                self.compiler.define_macro(name, value)
        if self.undef is not None:
            for macro in self.undef:
                self.compiler.undefine_macro(macro)
        if self.libraries is not None:
            self.compiler.set_libraries(self.libraries)
        if self.library_dirs is not None:
            self.compiler.set_library_dirs(self.library_dirs)
        if self.rpath is not None:
            self.compiler.set_runtime_library_dirs(self.rpath)
        if self.link_objects is not None:
            self.compiler.set_link_objects(self.link_objects)

        # Now actually compile and link everything.
        self.build_extensions()

    def check_extensions_list(self, extensions):
        """Ensure that the list of extensions (presumably provided as a
        command option 'extensions') is valid, i.e. it is a list of
        Extension objects.  We also support the old-style list of 2-tuples,
        where the tuples are (ext_name, build_info), which are converted to
        Extension instances here.

        Raise DistutilsSetupError if the structure is invalid anywhere;
        just return otherwise.
        """
        if not isinstance(extensions, list):
            raise DistutilsSetupError(
                "'ext_modules' option must be a list of Extension instances")

        for i, ext in enumerate(extensions):
            if isinstance(ext, Extension):
                continue                # OK! (assume type-checking done
                                        # by Extension constructor)

            if not isinstance(ext, tuple) or len(ext) != 2:
                raise DistutilsSetupError(
                    "each element of 'ext_modules' option must be an "
                    "Extension instance or 2-tuple")

            ext_name, build_info = ext

            log.warn("old-style (ext_name, build_info) tuple found in "
                     "ext_modules for extension '%s' "
                     "-- please convert to Extension instance", ext_name)

            if not (isinstance(ext_name, str) and
                    extension_name_re.match(ext_name)):
                raise DistutilsSetupError(
                    "first element of each tuple in 'ext_modules' "
                    "must be the extension name (a string)")

            if not isinstance(build_info, dict):
                raise DistutilsSetupError(
                    "second element of each tuple in 'ext_modules' "
                    "must be a dictionary (build info)")

            # OK, the (ext_name, build_info) dict is type-safe: convert it
            # to an Extension instance.
            ext = Extension(ext_name, build_info['sources'])

            # Easy stuff: one-to-one mapping from dict elements to
            # instance attributes.
            for key in ('include_dirs', 'library_dirs', 'libraries',
                        'extra_objects', 'extra_compile_args',
                        'extra_link_args'):
                val = build_info.get(key)
                if val is not None:
                    setattr(ext, key, val)

            # Medium-easy stuff: same syntax/semantics, different names.
            ext.runtime_library_dirs = build_info.get('rpath')
            if 'def_file' in build_info:
                log.warn("'def_file' element of build info dict "
                         "no longer supported")

            # Non-trivial stuff: 'macros' split into 'define_macros'
            # and 'undef_macros'.
            macros = build_info.get('macros')
            if macros:
                ext.define_macros = []
                ext.undef_macros = []
                for macro in macros:
                    if not (isinstance(macro, tuple) and len(macro) in (1, 2)):
                        raise DistutilsSetupError(
                            "'macros' element of build info dict "
                            "must be 1- or 2-tuple")
                    if len(macro) == 1:
                        ext.undef_macros.append(macro[0])
                    elif len(macro) == 2:
                        ext.define_macros.append(macro)

            extensions[i] = ext
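
    # Illustration of the old-style conversion above (hypothetical names):
    # the 2-tuple
    #     ('foo', {'sources': ['foo.c'],
    #              'macros': [('NDEBUG', '1'), ('TRACE',)]})
    # is rewritten into Extension('foo', ['foo.c']) with
    # define_macros == [('NDEBUG', '1')] and undef_macros == ['TRACE'],
    # since 1-tuples in 'macros' mean "undefine" and 2-tuples mean "define".
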
    def get_source_files(self):
        self.check_extensions_list(self.extensions)
        filenames = []

        # Wouldn't it be neat if we knew the names of header files too...
        for ext in self.extensions:
            filenames.extend(ext.sources)
        return filenames

    def get_outputs(self):
        # Sanity check the 'extensions' list -- can't assume this is being
        # done in the same run as a 'build_extensions()' call (in fact, we
        # can probably assume that it *isn't*!).
        self.check_extensions_list(self.extensions)

        # And build the list of output (built) filenames.  Note that this
        # ignores the 'inplace' flag, and assumes everything goes in the
        # "build" tree.
        outputs = []
        for ext in self.extensions:
            outputs.append(self.get_ext_fullpath(ext.name))
        return outputs

    def build_extensions(self):
        # First, sanity-check the 'extensions' list
        self.check_extensions_list(self.extensions)
        if self.parallel:
            self._build_extensions_parallel()
        else:
            self._build_extensions_serial()

    def _build_extensions_parallel(self):
        workers = self.parallel
        if self.parallel is True:
            workers = os.cpu_count()  # may return None
        try:
            from concurrent.futures import ThreadPoolExecutor
        except ImportError:
            workers = None

        if workers is None:
            self._build_extensions_serial()
            return

        with ThreadPoolExecutor(max_workers=workers) as executor:
            futures = [executor.submit(self.build_extension, ext)
                       for ext in self.extensions]
            for ext, fut in zip(self.extensions, futures):
                with self._filter_build_errors(ext):
                    fut.result()
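
    # Note (illustrative): ``python setup.py build_ext --parallel=4`` builds
    # up to four extensions concurrently in the thread pool above, while
    # ``parallel = True`` (set programmatically) uses os.cpu_count() workers;
    # a falsy value, or a missing concurrent.futures, falls back to the
    # serial path below.
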
    def _build_extensions_serial(self):
        for ext in self.extensions:
            with self._filter_build_errors(ext):
                self.build_extension(ext)

    @contextlib.contextmanager
    def _filter_build_errors(self, ext):
        try:
            yield
        except (CCompilerError, DistutilsError, CompileError) as e:
            if not ext.optional:
                raise
            self.warn('building extension "%s" failed: %s' %
                      (ext.name, e))

    def build_extension(self, ext):
        sources = ext.sources
        if sources is None or not isinstance(sources, (list, tuple)):
            raise DistutilsSetupError(
                "in 'ext_modules' option (extension '%s'), "
                "'sources' must be present and must be "
                "a list of source filenames" % ext.name)
        # sort to make the resulting .so file build reproducible
        sources = sorted(sources)

        ext_path = self.get_ext_fullpath(ext.name)
        depends = sources + ext.depends
        if not (self.force or newer_group(depends, ext_path, 'newer')):
            log.debug("skipping '%s' extension (up-to-date)", ext.name)
            return
        else:
            log.info("building '%s' extension", ext.name)

        # First, scan the sources for SWIG definition files (.i), run
        # SWIG on 'em to create .c files, and modify the sources list
        # accordingly.
        sources = self.swig_sources(sources, ext)

        # Next, compile the source code to object files.

        # XXX not honouring 'define_macros' or 'undef_macros' -- the
        # CCompiler API needs to change to accommodate this, and I
        # want to do one thing at a time!

        # Two possible sources for extra compiler arguments:
        #   - 'extra_compile_args' in Extension object
        #   - CFLAGS environment variable (not particularly
        #     elegant, but people seem to expect it and I
        #     guess it's useful)
        # The environment variable should take precedence, and
        # any sensible compiler will give precedence to later
        # command line args.  Hence we combine them in order:
        extra_args = ext.extra_compile_args or []

        macros = ext.define_macros[:]
        for undef in ext.undef_macros:
            macros.append((undef,))

        objects = self.compiler.compile(sources,
                                        output_dir=self.build_temp,
                                        macros=macros,
                                        include_dirs=ext.include_dirs,
                                        debug=self.debug,
                                        extra_postargs=extra_args,
                                        depends=ext.depends)

        # XXX outdated variable, kept here in case third-party code
        # needs it.
        self._built_objects = objects[:]

        # Now link the object files together into a "shared object" --
        # of course, first we have to figure out all the other things
        # that go into the mix.
        if ext.extra_objects:
            objects.extend(ext.extra_objects)
        extra_args = ext.extra_link_args or []

        # Detect target language, if not provided
        language = ext.language or self.compiler.detect_language(sources)

        self.compiler.link_shared_object(
            objects, ext_path,
            libraries=self.get_libraries(ext),
            library_dirs=ext.library_dirs,
            runtime_library_dirs=ext.runtime_library_dirs,
            extra_postargs=extra_args,
            export_symbols=self.get_export_symbols(ext),
            debug=self.debug,
            build_temp=self.build_temp,
            target_lang=language)

    def swig_sources(self, sources, extension):
        """Walk the list of source files in 'sources', looking for SWIG
        interface (.i) files.  Run SWIG on all that are found, and
        return a modified 'sources' list with SWIG source files replaced
        by the generated C (or C++) files.
        """
        new_sources = []
        swig_sources = []
        swig_targets = {}

        # XXX this drops generated C/C++ files into the source tree, which
        # is fine for developers who want to distribute the generated
        # source -- but there should be an option to put SWIG output in
        # the temp dir.

        if self.swig_cpp:
            log.warn("--swig-cpp is deprecated - use --swig-opts=-c++")

        if self.swig_cpp or ('-c++' in self.swig_opts) or \
           ('-c++' in extension.swig_opts):
            target_ext = '.cpp'
        else:
            target_ext = '.c'

        for source in sources:
            (base, ext) = os.path.splitext(source)
            if ext == ".i":             # SWIG interface file
                new_sources.append(base + '_wrap' + target_ext)
                swig_sources.append(source)
                swig_targets[source] = new_sources[-1]
            else:
                new_sources.append(source)

        if not swig_sources:
            return new_sources

        swig = self.swig or self.find_swig()
        swig_cmd = [swig, "-python"]
        swig_cmd.extend(self.swig_opts)
        if self.swig_cpp:
            swig_cmd.append("-c++")

        # Do not override commandline arguments
        if not self.swig_opts:
            for o in extension.swig_opts:
                swig_cmd.append(o)

        for source in swig_sources:
            target = swig_targets[source]
            log.info("swigging %s to %s", source, target)
            self.spawn(swig_cmd + ["-o", target, source])

        return new_sources
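
    # Worked example (hypothetical filenames): given sources
    # ['spam.c', 'eggs.i'] and no '-c++' option, the loop above rewrites the
    # list to ['spam.c', 'eggs_wrap.c'] and then runs roughly
    #     swig -python -o eggs_wrap.c eggs.i
    # so the compile step only ever sees C/C++ inputs.
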
    def find_swig(self):
        """Return the name of the SWIG executable.  On Unix, this is
        just "swig" -- it should be in the PATH.  Tries a bit harder on
        Windows.
        """
        if os.name == "posix":
            return "swig"
        elif os.name == "nt":
            # Look for SWIG in its standard installation directory on
            # Windows (or so I presume!).  If we find it there, great;
            # if not, act like Unix and assume it's in the PATH.
            for vers in ("1.3", "1.2", "1.1"):
                fn = os.path.join("c:\\swig%s" % vers, "swig.exe")
                if os.path.isfile(fn):
                    return fn
            else:
                return "swig.exe"
        else:
            raise DistutilsPlatformError(
                "I don't know how to find (much less run) SWIG "
                "on platform '%s'" % os.name)

    # -- Name generators -----------------------------------------------
    # (extension names, filenames, whatever)
    def get_ext_fullpath(self, ext_name):
        """Returns the path of the filename for a given extension.

        The file is located in `build_lib` or directly in the package
        (inplace option).
        """
        fullname = self.get_ext_fullname(ext_name)
        modpath = fullname.split('.')
        filename = self.get_ext_filename(modpath[-1])

        if not self.inplace:
            # no further work needed
            # returning:
            #   build_dir/package/path/filename
            filename = os.path.join(*modpath[:-1] + [filename])
            return os.path.join(self.build_lib, filename)

        # the inplace option requires us to find the package directory
        # using the build_py command for that
        package = '.'.join(modpath[0:-1])
        build_py = self.get_finalized_command('build_py')
        package_dir = os.path.abspath(build_py.get_package_dir(package))

        # returning
        #   package_dir/filename
        return os.path.join(package_dir, filename)

    def get_ext_fullname(self, ext_name):
        """Returns the fullname of a given extension name.

        Adds the `package.` prefix"""
        if self.package is None:
            return ext_name
        else:
            return self.package + '.' + ext_name

    def get_ext_filename(self, ext_name):
        r"""Convert the name of an extension (eg. "foo.bar") into the name
        of the file from which it will be loaded (eg. "foo/bar.so", or
        "foo\bar.pyd").
        """
        from distutils.sysconfig import get_config_var
        ext_path = ext_name.split('.')
        ext_suffix = get_config_var('EXT_SUFFIX')
        return os.path.join(*ext_path) + ext_suffix
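
    # Example (values vary by platform and interpreter): on CPython 3.10 for
    # 64-bit Linux, EXT_SUFFIX is typically
    # '.cpython-310-x86_64-linux-gnu.so', so get_ext_filename('foo.bar')
    # returns 'foo/bar.cpython-310-x86_64-linux-gnu.so'; on Windows the
    # suffix is usually '.pyd'.
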
    def get_export_symbols(self, ext):
        """Return the list of symbols that a shared extension has to
        export.  This either uses 'ext.export_symbols' or, if it's not
        provided, "PyInit_" + module_name.  Only relevant on Windows, where
        the .pyd file (DLL) must export the module "PyInit_" function.
        """
        name = ext.name.split('.')[-1]
        try:
            # Unicode module name support as defined in PEP-489
            # https://www.python.org/dev/peps/pep-0489/#export-hook-name
            name.encode('ascii')
        except UnicodeEncodeError:
            suffix = 'U_' + name.encode('punycode').replace(b'-', b'_').decode('ascii')
        else:
            suffix = "_" + name

        initfunc_name = "PyInit" + suffix
        if initfunc_name not in ext.export_symbols:
            ext.export_symbols.append(initfunc_name)
        return ext.export_symbols
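
    # Example of the PEP 489 naming above: an ASCII module 'spam' exports
    # 'PyInit_spam'; a non-ASCII module name is punycode-encoded instead, so
    # the hook becomes 'PyInitU_' + name.encode('punycode') with any '-' in
    # the encoding replaced by '_'.
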
    def get_libraries(self, ext):
        """Return the list of libraries to link against when building a
        shared extension.  On most platforms, this is just 'ext.libraries';
        on Windows, we add the Python library (eg. python20.dll).
        """
        # The python library is always needed on Windows.  For MSVC, this
        # is redundant, since the library is mentioned in a pragma in
        # pyconfig.h that MSVC groks.  The other Windows compilers all seem
        # to need it mentioned explicitly, though, so that's what we do.
        # Append '_d' to the python import library on debug builds.
        if sys.platform == "win32":
            from distutils._msvccompiler import MSVCCompiler
            if not isinstance(self.compiler, MSVCCompiler):
                template = "python%d%d"
                if self.debug:
                    template = template + '_d'
                pythonlib = (template %
                             (sys.hexversion >> 24, (sys.hexversion >> 16) & 0xff))
                # don't extend ext.libraries, it may be shared with other
                # extensions, it is a reference to the original list
                return ext.libraries + [pythonlib]
        else:
            # On Android only the main executable and LD_PRELOADs are considered
            # to be RTLD_GLOBAL, all the dependencies of the main executable
            # remain RTLD_LOCAL and so the shared libraries must be linked with
            # libpython when python is built with a shared python library (issue
            # bpo-21536).
            # On Cygwin (and if required, other POSIX-like platforms based on
            # Windows like MinGW) it is simply necessary that all symbols in
            # shared libraries are resolved at link time.
            from distutils.sysconfig import get_config_var
            link_libpython = False
            if get_config_var('Py_ENABLE_SHARED'):
                # A native build on an Android device or on Cygwin
                if hasattr(sys, 'getandroidapilevel'):
                    link_libpython = True
                elif sys.platform == 'cygwin':
                    link_libpython = True
                elif '_PYTHON_HOST_PLATFORM' in os.environ:
                    # We are cross-compiling for one of the relevant platforms
                    if get_config_var('ANDROID_API_LEVEL') != 0:
                        link_libpython = True
                    elif get_config_var('MACHDEP') == 'cygwin':
                        link_libpython = True

            if link_libpython:
                ldversion = get_config_var('LDVERSION')
                return ext.libraries + ['python' + ldversion]

        return ext.libraries + py37compat.pythonlib()
@ -0,0 +1,392 @@
"""distutils.command.build_py
|
||||
|
||||
Implements the Distutils 'build_py' command."""
|
||||
|
||||
import os
|
||||
import importlib.util
|
||||
import sys
|
||||
import glob
|
||||
|
||||
from distutils.core import Command
|
||||
from distutils.errors import *
|
||||
from distutils.util import convert_path
|
||||
from distutils import log
|
||||
|
||||
class build_py (Command):
|
||||
|
||||
description = "\"build\" pure Python modules (copy to build directory)"
|
||||
|
||||
user_options = [
|
||||
('build-lib=', 'd', "directory to \"build\" (copy) to"),
|
||||
('compile', 'c', "compile .py to .pyc"),
|
||||
('no-compile', None, "don't compile .py files [default]"),
|
||||
('optimize=', 'O',
|
||||
"also compile with optimization: -O1 for \"python -O\", "
|
||||
"-O2 for \"python -OO\", and -O0 to disable [default: -O0]"),
|
||||
('force', 'f', "forcibly build everything (ignore file timestamps)"),
|
||||
]
|
||||
|
||||
boolean_options = ['compile', 'force']
|
||||
negative_opt = {'no-compile' : 'compile'}
|
||||
|
||||
def initialize_options(self):
|
||||
self.build_lib = None
|
||||
self.py_modules = None
|
||||
self.package = None
|
||||
self.package_data = None
|
||||
self.package_dir = None
|
||||
self.compile = 0
|
||||
self.optimize = 0
|
||||
self.force = None
|
||||
|
||||
def finalize_options(self):
|
||||
self.set_undefined_options('build',
|
||||
('build_lib', 'build_lib'),
|
||||
('force', 'force'))
|
||||
|
||||
# Get the distribution options that are aliases for build_py
|
||||
# options -- list of packages and list of modules.
|
||||
self.packages = self.distribution.packages
|
||||
self.py_modules = self.distribution.py_modules
|
||||
self.package_data = self.distribution.package_data
|
||||
self.package_dir = {}
|
||||
if self.distribution.package_dir:
|
||||
for name, path in self.distribution.package_dir.items():
|
||||
self.package_dir[name] = convert_path(path)
|
||||
self.data_files = self.get_data_files()
|
||||
|
||||
# Ick, copied straight from install_lib.py (fancy_getopt needs a
|
||||
# type system! Hell, *everything* needs a type system!!!)
|
||||
if not isinstance(self.optimize, int):
|
||||
try:
|
||||
self.optimize = int(self.optimize)
|
||||
assert 0 <= self.optimize <= 2
|
||||
except (ValueError, AssertionError):
|
||||
raise DistutilsOptionError("optimize must be 0, 1, or 2")
|
||||
|
||||
def run(self):
|
||||
# XXX copy_file by default preserves atime and mtime. IMHO this is
|
||||
# the right thing to do, but perhaps it should be an option -- in
|
||||
# particular, a site administrator might want installed files to
|
||||
# reflect the time of installation rather than the last
|
||||
# modification time before the installed release.
|
||||
|
||||
# XXX copy_file by default preserves mode, which appears to be the
|
||||
# wrong thing to do: if a file is read-only in the working
|
||||
# directory, we want it to be installed read/write so that the next
|
||||
# installation of the same module distribution can overwrite it
|
||||
# without problems. (This might be a Unix-specific issue.) Thus
|
||||
# we turn off 'preserve_mode' when copying to the build directory,
|
||||
# since the build directory is supposed to be exactly what the
|
||||
# installation will look like (ie. we preserve mode when
|
||||
# installing).
|
||||
|
||||
# Two options control which modules will be installed: 'packages'
|
||||
# and 'py_modules'. The former lets us work with whole packages, not
|
||||
# specifying individual modules at all; the latter is for
|
||||
# specifying modules one-at-a-time.
|
||||
|
||||
if self.py_modules:
|
||||
self.build_modules()
|
||||
if self.packages:
|
||||
self.build_packages()
|
||||
self.build_package_data()
|
||||
|
||||
self.byte_compile(self.get_outputs(include_bytecode=0))
|
||||
|
||||
def get_data_files(self):
|
||||
"""Generate list of '(package,src_dir,build_dir,filenames)' tuples"""
|
||||
data = []
|
||||
if not self.packages:
|
||||
return data
|
||||
for package in self.packages:
|
||||
# Locate package source directory
|
||||
src_dir = self.get_package_dir(package)
|
||||
|
||||
# Compute package build directory
|
||||
build_dir = os.path.join(*([self.build_lib] + package.split('.')))
|
||||
|
||||
# Length of path to strip from found files
|
||||
plen = 0
|
||||
if src_dir:
|
||||
plen = len(src_dir)+1
|
||||
|
||||
# Strip directory from globbed filenames
|
||||
filenames = [
|
||||
file[plen:] for file in self.find_data_files(package, src_dir)
|
||||
]
|
||||
data.append((package, src_dir, build_dir, filenames))
|
||||
return data
|
||||
|
||||
def find_data_files(self, package, src_dir):
|
||||
"""Return filenames for package's data files in 'src_dir'"""
|
||||
globs = (self.package_data.get('', [])
|
||||
+ self.package_data.get(package, []))
|
||||
files = []
|
||||
for pattern in globs:
|
||||
# Each pattern has to be converted to a platform-specific path
|
||||
filelist = glob.glob(os.path.join(glob.escape(src_dir), convert_path(pattern)))
|
||||
# Files that match more than one pattern are only added once
|
||||
files.extend([fn for fn in filelist if fn not in files
|
||||
and os.path.isfile(fn)])
|
||||
return files
|
||||
|
||||
def build_package_data(self):
|
||||
"""Copy data files into build directory"""
|
||||
lastdir = None
|
||||
for package, src_dir, build_dir, filenames in self.data_files:
|
||||
for filename in filenames:
|
||||
target = os.path.join(build_dir, filename)
|
||||
self.mkpath(os.path.dirname(target))
|
||||
self.copy_file(os.path.join(src_dir, filename), target,
|
||||
preserve_mode=False)
|
||||
|
||||
def get_package_dir(self, package):
|
||||
"""Return the directory, relative to the top of the source
|
||||
distribution, where package 'package' should be found
|
||||
(at least according to the 'package_dir' option, if any)."""
|
||||
path = package.split('.')
|
||||
|
||||
if not self.package_dir:
|
||||
if path:
|
||||
return os.path.join(*path)
|
||||
else:
|
||||
return ''
|
||||
else:
|
||||
tail = []
|
||||
while path:
|
||||
try:
|
||||
pdir = self.package_dir['.'.join(path)]
|
||||
except KeyError:
|
||||
tail.insert(0, path[-1])
|
||||
del path[-1]
|
||||
else:
|
||||
tail.insert(0, pdir)
|
||||
return os.path.join(*tail)
|
||||
else:
|
||||
# Oops, got all the way through 'path' without finding a
|
||||
# match in package_dir. If package_dir defines a directory
|
||||
# for the root (nameless) package, then fallback on it;
|
||||
# otherwise, we might as well have not consulted
|
||||
# package_dir at all, as we just use the directory implied
|
||||
# by 'tail' (which should be the same as the original value
|
||||
# of 'path' at this point).
|
||||
pdir = self.package_dir.get('')
|
||||
if pdir is not None:
|
||||
tail.insert(0, pdir)
|
||||
|
||||
if tail:
|
||||
return os.path.join(*tail)
|
||||
else:
|
||||
return ''
|
||||
|
||||
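
    # Example of the mapping logic above (hypothetical layout): with
    # package_dir == {'': 'src', 'spam.ham': 'lib/ham'},
    # get_package_dir('spam.ham') returns 'lib/ham' (longest explicit
    # match), while get_package_dir('eggs') finds no entry, falls back to
    # the root mapping, and returns os.path.join('src', 'eggs').
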
    def check_package(self, package, package_dir):
        # Empty dir name means current directory, which we can probably
        # assume exists.  Also, os.path.exists and isdir don't know about
        # my "empty string means current dir" convention, so we have to
        # circumvent them.
        if package_dir != "":
            if not os.path.exists(package_dir):
                raise DistutilsFileError(
                    "package directory '%s' does not exist" % package_dir)
            if not os.path.isdir(package_dir):
                raise DistutilsFileError(
                    "supposed package directory '%s' exists, "
                    "but is not a directory" % package_dir)

        # Require __init__.py for all but the "root package"
        if package:
            init_py = os.path.join(package_dir, "__init__.py")
            if os.path.isfile(init_py):
                return init_py
            else:
                log.warn(("package init file '%s' not found " +
                          "(or not a regular file)"), init_py)

        # Either not in a package at all (__init__.py not expected), or
        # __init__.py doesn't exist -- so don't return the filename.
        return None

    def check_module(self, module, module_file):
        if not os.path.isfile(module_file):
            log.warn("file %s (for module %s) not found", module_file, module)
            return False
        else:
            return True

    def find_package_modules(self, package, package_dir):
        self.check_package(package, package_dir)
        module_files = glob.glob(os.path.join(glob.escape(package_dir), "*.py"))
        modules = []
        setup_script = os.path.abspath(self.distribution.script_name)

        for f in module_files:
            abs_f = os.path.abspath(f)
            if abs_f != setup_script:
                module = os.path.splitext(os.path.basename(f))[0]
                modules.append((package, module, f))
            else:
                self.debug_print("excluding %s" % setup_script)
        return modules

    def find_modules(self):
        """Finds individually-specified Python modules, ie. those listed by
        module name in 'self.py_modules'.  Returns a list of tuples (package,
        module_base, filename): 'package' is a tuple of the path through
        package-space to the module; 'module_base' is the bare (no
        packages, no dots) module name, and 'filename' is the path to the
        ".py" file (relative to the distribution root) that implements the
        module.
        """
        # Map package names to tuples of useful info about the package:
        #    (package_dir, checked)
        # package_dir - the directory where we'll find source files for
        #   this package
        # checked - true if we have checked that the package directory
        #   is valid (exists, contains __init__.py, ... ?)
        packages = {}

        # List of (package, module, filename) tuples to return
        modules = []

        # We treat modules-in-packages almost the same as toplevel modules,
        # just the "package" for a toplevel is empty (either an empty
        # string or empty list, depending on context).  Differences:
        #   - don't check for __init__.py in directory for empty package
        for module in self.py_modules:
            path = module.split('.')
            package = '.'.join(path[0:-1])
            module_base = path[-1]

            try:
                (package_dir, checked) = packages[package]
            except KeyError:
                package_dir = self.get_package_dir(package)
                checked = 0

            if not checked:
                init_py = self.check_package(package, package_dir)
                packages[package] = (package_dir, 1)
                if init_py:
                    modules.append((package, "__init__", init_py))

            # XXX perhaps we should also check for just .pyc files
            # (so greedy closed-source bastards can distribute Python
            # modules too)
            module_file = os.path.join(package_dir, module_base + ".py")
            if not self.check_module(module, module_file):
                continue

            modules.append((package, module_base, module_file))

        return modules

    def find_all_modules(self):
        """Compute the list of all modules that will be built, whether
        they are specified one-module-at-a-time ('self.py_modules') or
        by whole packages ('self.packages').  Return a list of tuples
        (package, module, module_file), just like 'find_modules()' and
        'find_package_modules()' do."""
        modules = []
        if self.py_modules:
            modules.extend(self.find_modules())
        if self.packages:
            for package in self.packages:
                package_dir = self.get_package_dir(package)
                m = self.find_package_modules(package, package_dir)
                modules.extend(m)
        return modules

    def get_source_files(self):
        return [module[-1] for module in self.find_all_modules()]

    def get_module_outfile(self, build_dir, package, module):
        outfile_path = [build_dir] + list(package) + [module + ".py"]
        return os.path.join(*outfile_path)
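
    # Example (hypothetical values): get_module_outfile('build/lib',
    # ['spam', 'ham'], 'eggs') returns
    # os.path.join('build/lib', 'spam', 'ham', 'eggs.py'); callers below
    # pass 'package' pre-split into a list of path components.
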
    def get_outputs(self, include_bytecode=1):
        modules = self.find_all_modules()
        outputs = []
        for (package, module, module_file) in modules:
            package = package.split('.')
            filename = self.get_module_outfile(self.build_lib, package, module)
            outputs.append(filename)
            if include_bytecode:
                if self.compile:
                    outputs.append(importlib.util.cache_from_source(
                        filename, optimization=''))
                if self.optimize > 0:
                    outputs.append(importlib.util.cache_from_source(
                        filename, optimization=self.optimize))

        outputs += [
            os.path.join(build_dir, filename)
            for package, src_dir, build_dir, filenames in self.data_files
            for filename in filenames
            ]

        return outputs

    def build_module(self, module, module_file, package):
        if isinstance(package, str):
            package = package.split('.')
        elif not isinstance(package, (list, tuple)):
            raise TypeError(
                "'package' must be a string (dot-separated), list, or tuple")

        # Now put the module source file into the "build" area -- this is
        # easy, we just copy it somewhere under self.build_lib (the build
        # directory for Python source).
        outfile = self.get_module_outfile(self.build_lib, package, module)
        dir = os.path.dirname(outfile)
        self.mkpath(dir)
        return self.copy_file(module_file, outfile, preserve_mode=0)

    def build_modules(self):
        modules = self.find_modules()
        for (package, module, module_file) in modules:
            # Now "build" the module -- ie. copy the source file to
            # self.build_lib (the build directory for Python source).
            # (Actually, it gets copied to the directory for this package
            # under self.build_lib.)
            self.build_module(module, module_file, package)

    def build_packages(self):
        for package in self.packages:
            # Get list of (package, module, module_file) tuples based on
            # scanning the package directory.  'package' is only included
            # in the tuple so that 'find_modules()' and
            # 'find_package_modules()' have a consistent interface; it's
            # ignored here (apart from a sanity check).  Also, 'module' is
            # the *unqualified* module name (ie. no dots, no package -- we
            # already know its package!), and 'module_file' is the path to
            # the .py file, relative to the current directory
            # (ie. including 'package_dir').
            package_dir = self.get_package_dir(package)
            modules = self.find_package_modules(package, package_dir)

            # Now loop over the modules we found, "building" each one (just
            # copy it to self.build_lib).
            for (package_, module, module_file) in modules:
                assert package == package_
                self.build_module(module, module_file, package)

    def byte_compile(self, files):
        if sys.dont_write_bytecode:
            self.warn('byte-compiling is disabled, skipping.')
            return

        from distutils.util import byte_compile
        prefix = self.build_lib
        if prefix[-1] != os.sep:
            prefix = prefix + os.sep

        # XXX this code is essentially the same as the 'byte_compile()'
        # method of the "install_lib" command, except for the determination
        # of the 'prefix' string.  Hmmm.
        if self.compile:
            byte_compile(files, optimize=0,
                         force=self.force, prefix=prefix, dry_run=self.dry_run)
        if self.optimize > 0:
            byte_compile(files, optimize=self.optimize,
                         force=self.force, prefix=prefix, dry_run=self.dry_run)
@ -0,0 +1,152 @@
"""distutils.command.build_scripts
|
||||
|
||||
Implements the Distutils 'build_scripts' command."""
|
||||
|
||||
import os, re
|
||||
from stat import ST_MODE
|
||||
from distutils import sysconfig
|
||||
from distutils.core import Command
|
||||
from distutils.dep_util import newer
|
||||
from distutils.util import convert_path
|
||||
from distutils import log
|
||||
import tokenize
|
||||
|
||||
# check if Python is called on the first line with this expression
|
||||
first_line_re = re.compile(b'^#!.*python[0-9.]*([ \t].*)?$')
|
||||
|
||||
class build_scripts(Command):
|
||||
|
||||
description = "\"build\" scripts (copy and fixup #! line)"
|
||||
|
||||
user_options = [
|
||||
('build-dir=', 'd', "directory to \"build\" (copy) to"),
|
||||
('force', 'f', "forcibly build everything (ignore file timestamps"),
|
||||
('executable=', 'e', "specify final destination interpreter path"),
|
||||
]
|
||||
|
||||
boolean_options = ['force']
|
||||
|
||||
|
||||
def initialize_options(self):
|
||||
self.build_dir = None
|
||||
self.scripts = None
|
||||
self.force = None
|
||||
self.executable = None
|
||||
self.outfiles = None
|
||||
|
||||
def finalize_options(self):
|
||||
self.set_undefined_options('build',
|
||||
('build_scripts', 'build_dir'),
|
||||
('force', 'force'),
|
||||
('executable', 'executable'))
|
||||
self.scripts = self.distribution.scripts
|
||||
|
||||
def get_source_files(self):
|
||||
return self.scripts
|
||||
|
||||
def run(self):
|
||||
if not self.scripts:
|
||||
return
|
||||
self.copy_scripts()
|
||||
|
||||
|
||||
def copy_scripts(self):
|
||||
r"""Copy each script listed in 'self.scripts'; if it's marked as a
|
||||
Python script in the Unix way (first line matches 'first_line_re',
|
||||
ie. starts with "\#!" and contains "python"), then adjust the first
|
||||
line to refer to the current Python interpreter as we copy.
|
||||
"""
|
||||
self.mkpath(self.build_dir)
|
||||
outfiles = []
|
||||
updated_files = []
|
||||
for script in self.scripts:
|
||||
adjust = False
|
||||
script = convert_path(script)
|
||||
outfile = os.path.join(self.build_dir, os.path.basename(script))
|
||||
outfiles.append(outfile)
|
||||
|
||||
if not self.force and not newer(script, outfile):
|
||||
log.debug("not copying %s (up-to-date)", script)
|
||||
continue
|
||||
|
||||
# Always open the file, but ignore failures in dry-run mode --
|
||||
# that way, we'll get accurate feedback if we can read the
|
||||
# script.
|
||||
try:
|
||||
f = open(script, "rb")
|
||||
except OSError:
|
||||
if not self.dry_run:
|
||||
raise
|
||||
f = None
|
||||
else:
|
||||
encoding, lines = tokenize.detect_encoding(f.readline)
|
||||
f.seek(0)
|
||||
first_line = f.readline()
|
||||
if not first_line:
|
||||
self.warn("%s is an empty file (skipping)" % script)
|
||||
continue
|
||||
|
||||
match = first_line_re.match(first_line)
|
||||
if match:
|
||||
adjust = True
|
||||
post_interp = match.group(1) or b''
|
||||
|
||||
if adjust:
|
||||
log.info("copying and adjusting %s -> %s", script,
|
||||
self.build_dir)
|
||||
updated_files.append(outfile)
|
||||
if not self.dry_run:
|
||||
if not sysconfig.python_build:
|
||||
executable = self.executable
|
||||
else:
|
||||
executable = os.path.join(
|
||||
sysconfig.get_config_var("BINDIR"),
|
||||
"python%s%s" % (sysconfig.get_config_var("VERSION"),
|
||||
sysconfig.get_config_var("EXE")))
|
||||
executable = os.fsencode(executable)
|
||||
shebang = b"#!" + executable + post_interp + b"\n"
|
||||
# Python parser starts to read a script using UTF-8 until
|
||||
# it gets a #coding:xxx cookie. The shebang has to be the
|
||||
# first line of a file, the #coding:xxx cookie cannot be
|
||||
# written before. So the shebang has to be decodable from
|
||||
# UTF-8.
|
||||
try:
|
||||
shebang.decode('utf-8')
|
||||
except UnicodeDecodeError:
|
||||
raise ValueError(
|
||||
"The shebang ({!r}) is not decodable "
|
||||
"from utf-8".format(shebang))
|
||||
# If the script is encoded to a custom encoding (use a
|
||||
# #coding:xxx cookie), the shebang has to be decodable from
|
||||
# the script encoding too.
|
||||
try:
|
||||
shebang.decode(encoding)
|
||||
except UnicodeDecodeError:
|
||||
raise ValueError(
|
||||
"The shebang ({!r}) is not decodable "
|
||||
"from the script encoding ({})"
|
||||
.format(shebang, encoding))
|
||||
with open(outfile, "wb") as outf:
|
||||
outf.write(shebang)
|
||||
outf.writelines(f.readlines())
|
||||
if f:
|
||||
f.close()
|
||||
else:
|
||||
if f:
|
||||
f.close()
|
||||
updated_files.append(outfile)
|
||||
self.copy_file(script, outfile)
|
||||
|
||||
if os.name == 'posix':
|
||||
for file in outfiles:
|
||||
if self.dry_run:
|
||||
log.info("changing mode of %s", file)
|
||||
else:
|
||||
oldmode = os.stat(file)[ST_MODE] & 0o7777
|
||||
newmode = (oldmode | 0o555) & 0o7777
|
||||
if newmode != oldmode:
|
||||
log.info("changing mode of %s from %o to %o",
|
||||
file, oldmode, newmode)
|
||||
os.chmod(file, newmode)
|
||||
# XXX should we modify self.outfiles?
|
||||
return outfiles, updated_files
|
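
# Example of the adjustment above (hypothetical paths): a script whose first
# line is "#!/usr/bin/env python3 -u" matches first_line_re with
# post_interp == b" -u", so the copied script's first line becomes
# "#!" + the configured interpreter + " -u"
# (e.g. "#!/opt/py/bin/python3.10 -u"), while the rest of the file is
# copied unchanged.
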
@ -0,0 +1,148 @@
"""distutils.command.check
|
||||
|
||||
Implements the Distutils 'check' command.
|
||||
"""
|
||||
from distutils.core import Command
|
||||
from distutils.errors import DistutilsSetupError
|
||||
|
||||
try:
|
||||
# docutils is installed
|
||||
from docutils.utils import Reporter
|
||||
from docutils.parsers.rst import Parser
|
||||
from docutils import frontend
|
||||
from docutils import nodes
|
||||
|
||||
class SilentReporter(Reporter):
|
||||
|
||||
def __init__(self, source, report_level, halt_level, stream=None,
|
||||
debug=0, encoding='ascii', error_handler='replace'):
|
||||
self.messages = []
|
||||
Reporter.__init__(self, source, report_level, halt_level, stream,
|
||||
debug, encoding, error_handler)
|
||||
|
||||
def system_message(self, level, message, *children, **kwargs):
|
||||
self.messages.append((level, message, children, kwargs))
|
||||
return nodes.system_message(message, level=level,
|
||||
type=self.levels[level],
|
||||
*children, **kwargs)
|
||||
|
||||
HAS_DOCUTILS = True
|
||||
except Exception:
|
||||
# Catch all exceptions because exceptions besides ImportError probably
|
||||
# indicate that docutils is not ported to Py3k.
|
||||
HAS_DOCUTILS = False
|
||||
|
||||
class check(Command):
|
||||
"""This command checks the meta-data of the package.
|
||||
"""
|
||||
description = ("perform some checks on the package")
|
||||
user_options = [('metadata', 'm', 'Verify meta-data'),
|
||||
('restructuredtext', 'r',
|
||||
('Checks if long string meta-data syntax '
|
||||
'are reStructuredText-compliant')),
|
||||
('strict', 's',
|
||||
'Will exit with an error if a check fails')]
|
||||
|
||||
boolean_options = ['metadata', 'restructuredtext', 'strict']
|
||||
|
||||
def initialize_options(self):
|
||||
"""Sets default values for options."""
|
||||
self.restructuredtext = 0
|
||||
self.metadata = 1
|
||||
self.strict = 0
|
||||
self._warnings = 0
|
||||
|
||||
def finalize_options(self):
|
||||
pass
|
||||
|
||||
def warn(self, msg):
|
||||
"""Counts the number of warnings that occurs."""
|
||||
self._warnings += 1
|
||||
return Command.warn(self, msg)
|
||||
|
||||
def run(self):
|
||||
"""Runs the command."""
|
||||
# perform the various tests
|
||||
if self.metadata:
|
||||
self.check_metadata()
|
||||
if self.restructuredtext:
|
||||
if HAS_DOCUTILS:
|
||||
self.check_restructuredtext()
|
||||
elif self.strict:
|
||||
raise DistutilsSetupError('The docutils package is needed.')
|
||||
|
||||
# let's raise an error in strict mode, if we have at least
|
||||
# one warning
|
||||
if self.strict and self._warnings > 0:
|
||||
raise DistutilsSetupError('Please correct your package.')
|
||||
|
||||
def check_metadata(self):
|
||||
"""Ensures that all required elements of meta-data are supplied.
|
||||
|
||||
Required fields:
|
||||
name, version, URL
|
||||
|
||||
Recommended fields:
|
||||
(author and author_email) or (maintainer and maintainer_email))
|
||||
|
||||
Warns if any are missing.
|
||||
"""
|
||||
metadata = self.distribution.metadata
|
||||
|
||||
missing = []
|
||||
for attr in ('name', 'version', 'url'):
|
||||
if not (hasattr(metadata, attr) and getattr(metadata, attr)):
|
||||
missing.append(attr)
|
||||
|
||||
if missing:
|
||||
self.warn("missing required meta-data: %s" % ', '.join(missing))
|
||||
if metadata.author:
|
||||
if not metadata.author_email:
|
||||
self.warn("missing meta-data: if 'author' supplied, " +
|
||||
"'author_email' should be supplied too")
|
||||
elif metadata.maintainer:
|
||||
if not metadata.maintainer_email:
|
||||
self.warn("missing meta-data: if 'maintainer' supplied, " +
|
||||
"'maintainer_email' should be supplied too")
|
||||
else:
|
||||
self.warn("missing meta-data: either (author and author_email) " +
|
||||
"or (maintainer and maintainer_email) " +
|
||||
"should be supplied")
|
||||
|
||||
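
    # Example (hypothetical setup.py): a distribution declaring only
    # name='spam' and version='1.0' makes ``python setup.py check --strict``
    # warn "missing required meta-data: url" plus the author/maintainer
    # warning above, and then fail with 'Please correct your package.'
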
    def check_restructuredtext(self):
        """Checks if the long string fields are reST-compliant."""
        data = self.distribution.get_long_description()
        for warning in self._check_rst_data(data):
            line = warning[-1].get('line')
            if line is None:
                warning = warning[1]
            else:
                warning = '%s (line %s)' % (warning[1], line)
            self.warn(warning)

    def _check_rst_data(self, data):
        """Returns warnings when the provided data doesn't compile."""
        # the include and csv_table directives need this to be a path
        source_path = self.distribution.script_name or 'setup.py'
        parser = Parser()
        settings = frontend.OptionParser(components=(Parser,)).get_default_values()
        settings.tab_width = 4
        settings.pep_references = None
        settings.rfc_references = None
        reporter = SilentReporter(source_path,
                                  settings.report_level,
                                  settings.halt_level,
                                  stream=settings.warning_stream,
                                  debug=settings.debug,
                                  encoding=settings.error_encoding,
                                  error_handler=settings.error_encoding_error_handler)

        document = nodes.document(settings, reporter, source=source_path)
        document.note_source(source_path, -1)
        try:
            parser.parse(data, document)
        except AttributeError as e:
            reporter.messages.append(
                (-1, 'Could not finish the parsing: %s.' % e, '', {}))

        return reporter.messages
@ -0,0 +1,76 @@
"""distutils.command.clean
|
||||
|
||||
Implements the Distutils 'clean' command."""
|
||||
|
||||
# contributed by Bastian Kleineidam <calvin@cs.uni-sb.de>, added 2000-03-18
|
||||
|
||||
import os
|
||||
from distutils.core import Command
|
||||
from distutils.dir_util import remove_tree
|
||||
from distutils import log
|
||||
|
||||
class clean(Command):
|
||||
|
||||
description = "clean up temporary files from 'build' command"
|
||||
user_options = [
|
||||
('build-base=', 'b',
|
||||
"base build directory (default: 'build.build-base')"),
|
||||
('build-lib=', None,
|
||||
"build directory for all modules (default: 'build.build-lib')"),
|
||||
('build-temp=', 't',
|
||||
"temporary build directory (default: 'build.build-temp')"),
|
||||
('build-scripts=', None,
|
||||
"build directory for scripts (default: 'build.build-scripts')"),
|
||||
('bdist-base=', None,
|
||||
"temporary directory for built distributions"),
|
||||
('all', 'a',
|
||||
"remove all build output, not just temporary by-products")
|
||||
]
|
||||
|
||||
boolean_options = ['all']
|
||||
|
||||
def initialize_options(self):
|
||||
self.build_base = None
|
||||
self.build_lib = None
|
||||
self.build_temp = None
|
||||
self.build_scripts = None
|
||||
self.bdist_base = None
|
||||
self.all = None
|
||||
|
||||
def finalize_options(self):
|
||||
self.set_undefined_options('build',
|
||||
('build_base', 'build_base'),
|
||||
('build_lib', 'build_lib'),
|
||||
('build_scripts', 'build_scripts'),
|
||||
('build_temp', 'build_temp'))
|
||||
self.set_undefined_options('bdist',
|
||||
('bdist_base', 'bdist_base'))
|
||||
|
||||
def run(self):
|
||||
# remove the build/temp.<plat> directory (unless it's already
|
||||
# gone)
|
||||
if os.path.exists(self.build_temp):
|
||||
remove_tree(self.build_temp, dry_run=self.dry_run)
|
||||
else:
|
||||
log.debug("'%s' does not exist -- can't clean it",
|
||||
self.build_temp)
|
||||
|
||||
if self.all:
|
||||
# remove build directories
|
||||
for directory in (self.build_lib,
|
||||
self.bdist_base,
|
||||
self.build_scripts):
|
||||
if os.path.exists(directory):
|
||||
remove_tree(directory, dry_run=self.dry_run)
|
||||
else:
|
||||
log.warn("'%s' does not exist -- can't clean it",
|
||||
directory)
|
||||
|
||||
# just for the heck of it, try to remove the base build directory:
|
||||
# we might have emptied it right now, but if not we don't care
|
||||
if not self.dry_run:
|
||||
try:
|
||||
os.rmdir(self.build_base)
|
||||
log.info("removing '%s'", self.build_base)
|
||||
except OSError:
|
||||
pass
|
@ -0,0 +1,344 @@
"""distutils.command.config
|
||||
|
||||
Implements the Distutils 'config' command, a (mostly) empty command class
|
||||
that exists mainly to be sub-classed by specific module distributions and
|
||||
applications. The idea is that while every "config" command is different,
|
||||
at least they're all named the same, and users always see "config" in the
|
||||
list of standard commands. Also, this is a good place to put common
|
||||
configure-like tasks: "try to compile this C code", or "figure out where
|
||||
this header file lives".
|
||||
"""
|
||||
|
||||
import os, re
|
||||
|
||||
from distutils.core import Command
|
||||
from distutils.errors import DistutilsExecError
|
||||
from distutils.sysconfig import customize_compiler
|
||||
from distutils import log
|
||||
|
||||
LANG_EXT = {"c": ".c", "c++": ".cxx"}
|
||||
|
||||
class config(Command):
|
||||
|
||||
description = "prepare to build"
|
||||
|
||||
user_options = [
|
||||
('compiler=', None,
|
||||
"specify the compiler type"),
|
||||
('cc=', None,
|
||||
"specify the compiler executable"),
|
||||
('include-dirs=', 'I',
|
||||
"list of directories to search for header files"),
|
||||
('define=', 'D',
|
||||
"C preprocessor macros to define"),
|
||||
('undef=', 'U',
|
||||
"C preprocessor macros to undefine"),
|
||||
('libraries=', 'l',
|
||||
"external C libraries to link with"),
|
||||
('library-dirs=', 'L',
|
||||
"directories to search for external C libraries"),
|
||||
|
||||
('noisy', None,
|
||||
"show every action (compile, link, run, ...) taken"),
|
||||
('dump-source', None,
|
||||
"dump generated source files before attempting to compile them"),
|
||||
]
|
||||
|
||||
|
||||
# The three standard command methods: since the "config" command
|
||||
# does nothing by default, these are empty.
|
||||
|
||||
def initialize_options(self):
|
||||
self.compiler = None
|
||||
self.cc = None
|
||||
self.include_dirs = None
|
||||
self.libraries = None
|
||||
self.library_dirs = None
|
||||
|
||||
# maximal output for now
|
||||
self.noisy = 1
|
||||
self.dump_source = 1
|
||||
|
||||
# list of temporary files generated along-the-way that we have
|
||||
# to clean at some point
|
||||
self.temp_files = []
|
||||
|
||||
def finalize_options(self):
|
||||
if self.include_dirs is None:
|
||||
self.include_dirs = self.distribution.include_dirs or []
|
||||
elif isinstance(self.include_dirs, str):
|
||||
self.include_dirs = self.include_dirs.split(os.pathsep)
|
||||
|
||||
if self.libraries is None:
|
||||
self.libraries = []
|
||||
elif isinstance(self.libraries, str):
|
||||
self.libraries = [self.libraries]
|
||||
|
||||
if self.library_dirs is None:
|
||||
self.library_dirs = []
|
||||
elif isinstance(self.library_dirs, str):
|
||||
self.library_dirs = self.library_dirs.split(os.pathsep)
|
||||
|
||||
def run(self):
|
||||
pass
|
||||
|
||||
# Utility methods for actual "config" commands. The interfaces are
|
||||
# loosely based on Autoconf macros of similar names. Sub-classes
|
||||
# may use these freely.
|
||||
|
||||
def _check_compiler(self):
|
||||
"""Check that 'self.compiler' really is a CCompiler object;
|
||||
if not, make it one.
|
||||
"""
|
||||
# We do this late, and only on-demand, because this is an expensive
|
||||
# import.
|
||||
from distutils.ccompiler import CCompiler, new_compiler
|
||||
if not isinstance(self.compiler, CCompiler):
|
||||
self.compiler = new_compiler(compiler=self.compiler,
|
||||
dry_run=self.dry_run, force=1)
|
||||
customize_compiler(self.compiler)
|
||||
if self.include_dirs:
|
||||
self.compiler.set_include_dirs(self.include_dirs)
|
||||
if self.libraries:
|
||||
self.compiler.set_libraries(self.libraries)
|
||||
if self.library_dirs:
|
||||
self.compiler.set_library_dirs(self.library_dirs)
|
||||
|
||||
def _gen_temp_sourcefile(self, body, headers, lang):
|
||||
filename = "_configtest" + LANG_EXT[lang]
|
||||
with open(filename, "w") as file:
|
||||
if headers:
|
||||
for header in headers:
|
||||
file.write("#include <%s>\n" % header)
|
||||
file.write("\n")
|
||||
file.write(body)
|
||||
if body[-1] != "\n":
|
||||
file.write("\n")
|
||||
return filename
|
||||
|
||||
def _preprocess(self, body, headers, include_dirs, lang):
|
||||
src = self._gen_temp_sourcefile(body, headers, lang)
|
||||
out = "_configtest.i"
|
||||
self.temp_files.extend([src, out])
|
||||
self.compiler.preprocess(src, out, include_dirs=include_dirs)
|
||||
return (src, out)
|
||||
|
||||
def _compile(self, body, headers, include_dirs, lang):
|
||||
src = self._gen_temp_sourcefile(body, headers, lang)
|
||||
if self.dump_source:
|
||||
dump_file(src, "compiling '%s':" % src)
|
||||
(obj,) = self.compiler.object_filenames([src])
|
||||
self.temp_files.extend([src, obj])
|
||||
self.compiler.compile([src], include_dirs=include_dirs)
|
||||
return (src, obj)
|
||||
|
||||
def _link(self, body, headers, include_dirs, libraries, library_dirs,
|
||||
lang):
|
||||
(src, obj) = self._compile(body, headers, include_dirs, lang)
|
||||
prog = os.path.splitext(os.path.basename(src))[0]
|
||||
self.compiler.link_executable([obj], prog,
|
||||
libraries=libraries,
|
||||
library_dirs=library_dirs,
|
||||
target_lang=lang)
|
||||
|
||||
if self.compiler.exe_extension is not None:
|
||||
prog = prog + self.compiler.exe_extension
|
||||
self.temp_files.append(prog)
|
||||
|
||||
return (src, obj, prog)
|
||||
|
||||
def _clean(self, *filenames):
|
||||
if not filenames:
|
||||
filenames = self.temp_files
|
||||
self.temp_files = []
|
||||
log.info("removing: %s", ' '.join(filenames))
|
||||
for filename in filenames:
|
||||
try:
|
||||
os.remove(filename)
|
||||
except OSError:
|
||||
pass
|
||||
|
||||
|
||||
# XXX these ignore the dry-run flag: what to do, what to do? even if
|
||||
    # you want a dry-run build, you still need some sort of configuration
    # info. My inclination is to make it up to the real config command to
    # consult 'dry_run', and assume a default (minimal) configuration if
    # true. The problem with trying to do it here is that you'd have to
    # return either true or false from all the 'try' methods, neither of
    # which is correct.

    # XXX need access to the header search path and maybe default macros.

    def try_cpp(self, body=None, headers=None, include_dirs=None, lang="c"):
        """Construct a source file from 'body' (a string containing lines
        of C/C++ code) and 'headers' (a list of header files to include)
        and run it through the preprocessor. Return true if the
        preprocessor succeeded, false if there were any errors.
        ('body' probably isn't of much use, but what the heck.)
        """
        from distutils.ccompiler import CompileError
        self._check_compiler()
        ok = True
        try:
            self._preprocess(body, headers, include_dirs, lang)
        except CompileError:
            ok = False

        self._clean()
        return ok

    def search_cpp(self, pattern, body=None, headers=None, include_dirs=None,
                   lang="c"):
        """Construct a source file (just like 'try_cpp()'), run it through
        the preprocessor, and return true if any line of the output matches
        'pattern'. 'pattern' should either be a compiled regex object or a
        string containing a regex. If both 'body' and 'headers' are None,
        preprocesses an empty file -- which can be useful to determine the
        symbols the preprocessor and compiler set by default.
        """
        self._check_compiler()
        src, out = self._preprocess(body, headers, include_dirs, lang)

        if isinstance(pattern, str):
            pattern = re.compile(pattern)

        with open(out) as file:
            match = False
            while True:
                line = file.readline()
                if line == '':
                    break
                if pattern.search(line):
                    match = True
                    break

        self._clean()
        return match

    def try_compile(self, body, headers=None, include_dirs=None, lang="c"):
        """Try to compile a source file built from 'body' and 'headers'.
        Return true on success, false otherwise.
        """
        from distutils.ccompiler import CompileError
        self._check_compiler()
        try:
            self._compile(body, headers, include_dirs, lang)
            ok = True
        except CompileError:
            ok = False

        log.info(ok and "success!" or "failure.")
        self._clean()
        return ok

    def try_link(self, body, headers=None, include_dirs=None, libraries=None,
                 library_dirs=None, lang="c"):
        """Try to compile and link a source file, built from 'body' and
        'headers', to executable form. Return true on success, false
        otherwise.
        """
        from distutils.ccompiler import CompileError, LinkError
        self._check_compiler()
        try:
            self._link(body, headers, include_dirs,
                       libraries, library_dirs, lang)
            ok = True
        except (CompileError, LinkError):
            ok = False

        log.info(ok and "success!" or "failure.")
        self._clean()
        return ok

    def try_run(self, body, headers=None, include_dirs=None, libraries=None,
                library_dirs=None, lang="c"):
        """Try to compile, link to an executable, and run a program
        built from 'body' and 'headers'. Return true on success, false
        otherwise.
        """
        from distutils.ccompiler import CompileError, LinkError
        self._check_compiler()
        try:
            src, obj, exe = self._link(body, headers, include_dirs,
                                       libraries, library_dirs, lang)
            self.spawn([exe])
            ok = True
        except (CompileError, LinkError, DistutilsExecError):
            ok = False

        log.info(ok and "success!" or "failure.")
        self._clean()
        return ok


    # -- High-level methods --------------------------------------------
    # (these are the ones that are actually likely to be useful
    # when implementing a real-world config command!)

    def check_func(self, func, headers=None, include_dirs=None,
                   libraries=None, library_dirs=None, decl=0, call=0):
        """Determine if function 'func' is available by constructing a
        source file that refers to 'func', and compiles and links it.
        If everything succeeds, returns true; otherwise returns false.

        The constructed source file starts out by including the header
        files listed in 'headers'. If 'decl' is true, it then declares
        'func' (as "int func()"); you probably shouldn't supply 'headers'
        and set 'decl' true in the same call, or you might get errors about
        conflicting declarations for 'func'. Finally, the constructed
        'main()' function either references 'func' or (if 'call' is true)
        calls it. 'libraries' and 'library_dirs' are used when
        linking.
        """
        self._check_compiler()
        body = []
        if decl:
            body.append("int %s ();" % func)
        body.append("int main () {")
        if call:
            body.append("  %s();" % func)
        else:
            body.append("  %s;" % func)
        body.append("}")
        body = "\n".join(body) + "\n"

        return self.try_link(body, headers, include_dirs,
                             libraries, library_dirs)
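
    # Illustrative note (not part of the original source): for a call such
    # as check_func('pow', decl=1, call=1), the 'body' constructed above is
    #
    #   int pow ();
    #   int main () {
    #     pow();
    #   }
    #
    # so availability is probed simply by whether that little program
    # compiles and links.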

    def check_lib(self, library, library_dirs=None, headers=None,
                  include_dirs=None, other_libraries=[]):
        """Determine if 'library' is available to be linked against,
        without actually checking that any particular symbols are provided
        by it. 'headers' will be used in constructing the source file to
        be compiled, but the only effect of this is to check if all the
        header files listed are available. Any libraries listed in
        'other_libraries' will be included in the link, in case 'library'
        has symbols that depend on other libraries.
        """
        self._check_compiler()
        return self.try_link("int main (void) { }", headers, include_dirs,
                             [library] + other_libraries, library_dirs)

    def check_header(self, header, include_dirs=None, library_dirs=None,
                     lang="c"):
"""Determine if the system header file named by 'header_file'
|
||||
        exists and can be found by the preprocessor; return true if so,
        false otherwise.
        """
        return self.try_cpp(body="/* No body */", headers=[header],
                            include_dirs=include_dirs)

def dump_file(filename, head=None):
    """Dumps a file's contents via log.info.

    If 'head' is not None, it is logged before the file content.
    """
    if head is None:
        log.info('%s', filename)
    else:
        log.info(head)
    with open(filename) as file:
        log.info(file.read())
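
# A minimal usage sketch (not part of the original module), exercising
# 'dump_file' above on a throwaway temporary file. The file name and
# contents are hypothetical; distutils logging is routed to stdout first.
if __name__ == '__main__':
    import tempfile
    log.set_verbosity(log.INFO)
    with tempfile.NamedTemporaryFile('w', suffix='.log', delete=False) as tmp:
        tmp.write('checking for pow... yes\n')
    dump_file(tmp.name, head='config.log contents:')
    os.unlink(tmp.name)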
@@ -0,0 +1,773 @@
"""distutils.command.install
|
||||
|
||||
Implements the Distutils 'install' command."""
|
||||
|
||||
import sys
|
||||
import os
|
||||
import contextlib
|
||||
import sysconfig
|
||||
import itertools
|
||||
|
||||
from distutils import log
|
||||
from distutils.core import Command
|
||||
from distutils.debug import DEBUG
|
||||
from distutils.sysconfig import get_config_vars
|
||||
from distutils.errors import DistutilsPlatformError
|
||||
from distutils.file_util import write_file
|
||||
from distutils.util import convert_path, subst_vars, change_root
|
||||
from distutils.util import get_platform
|
||||
from distutils.errors import DistutilsOptionError
|
||||
|
||||
from site import USER_BASE
|
||||
from site import USER_SITE
|
||||
HAS_USER_SITE = True
|
||||
|
||||
WINDOWS_SCHEME = {
|
||||
'purelib': '{base}/Lib/site-packages',
|
||||
'platlib': '{base}/Lib/site-packages',
|
||||
'headers': '{base}/Include/{dist_name}',
|
||||
'scripts': '{base}/Scripts',
|
||||
'data' : '{base}',
|
||||
}
|
||||
|
||||
INSTALL_SCHEMES = {
|
||||
'posix_prefix': {
|
||||
'purelib': '{base}/lib/{implementation_lower}{py_version_short}/site-packages',
|
||||
'platlib': '{platbase}/{platlibdir}/{implementation_lower}{py_version_short}/site-packages',
|
||||
'headers': '{base}/include/{implementation_lower}{py_version_short}{abiflags}/{dist_name}',
|
||||
'scripts': '{base}/bin',
|
||||
'data' : '{base}',
|
||||
},
|
||||
'posix_home': {
|
||||
'purelib': '{base}/lib/{implementation_lower}',
|
||||
'platlib': '{base}/{platlibdir}/{implementation_lower}',
|
||||
'headers': '{base}/include/{implementation_lower}/{dist_name}',
|
||||
'scripts': '{base}/bin',
|
||||
'data' : '{base}',
|
||||
},
|
||||
'nt': WINDOWS_SCHEME,
|
||||
'pypy': {
|
||||
'purelib': '{base}/site-packages',
|
||||
'platlib': '{base}/site-packages',
|
||||
'headers': '{base}/include/{dist_name}',
|
||||
'scripts': '{base}/bin',
|
||||
'data' : '{base}',
|
||||
},
|
||||
'pypy_nt': {
|
||||
'purelib': '{base}/site-packages',
|
||||
'platlib': '{base}/site-packages',
|
||||
'headers': '{base}/include/{dist_name}',
|
||||
'scripts': '{base}/Scripts',
|
||||
'data' : '{base}',
|
||||
},
|
||||
}
|
||||
|
||||
# user site schemes
|
||||
if HAS_USER_SITE:
|
||||
INSTALL_SCHEMES['nt_user'] = {
|
||||
'purelib': '{usersite}',
|
||||
'platlib': '{usersite}',
|
||||
'headers': '{userbase}/{implementation}{py_version_nodot}/Include/{dist_name}',
|
||||
'scripts': '{userbase}/{implementation}{py_version_nodot}/Scripts',
|
||||
'data' : '{userbase}',
|
||||
}
|
||||
|
||||
INSTALL_SCHEMES['posix_user'] = {
|
||||
'purelib': '{usersite}',
|
||||
'platlib': '{usersite}',
|
||||
'headers':
|
||||
'{userbase}/include/{implementation_lower}{py_version_short}{abiflags}/{dist_name}',
|
||||
'scripts': '{userbase}/bin',
|
||||
'data' : '{userbase}',
|
||||
}
|
||||
|
||||
# The keys to an installation scheme; if any new types of files are to be
|
||||
# installed, be sure to add an entry to every installation scheme above,
|
||||
# and to SCHEME_KEYS here.
|
||||
SCHEME_KEYS = ('purelib', 'platlib', 'headers', 'scripts', 'data')
|
||||
|
||||
|
||||
def _load_sysconfig_schemes():
|
||||
with contextlib.suppress(AttributeError):
|
||||
return {
|
||||
scheme: sysconfig.get_paths(scheme, expand=False)
|
||||
for scheme in sysconfig.get_scheme_names()
|
||||
}
|
||||
|
||||
|
||||
def _load_schemes():
|
||||
"""
|
||||
Extend default schemes with schemes from sysconfig.
|
||||
"""
|
||||
|
||||
sysconfig_schemes = _load_sysconfig_schemes() or {}
|
||||
|
||||
return {
|
||||
scheme: {
|
||||
**INSTALL_SCHEMES.get(scheme, {}),
|
||||
**sysconfig_schemes.get(scheme, {}),
|
||||
}
|
||||
for scheme in set(itertools.chain(INSTALL_SCHEMES, sysconfig_schemes))
|
||||
}
|
||||
|
||||
|
||||
def _get_implementation():
|
||||
if hasattr(sys, 'pypy_version_info'):
|
||||
return 'PyPy'
|
||||
else:
|
||||
return 'Python'
|
||||
|
||||
|
||||
def _select_scheme(ob, name):
|
||||
scheme = _inject_headers(name, _load_scheme(_resolve_scheme(name)))
|
||||
vars(ob).update(_remove_set(ob, _scheme_attrs(scheme)))
|
||||
|
||||
|
||||
def _remove_set(ob, attrs):
|
||||
"""
|
||||
Include only attrs that are None in ob.
|
||||
"""
|
||||
return {
|
||||
key: value
|
||||
for key, value in attrs.items()
|
||||
if getattr(ob, key) is None
|
||||
}
|
||||
|
||||
|
||||
def _resolve_scheme(name):
|
||||
os_name, sep, key = name.partition('_')
|
||||
try:
|
||||
resolved = sysconfig.get_preferred_scheme(key)
|
||||
except Exception:
|
||||
resolved = _pypy_hack(name)
|
||||
return resolved
|
||||
|
||||
|
||||
def _load_scheme(name):
|
||||
return _load_schemes()[name]
|
||||
|
||||
|
||||
def _inject_headers(name, scheme):
|
||||
"""
|
||||
Given a scheme name and the resolved scheme,
|
||||
if the scheme does not include headers, resolve
|
||||
the fallback scheme for the name and use headers
|
||||
from it. pypa/distutils#88
|
||||
"""
|
||||
# Bypass the preferred scheme, which may not
|
||||
# have defined headers.
|
||||
fallback = _load_scheme(_pypy_hack(name))
|
||||
scheme.setdefault('headers', fallback['headers'])
|
||||
return scheme
|
||||
|
||||
|
||||
def _scheme_attrs(scheme):
|
||||
"""Resolve install directories by applying the install schemes."""
|
||||
return {
|
||||
f'install_{key}': scheme[key]
|
||||
for key in SCHEME_KEYS
|
||||
}
|
||||
|
||||
|
||||
def _pypy_hack(name):
|
||||
PY37 = sys.version_info < (3, 8)
|
||||
old_pypy = hasattr(sys, 'pypy_version_info') and PY37
|
||||
prefix = not name.endswith(('_user', '_home'))
|
||||
pypy_name = 'pypy' + '_nt' * (os.name == 'nt')
|
||||
return pypy_name if old_pypy and prefix else name
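

# A small illustrative sketch (not part of the original module): resolving a
# scheme name with the helpers above and printing its path templates. The
# '{base}'-style placeholders are expanded later by the 'install' command.
if __name__ == '__main__':
    resolved = _resolve_scheme('posix_prefix')
    for key in SCHEME_KEYS:
        print(key, '->', _load_scheme(resolved).get(key))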


class install(Command):

    description = "install everything from build directory"

    user_options = [
        # Select installation scheme and set base director(y|ies)
        ('prefix=', None,
         "installation prefix"),
        ('exec-prefix=', None,
         "(Unix only) prefix for platform-specific files"),
        ('home=', None,
         "(Unix only) home directory to install under"),

        # Or, just set the base director(y|ies)
        ('install-base=', None,
         "base installation directory (instead of --prefix or --home)"),
        ('install-platbase=', None,
         "base installation directory for platform-specific files " +
         "(instead of --exec-prefix or --home)"),
        ('root=', None,
         "install everything relative to this alternate root directory"),

        # Or, explicitly set the installation scheme
        ('install-purelib=', None,
         "installation directory for pure Python module distributions"),
        ('install-platlib=', None,
         "installation directory for non-pure module distributions"),
        ('install-lib=', None,
         "installation directory for all module distributions " +
         "(overrides --install-purelib and --install-platlib)"),

        ('install-headers=', None,
         "installation directory for C/C++ headers"),
        ('install-scripts=', None,
         "installation directory for Python scripts"),
        ('install-data=', None,
         "installation directory for data files"),

        # Byte-compilation options -- see install_lib.py for details, as
        # these are duplicated from there (but only install_lib does
        # anything with them).
        ('compile', 'c', "compile .py to .pyc [default]"),
        ('no-compile', None, "don't compile .py files"),
        ('optimize=', 'O',
         "also compile with optimization: -O1 for \"python -O\", "
         "-O2 for \"python -OO\", and -O0 to disable [default: -O0]"),

        # Miscellaneous control options
        ('force', 'f',
         "force installation (overwrite any existing files)"),
        ('skip-build', None,
         "skip rebuilding everything (for testing/debugging)"),

        # Where to install documentation (eventually!)
        #('doc-format=', None, "format of documentation to generate"),
        #('install-man=', None, "directory for Unix man pages"),
        #('install-html=', None, "directory for HTML documentation"),
        #('install-info=', None, "directory for GNU info files"),

        ('record=', None,
         "filename in which to record list of installed files"),
        ]

    boolean_options = ['compile', 'force', 'skip-build']

    if HAS_USER_SITE:
        user_options.append(('user', None,
                             "install in user site-package '%s'" % USER_SITE))
        boolean_options.append('user')

    negative_opt = {'no-compile' : 'compile'}


    def initialize_options(self):
        """Initializes options."""
        # High-level options: these select both an installation base
        # and scheme.
        self.prefix = None
        self.exec_prefix = None
        self.home = None
        self.user = 0

        # These select only the installation base; it's up to the user to
        # specify the installation scheme (currently, that means supplying
        # the --install-{platlib,purelib,scripts,data} options).
        self.install_base = None
        self.install_platbase = None
        self.root = None

        # These options are the actual installation directories; if not
        # supplied by the user, they are filled in using the installation
        # scheme implied by prefix/exec-prefix/home and the contents of
        # that installation scheme.
        self.install_purelib = None     # for pure module distributions
        self.install_platlib = None     # non-pure (dists w/ extensions)
        self.install_headers = None     # for C/C++ headers
        self.install_lib = None         # set to either purelib or platlib
        self.install_scripts = None
        self.install_data = None
        self.install_userbase = USER_BASE
        self.install_usersite = USER_SITE

        self.compile = None
        self.optimize = None

        # Deprecated
        # These two are for putting non-packagized distributions into their
        # own directory and creating a .pth file if it makes sense.
        # 'extra_path' comes from the setup file; 'install_path_file' can
        # be turned off if it makes no sense to install a .pth file. (But
        # better to install it uselessly than to guess wrong and not
        # install it when it's necessary and would be used!) Currently,
        # 'install_path_file' is always true unless some outsider meddles
        # with it.
        self.extra_path = None
        self.install_path_file = 1

        # 'force' forces installation, even if target files are not
        # out-of-date. 'skip_build' skips running the "build" command,
        # handy if you know it's not necessary. 'warn_dir' (which is *not*
        # a user option, it's just there so the bdist_* commands can turn
        # it off) determines whether we warn about installing to a
        # directory not in sys.path.
        self.force = 0
        self.skip_build = 0
        self.warn_dir = 1

        # These are only here as a conduit from the 'build' command to the
        # 'install_*' commands that do the real work. ('build_base' isn't
        # actually used anywhere, but it might be useful in future.) They
        # are not user options, because if the user told the install
        # command where the build directory is, that wouldn't affect the
        # build command.
        self.build_base = None
        self.build_lib = None

        # Not defined yet because we don't know anything about
        # documentation yet.
        #self.install_man = None
        #self.install_html = None
        #self.install_info = None

        self.record = None


    # -- Option finalizing methods -------------------------------------
    # (This is rather more involved than for most commands,
    # because this is where the policy for installing third-
    # party Python modules on various platforms given a wide
    # array of user input is decided. Yes, it's quite complex!)

    def finalize_options(self):
        """Finalizes options."""
        # This method (and its helpers, like 'finalize_unix()',
        # 'finalize_other()', and 'select_scheme()') is where the default
        # installation directories for modules, extension modules, and
        # anything else we care to install from a Python module
        # distribution are determined. Thus, this code makes a pretty
        # important policy statement about how third-party stuff is added
        # to a Python installation! Note that the actual work of
        # installation is done by the relatively simple 'install_*'
        # commands; they just take their orders from the installation
        # directory options determined here.

        # Check for errors/inconsistencies in the options; first, stuff
        # that's wrong on any platform.

        if ((self.prefix or self.exec_prefix or self.home) and
            (self.install_base or self.install_platbase)):
            raise DistutilsOptionError(
                "must supply either prefix/exec-prefix/home or " +
                "install-base/install-platbase -- not both")

        if self.home and (self.prefix or self.exec_prefix):
            raise DistutilsOptionError(
                "must supply either home or prefix/exec-prefix -- not both")

        if self.user and (self.prefix or self.exec_prefix or self.home or
                          self.install_base or self.install_platbase):
            raise DistutilsOptionError("can't combine user with prefix, "
                                       "exec_prefix/home, or install_(plat)base")

        # Next, stuff that's wrong (or dubious) only on certain platforms.
        if os.name != "posix":
            if self.exec_prefix:
                self.warn("exec-prefix option ignored on this platform")
                self.exec_prefix = None

        # Now the interesting logic -- so interesting that we farm it out
        # to other methods. The goal of these methods is to set the final
        # values for the install_{lib,scripts,data,...} options, using as
        # input a heady brew of prefix, exec_prefix, home, install_base,
        # install_platbase, user-supplied versions of
        # install_{purelib,platlib,lib,scripts,data,...}, and the
        # install schemes. Phew!

        self.dump_dirs("pre-finalize_{unix,other}")

        if os.name == 'posix':
            self.finalize_unix()
        else:
            self.finalize_other()

        self.dump_dirs("post-finalize_{unix,other}()")

        # Expand configuration variables, tilde, etc. in self.install_base
        # and self.install_platbase -- that way, we can use $base or
        # $platbase in the other installation directories and not worry
        # about needing recursive variable expansion (shudder).

        py_version = sys.version.split()[0]
        (prefix, exec_prefix) = get_config_vars('prefix', 'exec_prefix')
        try:
            abiflags = sys.abiflags
        except AttributeError:
            # sys.abiflags may not be defined on all platforms.
            abiflags = ''
        self.config_vars = {'dist_name': self.distribution.get_name(),
                            'dist_version': self.distribution.get_version(),
                            'dist_fullname': self.distribution.get_fullname(),
                            'py_version': py_version,
                            'py_version_short': '%d.%d' % sys.version_info[:2],
                            'py_version_nodot': '%d%d' % sys.version_info[:2],
                            'sys_prefix': prefix,
                            'prefix': prefix,
                            'sys_exec_prefix': exec_prefix,
                            'exec_prefix': exec_prefix,
                            'abiflags': abiflags,
                            'platlibdir': getattr(sys, 'platlibdir', 'lib'),
                            'implementation_lower': _get_implementation().lower(),
                            'implementation': _get_implementation(),
                            'platsubdir': sysconfig.get_config_var('platsubdir'),
                            }

        if HAS_USER_SITE:
            self.config_vars['userbase'] = self.install_userbase
            self.config_vars['usersite'] = self.install_usersite

        self.expand_basedirs()

        self.dump_dirs("post-expand_basedirs()")

        # Now define config vars for the base directories so we can expand
        # everything else.
        self.config_vars['base'] = self.install_base
        self.config_vars['platbase'] = self.install_platbase
        self.config_vars['installed_base'] = (
            sysconfig.get_config_vars()['installed_base'])

        if DEBUG:
            from pprint import pprint
            print("config vars:")
            pprint(self.config_vars)

        # Expand "~" and configuration variables in the installation
        # directories.
        self.expand_dirs()

        self.dump_dirs("post-expand_dirs()")

        # Create directories in the home dir:
        if self.user:
            self.create_home_path()

        # Pick the actual directory to install all modules to: either
        # install_purelib or install_platlib, depending on whether this
        # module distribution is pure or not. Of course, if the user
        # already specified install_lib, use their selection.
        if self.install_lib is None:
            if self.distribution.has_ext_modules():  # has extensions: non-pure
                self.install_lib = self.install_platlib
            else:
                self.install_lib = self.install_purelib


        # Convert directories from Unix /-separated syntax to the local
        # convention.
        self.convert_paths('lib', 'purelib', 'platlib',
                           'scripts', 'data', 'headers',
                           'userbase', 'usersite')

        # Deprecated
        # Well, we're not actually fully completely finalized yet: we still
        # have to deal with 'extra_path', which is the hack for allowing
        # non-packagized module distributions (hello, Numerical Python!) to
        # get their own directories.
        self.handle_extra_path()
        self.install_libbase = self.install_lib    # needed for .pth file
        self.install_lib = os.path.join(self.install_lib, self.extra_dirs)

        # If a new root directory was supplied, make all the installation
        # dirs relative to it.
        if self.root is not None:
            self.change_roots('libbase', 'lib', 'purelib', 'platlib',
                              'scripts', 'data', 'headers')

        self.dump_dirs("after prepending root")

        # Find out the build directories, ie. where to install from.
        self.set_undefined_options('build',
                                   ('build_base', 'build_base'),
                                   ('build_lib', 'build_lib'))

        # Punt on doc directories for now -- after all, we're punting on
        # documentation completely!

    def dump_dirs(self, msg):
        """Dumps the list of user options."""
        if not DEBUG:
            return
        from distutils.fancy_getopt import longopt_xlate
        log.debug(msg + ":")
        for opt in self.user_options:
            opt_name = opt[0]
            if opt_name[-1] == "=":
                opt_name = opt_name[0:-1]
            if opt_name in self.negative_opt:
                opt_name = self.negative_opt[opt_name]
                opt_name = opt_name.translate(longopt_xlate)
                val = not getattr(self, opt_name)
            else:
                opt_name = opt_name.translate(longopt_xlate)
                val = getattr(self, opt_name)
            log.debug("  %s: %s", opt_name, val)

    def finalize_unix(self):
        """Finalizes options for posix platforms."""
        if self.install_base is not None or self.install_platbase is not None:
            incomplete_scheme = (
                (
                    self.install_lib is None and
                    self.install_purelib is None and
                    self.install_platlib is None
                ) or
                self.install_headers is None or
                self.install_scripts is None or
                self.install_data is None
            )
            if incomplete_scheme:
                raise DistutilsOptionError(
                    "install-base or install-platbase supplied, but "
                    "installation scheme is incomplete")
            return

        if self.user:
            if self.install_userbase is None:
                raise DistutilsPlatformError(
                    "User base directory is not specified")
            self.install_base = self.install_platbase = self.install_userbase
            self.select_scheme("posix_user")
        elif self.home is not None:
            self.install_base = self.install_platbase = self.home
            self.select_scheme("posix_home")
        else:
            if self.prefix is None:
                if self.exec_prefix is not None:
                    raise DistutilsOptionError(
                        "must not supply exec-prefix without prefix")

                # Allow Fedora to add components to the prefix
                _prefix_addition = getattr(sysconfig, '_prefix_addition', "")

                self.prefix = (
                    os.path.normpath(sys.prefix) + _prefix_addition)
                self.exec_prefix = (
                    os.path.normpath(sys.exec_prefix) + _prefix_addition)

            else:
                if self.exec_prefix is None:
                    self.exec_prefix = self.prefix

            self.install_base = self.prefix
            self.install_platbase = self.exec_prefix
            self.select_scheme("posix_prefix")

    def finalize_other(self):
        """Finalizes options for non-posix platforms"""
        if self.user:
            if self.install_userbase is None:
                raise DistutilsPlatformError(
                    "User base directory is not specified")
            self.install_base = self.install_platbase = self.install_userbase
            self.select_scheme(os.name + "_user")
        elif self.home is not None:
            self.install_base = self.install_platbase = self.home
            self.select_scheme("posix_home")
        else:
            if self.prefix is None:
                self.prefix = os.path.normpath(sys.prefix)

            self.install_base = self.install_platbase = self.prefix
            try:
                self.select_scheme(os.name)
            except KeyError:
                raise DistutilsPlatformError(
                    "I don't know how to install stuff on '%s'" % os.name)

    def select_scheme(self, name):
        _select_scheme(self, name)

    def _expand_attrs(self, attrs):
        for attr in attrs:
            val = getattr(self, attr)
            if val is not None:
                if os.name == 'posix' or os.name == 'nt':
                    val = os.path.expanduser(val)
                val = subst_vars(val, self.config_vars)
                setattr(self, attr, val)

    def expand_basedirs(self):
        """Calls `os.path.expanduser` on install_base, install_platbase and
        root."""
        self._expand_attrs(['install_base', 'install_platbase', 'root'])

    def expand_dirs(self):
        """Calls `os.path.expanduser` on install dirs."""
        self._expand_attrs(['install_purelib', 'install_platlib',
                            'install_lib', 'install_headers',
                            'install_scripts', 'install_data',])

    def convert_paths(self, *names):
        """Call `convert_path` over `names`."""
        for name in names:
            attr = "install_" + name
            setattr(self, attr, convert_path(getattr(self, attr)))

    def handle_extra_path(self):
        """Set `path_file` and `extra_dirs` using `extra_path`."""
        if self.extra_path is None:
            self.extra_path = self.distribution.extra_path

        if self.extra_path is not None:
            log.warn(
                "Distribution option extra_path is deprecated. "
                "See issue27919 for details."
            )
            if isinstance(self.extra_path, str):
                self.extra_path = self.extra_path.split(',')

            if len(self.extra_path) == 1:
                path_file = extra_dirs = self.extra_path[0]
            elif len(self.extra_path) == 2:
                path_file, extra_dirs = self.extra_path
            else:
                raise DistutilsOptionError(
                    "'extra_path' option must be a list, tuple, or "
                    "comma-separated string with 1 or 2 elements")

            # convert to local form in case Unix notation used (as it
            # should be in setup scripts)
            extra_dirs = convert_path(extra_dirs)
        else:
            path_file = None
            extra_dirs = ''

        # XXX should we warn if path_file and not extra_dirs? (in which
        # case the path file would be harmless but pointless)
        self.path_file = path_file
        self.extra_dirs = extra_dirs

    def change_roots(self, *names):
"""Change the install directories pointed by name using root."""
|
||||
        for name in names:
            attr = "install_" + name
            setattr(self, attr, change_root(self.root, getattr(self, attr)))

    def create_home_path(self):
        """Create directories under ~."""
        if not self.user:
            return
        home = convert_path(os.path.expanduser("~"))
        for name, path in self.config_vars.items():
            if str(path).startswith(home) and not os.path.isdir(path):
                self.debug_print("os.makedirs('%s', 0o700)" % path)
                os.makedirs(path, 0o700)

    # -- Command execution methods -------------------------------------

    def run(self):
        """Runs the command."""
        # Obviously have to build before we can install
        if not self.skip_build:
            self.run_command('build')
            # If we built for any other platform, we can't install.
            build_plat = self.distribution.get_command_obj('build').plat_name
            # check warn_dir - it is a clue that the 'install' is happening
            # internally, and not to sys.path, so we don't check the platform
            # matches what we are running.
            if self.warn_dir and build_plat != get_platform():
                raise DistutilsPlatformError("Can't install when "
                                             "cross-compiling")

        # Run all sub-commands (at least those that need to be run)
        for cmd_name in self.get_sub_commands():
            self.run_command(cmd_name)

        if self.path_file:
            self.create_path_file()

        # write list of installed files, if requested.
        if self.record:
            outputs = self.get_outputs()
            if self.root:               # strip any package prefix
                root_len = len(self.root)
                for counter in range(len(outputs)):
                    outputs[counter] = outputs[counter][root_len:]
            self.execute(write_file,
                         (self.record, outputs),
                         "writing list of installed files to '%s'" %
                         self.record)

        sys_path = map(os.path.normpath, sys.path)
        sys_path = map(os.path.normcase, sys_path)
        install_lib = os.path.normcase(os.path.normpath(self.install_lib))
        if (self.warn_dir and
            not (self.path_file and self.install_path_file) and
            install_lib not in sys_path):
            log.debug(("modules installed to '%s', which is not in "
                       "Python's module search path (sys.path) -- "
                       "you'll have to change the search path yourself"),
                      self.install_lib)

    def create_path_file(self):
        """Creates the .pth file"""
        filename = os.path.join(self.install_libbase,
                                self.path_file + ".pth")
        if self.install_path_file:
            self.execute(write_file,
                         (filename, [self.extra_dirs]),
                         "creating %s" % filename)
        else:
            self.warn("path file '%s' not created" % filename)


    # -- Reporting methods ---------------------------------------------

    def get_outputs(self):
        """Assembles the outputs of all the sub-commands."""
        outputs = []
        for cmd_name in self.get_sub_commands():
            cmd = self.get_finalized_command(cmd_name)
            # Add the contents of cmd.get_outputs(), ensuring
            # that outputs doesn't contain duplicate entries
            for filename in cmd.get_outputs():
                if filename not in outputs:
                    outputs.append(filename)

        if self.path_file and self.install_path_file:
            outputs.append(os.path.join(self.install_libbase,
                                        self.path_file + ".pth"))

        return outputs

    def get_inputs(self):
        """Returns the inputs of all the sub-commands"""
        # XXX gee, this looks familiar ;-(
        inputs = []
        for cmd_name in self.get_sub_commands():
            cmd = self.get_finalized_command(cmd_name)
            inputs.extend(cmd.get_inputs())

        return inputs

    # -- Predicates for sub-command list -------------------------------

    def has_lib(self):
        """Returns true if the current distribution has any Python
        modules to install."""
        return (self.distribution.has_pure_modules() or
                self.distribution.has_ext_modules())

    def has_headers(self):
        """Returns true if the current distribution has any headers to
        install."""
        return self.distribution.has_headers()

    def has_scripts(self):
"""Returns true if the current distribution has any scripts to.
|
||||
install."""
|
||||
        return self.distribution.has_scripts()

    def has_data(self):
"""Returns true if the current distribution has any data to.
|
||||
install."""
|
||||
        return self.distribution.has_data_files()

    # 'sub_commands': a list of commands this command might have to run to
    # get its work done. See cmd.py for more info.
    sub_commands = [('install_lib', has_lib),
                    ('install_headers', has_headers),
                    ('install_scripts', has_scripts),
                    ('install_data', has_data),
                    ('install_egg_info', lambda self:True),
                   ]
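
# Illustrative sketch (not part of the original source): how the '{var}'
# placeholders in a scheme template get expanded. This assumes the
# pypa/distutils 'subst_vars', which understands format-style '{var}'
# references; the variable values below are hypothetical stand-ins for what
# finalize_options() computes.
if __name__ == '__main__':
    demo_vars = {'base': '/usr/local',
                 'implementation_lower': 'python',
                 'py_version_short': '3.10'}
    template = INSTALL_SCHEMES['posix_prefix']['purelib']
    print(subst_vars(template, demo_vars))
    # -> /usr/local/lib/python3.10/site-packages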
@@ -0,0 +1,79 @@
"""distutils.command.install_data
|
||||
|
||||
Implements the Distutils 'install_data' command, for installing
|
||||
platform-independent data files."""
|
||||
|
||||
# contributed by Bastian Kleineidam
|
||||
|
||||
import os
|
||||
from distutils.core import Command
|
||||
from distutils.util import change_root, convert_path
|
||||
|
||||
class install_data(Command):
|
||||
|
||||
description = "install data files"
|
||||
|
||||
user_options = [
|
||||
('install-dir=', 'd',
|
||||
"base directory for installing data files "
|
||||
"(default: installation base dir)"),
|
||||
('root=', None,
|
||||
"install everything relative to this alternate root directory"),
|
||||
('force', 'f', "force installation (overwrite existing files)"),
|
||||
]
|
||||
|
||||
boolean_options = ['force']
|
||||
|
||||
def initialize_options(self):
|
||||
self.install_dir = None
|
||||
self.outfiles = []
|
||||
self.root = None
|
||||
self.force = 0
|
||||
self.data_files = self.distribution.data_files
|
||||
self.warn_dir = 1
|
||||
|
||||
def finalize_options(self):
|
||||
self.set_undefined_options('install',
|
||||
('install_data', 'install_dir'),
|
||||
('root', 'root'),
|
||||
('force', 'force'),
|
||||
)
|
||||
|
||||
def run(self):
|
||||
self.mkpath(self.install_dir)
|
||||
for f in self.data_files:
|
||||
if isinstance(f, str):
|
||||
# it's a simple file, so copy it
|
||||
f = convert_path(f)
|
||||
if self.warn_dir:
|
||||
self.warn("setup script did not provide a directory for "
|
||||
"'%s' -- installing right in '%s'" %
|
||||
(f, self.install_dir))
|
||||
(out, _) = self.copy_file(f, self.install_dir)
|
||||
self.outfiles.append(out)
|
||||
else:
|
||||
# it's a tuple with path to install to and a list of files
|
||||
dir = convert_path(f[0])
|
||||
if not os.path.isabs(dir):
|
||||
dir = os.path.join(self.install_dir, dir)
|
||||
elif self.root:
|
||||
dir = change_root(self.root, dir)
|
||||
self.mkpath(dir)
|
||||
|
||||
if f[1] == []:
|
||||
# If there are no files listed, the user must be
|
||||
# trying to create an empty directory, so add the
|
||||
# directory to the list of output files.
|
||||
self.outfiles.append(dir)
|
||||
else:
|
||||
# Copy files, adding them to the list of output files.
|
||||
for data in f[1]:
|
||||
data = convert_path(data)
|
||||
(out, _) = self.copy_file(data, dir)
|
||||
self.outfiles.append(out)
|
||||
|
||||
def get_inputs(self):
|
||||
return self.data_files or []
|
||||
|
||||
def get_outputs(self):
|
||||
return self.outfiles
|
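
# Sketch (not part of the original command): the two shapes that entries in
# 'data_files' may take, as dispatched on in run() above, plus the
# change_root behavior used for absolute targets. All values are
# hypothetical.
if __name__ == '__main__':
    data_files = [
        'README.txt',                          # plain file: goes to install_dir
        ('share/myapp', ['data/cfg.sample']),  # (target dir, [source files])
    ]
    for entry in data_files:
        print(entry if isinstance(entry, str) else entry[0])
    # On POSIX, an absolute target is re-rooted under --root like this:
    print(change_root('/tmp/stage', '/usr/share/myapp'))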
@@ -0,0 +1,84 @@
"""distutils.command.install_egg_info
|
||||
|
||||
Implements the Distutils 'install_egg_info' command, for installing
|
||||
a package's PKG-INFO metadata."""
|
||||
|
||||
|
||||
from distutils.cmd import Command
|
||||
from distutils import log, dir_util
|
||||
import os, sys, re
|
||||
|
||||
class install_egg_info(Command):
|
||||
"""Install an .egg-info file for the package"""
|
||||
|
||||
description = "Install package's PKG-INFO metadata as an .egg-info file"
|
||||
user_options = [
|
||||
('install-dir=', 'd', "directory to install to"),
|
||||
]
|
||||
|
||||
def initialize_options(self):
|
||||
self.install_dir = None
|
||||
|
||||
@property
|
||||
def basename(self):
|
||||
"""
|
||||
Allow basename to be overridden by child class.
|
||||
Ref pypa/distutils#2.
|
||||
"""
|
||||
return "%s-%s-py%d.%d.egg-info" % (
|
||||
to_filename(safe_name(self.distribution.get_name())),
|
||||
to_filename(safe_version(self.distribution.get_version())),
|
||||
*sys.version_info[:2]
|
||||
)
|
||||
|
||||
def finalize_options(self):
|
||||
self.set_undefined_options('install_lib',('install_dir','install_dir'))
|
||||
self.target = os.path.join(self.install_dir, self.basename)
|
||||
self.outputs = [self.target]
|
||||
|
||||
def run(self):
|
||||
target = self.target
|
||||
if os.path.isdir(target) and not os.path.islink(target):
|
||||
dir_util.remove_tree(target, dry_run=self.dry_run)
|
||||
elif os.path.exists(target):
|
||||
self.execute(os.unlink,(self.target,),"Removing "+target)
|
||||
elif not os.path.isdir(self.install_dir):
|
||||
self.execute(os.makedirs, (self.install_dir,),
|
||||
"Creating "+self.install_dir)
|
||||
log.info("Writing %s", target)
|
||||
if not self.dry_run:
|
||||
with open(target, 'w', encoding='UTF-8') as f:
|
||||
self.distribution.metadata.write_pkg_file(f)
|
||||
|
||||
def get_outputs(self):
|
||||
return self.outputs
|
||||
|
||||
|
||||
# The following routines are taken from setuptools' pkg_resources module and
|
||||
# can be replaced by importing them from pkg_resources once it is included
|
||||
# in the stdlib.
|
||||
|
||||
def safe_name(name):
|
||||
"""Convert an arbitrary string to a standard distribution name
|
||||
|
||||
Any runs of non-alphanumeric/. characters are replaced with a single '-'.
|
||||
"""
|
||||
return re.sub('[^A-Za-z0-9.]+', '-', name)
|
||||
|
||||
|
||||
def safe_version(version):
|
||||
"""Convert an arbitrary string to a standard version string
|
||||
|
||||
Spaces become dots, and all other non-alphanumeric characters become
|
||||
dashes, with runs of multiple dashes condensed to a single dash.
|
||||
"""
|
||||
version = version.replace(' ','.')
|
||||
return re.sub('[^A-Za-z0-9.]+', '-', version)
|
||||
|
||||
|
||||
def to_filename(name):
|
||||
"""Convert a project or version name to its filename-escaped form
|
||||
|
||||
Any '-' characters are currently replaced with '_'.
|
||||
"""
|
||||
return name.replace('-','_')
|
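
# A quick illustrative sketch (not part of the original module) of how the
# three helpers above combine into an .egg-info basename; the project name
# and version are hypothetical.
if __name__ == '__main__':
    name = to_filename(safe_name('My Package'))        # 'My_Package'
    version = to_filename(safe_version('1.0 beta 2'))  # '1.0.beta.2'
    print("%s-%s-py%d.%d.egg-info" % (name, version, *sys.version_info[:2]))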
@@ -0,0 +1,47 @@
"""distutils.command.install_headers
|
||||
|
||||
Implements the Distutils 'install_headers' command, to install C/C++ header
|
||||
files to the Python include directory."""
|
||||
|
||||
from distutils.core import Command
|
||||
|
||||
|
||||
# XXX force is never used
|
||||
class install_headers(Command):
|
||||
|
||||
description = "install C/C++ header files"
|
||||
|
||||
user_options = [('install-dir=', 'd',
|
||||
"directory to install header files to"),
|
||||
('force', 'f',
|
||||
"force installation (overwrite existing files)"),
|
||||
]
|
||||
|
||||
boolean_options = ['force']
|
||||
|
||||
def initialize_options(self):
|
||||
self.install_dir = None
|
||||
self.force = 0
|
||||
self.outfiles = []
|
||||
|
||||
def finalize_options(self):
|
||||
self.set_undefined_options('install',
|
||||
('install_headers', 'install_dir'),
|
||||
('force', 'force'))
|
||||
|
||||
|
||||
def run(self):
|
||||
headers = self.distribution.headers
|
||||
if not headers:
|
||||
return
|
||||
|
||||
self.mkpath(self.install_dir)
|
||||
for header in headers:
|
||||
(out, _) = self.copy_file(header, self.install_dir)
|
||||
self.outfiles.append(out)
|
||||
|
||||
def get_inputs(self):
|
||||
return self.distribution.headers or []
|
||||
|
||||
def get_outputs(self):
|
||||
return self.outfiles
|
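
# Minimal sketch (not part of the original command): wiring this command to
# a throwaway Distribution. The project name and header path are
# hypothetical; no files are copied here, we only query the inputs.
if __name__ == '__main__':
    from distutils.dist import Distribution
    dist = Distribution({'name': 'spam', 'headers': ['src/spamutil.h']})
    cmd = install_headers(dist)
    print(cmd.get_inputs())  # ['src/spamutil.h']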
@@ -0,0 +1,217 @@
"""distutils.command.install_lib
|
||||
|
||||
Implements the Distutils 'install_lib' command
|
||||
(install all Python modules)."""
|
||||
|
||||
import os
|
||||
import importlib.util
|
||||
import sys
|
||||
|
||||
from distutils.core import Command
|
||||
from distutils.errors import DistutilsOptionError
|
||||
|
||||
|
||||
# Extension for Python source files.
|
||||
PYTHON_SOURCE_EXTENSION = ".py"
|
||||
|
||||
class install_lib(Command):
|
||||
|
||||
description = "install all Python modules (extensions and pure Python)"
|
||||
|
||||
# The byte-compilation options are a tad confusing. Here are the
|
||||
# possible scenarios:
|
||||
# 1) no compilation at all (--no-compile --no-optimize)
|
||||
# 2) compile .pyc only (--compile --no-optimize; default)
|
||||
# 3) compile .pyc and "opt-1" .pyc (--compile --optimize)
|
||||
# 4) compile "opt-1" .pyc only (--no-compile --optimize)
|
||||
# 5) compile .pyc and "opt-2" .pyc (--compile --optimize-more)
|
||||
# 6) compile "opt-2" .pyc only (--no-compile --optimize-more)
|
||||
#
|
||||
# The UI for this is two options, 'compile' and 'optimize'.
|
||||
# 'compile' is strictly boolean, and only decides whether to
|
||||
# generate .pyc files. 'optimize' is three-way (0, 1, or 2), and
|
||||
# decides both whether to generate .pyc files and what level of
|
||||
# optimization to use.
|
||||
|
||||
user_options = [
|
||||
('install-dir=', 'd', "directory to install to"),
|
||||
('build-dir=','b', "build directory (where to install from)"),
|
||||
('force', 'f', "force installation (overwrite existing files)"),
|
||||
('compile', 'c', "compile .py to .pyc [default]"),
|
||||
('no-compile', None, "don't compile .py files"),
|
||||
('optimize=', 'O',
|
||||
"also compile with optimization: -O1 for \"python -O\", "
|
||||
"-O2 for \"python -OO\", and -O0 to disable [default: -O0]"),
|
||||
('skip-build', None, "skip the build steps"),
|
||||
]
|
||||
|
||||
boolean_options = ['force', 'compile', 'skip-build']
|
||||
negative_opt = {'no-compile' : 'compile'}
|
||||
|
||||
def initialize_options(self):
|
||||
# let the 'install' command dictate our installation directory
|
||||
self.install_dir = None
|
||||
self.build_dir = None
|
||||
self.force = 0
|
||||
self.compile = None
|
||||
self.optimize = None
|
||||
self.skip_build = None
|
||||
|
||||
def finalize_options(self):
|
||||
# Get all the information we need to install pure Python modules
|
||||
# from the umbrella 'install' command -- build (source) directory,
|
||||
# install (target) directory, and whether to compile .py files.
|
||||
self.set_undefined_options('install',
|
||||
('build_lib', 'build_dir'),
|
||||
('install_lib', 'install_dir'),
|
||||
('force', 'force'),
|
||||
('compile', 'compile'),
|
||||
('optimize', 'optimize'),
|
||||
('skip_build', 'skip_build'),
|
||||
)
|
||||
|
||||
if self.compile is None:
|
||||
self.compile = True
|
||||
if self.optimize is None:
|
||||
self.optimize = False
|
||||
|
||||
if not isinstance(self.optimize, int):
|
||||
try:
|
||||
self.optimize = int(self.optimize)
|
||||
if self.optimize not in (0, 1, 2):
|
||||
raise AssertionError
|
||||
except (ValueError, AssertionError):
|
||||
raise DistutilsOptionError("optimize must be 0, 1, or 2")
|
||||
|
||||
def run(self):
|
||||
# Make sure we have built everything we need first
|
||||
self.build()
|
||||
|
||||
# Install everything: simply dump the entire contents of the build
|
||||
# directory to the installation directory (that's the beauty of
|
||||
# having a build directory!)
|
||||
outfiles = self.install()
|
||||
|
||||
# (Optionally) compile .py to .pyc
|
||||
if outfiles is not None and self.distribution.has_pure_modules():
|
||||
self.byte_compile(outfiles)
|
||||
|
||||
# -- Top-level worker functions ------------------------------------
|
||||
# (called from 'run()')
|
||||
|
||||
def build(self):
|
||||
if not self.skip_build:
|
||||
if self.distribution.has_pure_modules():
|
||||
self.run_command('build_py')
|
||||
if self.distribution.has_ext_modules():
|
||||
self.run_command('build_ext')
|
||||
|
||||
def install(self):
|
||||
if os.path.isdir(self.build_dir):
|
||||
outfiles = self.copy_tree(self.build_dir, self.install_dir)
|
||||
else:
|
||||
self.warn("'%s' does not exist -- no Python modules to install" %
|
||||
self.build_dir)
|
||||
return
|
||||
return outfiles
|
||||
|
||||
def byte_compile(self, files):
|
||||
if sys.dont_write_bytecode:
|
||||
self.warn('byte-compiling is disabled, skipping.')
|
||||
return
|
||||
|
||||
from distutils.util import byte_compile
|
||||
|
||||
# Get the "--root" directory supplied to the "install" command,
|
||||
# and use it as a prefix to strip off the purported filename
|
||||
# encoded in bytecode files. This is far from complete, but it
|
||||
# should at least generate usable bytecode in RPM distributions.
|
||||
install_root = self.get_finalized_command('install').root
|
||||
|
||||
if self.compile:
|
||||
byte_compile(files, optimize=0,
|
||||
force=self.force, prefix=install_root,
|
||||
dry_run=self.dry_run)
|
||||
if self.optimize > 0:
|
||||
byte_compile(files, optimize=self.optimize,
|
||||
force=self.force, prefix=install_root,
|
||||
verbose=self.verbose, dry_run=self.dry_run)
|
||||
|
||||
|
||||
# -- Utility methods -----------------------------------------------
|
||||
|
||||
def _mutate_outputs(self, has_any, build_cmd, cmd_option, output_dir):
|
||||
if not has_any:
|
||||
return []
|
||||
|
||||
build_cmd = self.get_finalized_command(build_cmd)
|
||||
build_files = build_cmd.get_outputs()
|
||||
build_dir = getattr(build_cmd, cmd_option)
|
||||
|
||||
prefix_len = len(build_dir) + len(os.sep)
|
||||
outputs = []
|
||||
for file in build_files:
|
||||
outputs.append(os.path.join(output_dir, file[prefix_len:]))
|
||||
|
||||
return outputs
|
||||
|
||||
def _bytecode_filenames(self, py_filenames):
|
||||
bytecode_files = []
|
||||
for py_file in py_filenames:
|
||||
# Since build_py handles package data installation, the
|
||||
# list of outputs can contain more than just .py files.
|
||||
# Make sure we only report bytecode for the .py files.
|
||||
ext = os.path.splitext(os.path.normcase(py_file))[1]
|
||||
if ext != PYTHON_SOURCE_EXTENSION:
|
||||
continue
|
||||
if self.compile:
|
||||
bytecode_files.append(importlib.util.cache_from_source(
|
||||
py_file, optimization=''))
|
||||
if self.optimize > 0:
|
||||
bytecode_files.append(importlib.util.cache_from_source(
|
||||
py_file, optimization=self.optimize))
|
||||
|
||||
return bytecode_files
|
||||
|
||||
|
||||
# -- External interface --------------------------------------------
|
||||
# (called by outsiders)
|
||||
|
||||
def get_outputs(self):
|
||||
"""Return the list of files that would be installed if this command
|
||||
were actually run. Not affected by the "dry-run" flag or whether
|
||||
modules have actually been built yet.
|
||||
"""
|
||||
pure_outputs = \
|
||||
self._mutate_outputs(self.distribution.has_pure_modules(),
|
||||
'build_py', 'build_lib',
|
||||
self.install_dir)
|
||||
if self.compile:
|
||||
bytecode_outputs = self._bytecode_filenames(pure_outputs)
|
||||
else:
|
||||
bytecode_outputs = []
|
||||
|
||||
ext_outputs = \
|
||||
self._mutate_outputs(self.distribution.has_ext_modules(),
|
||||
'build_ext', 'build_lib',
|
||||
self.install_dir)
|
||||
|
||||
return pure_outputs + bytecode_outputs + ext_outputs
|
||||
|
||||
def get_inputs(self):
|
||||
"""Get the list of files that are input to this command, ie. the
|
||||
files that get installed as they are named in the build tree.
|
||||
The files in this list correspond one-to-one to the output
|
||||
filenames returned by 'get_outputs()'.
|
||||
"""
|
||||
inputs = []
|
||||
|
||||
if self.distribution.has_pure_modules():
|
||||
build_py = self.get_finalized_command('build_py')
|
||||
inputs.extend(build_py.get_outputs())
|
||||
|
||||
if self.distribution.has_ext_modules():
|
||||
build_ext = self.get_finalized_command('build_ext')
|
||||
inputs.extend(build_ext.get_outputs())
|
||||
|
||||
return inputs
|
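
# Sketch (not part of the original module): the PEP 3147 cache paths that
# '_bytecode_filenames' reports for one hypothetical source file, for the
# plain and the optimized (-O1) variants.
if __name__ == '__main__':
    print(importlib.util.cache_from_source('pkg/mod.py', optimization=''))
    print(importlib.util.cache_from_source('pkg/mod.py', optimization=1))
    # e.g. pkg/__pycache__/mod.cpython-310.pyc
    #      pkg/__pycache__/mod.cpython-310.opt-1.pyc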
@@ -0,0 +1,60 @@
"""distutils.command.install_scripts
|
||||
|
||||
Implements the Distutils 'install_scripts' command, for installing
|
||||
Python scripts."""
|
||||
|
||||
# contributed by Bastian Kleineidam
|
||||
|
||||
import os
|
||||
from distutils.core import Command
|
||||
from distutils import log
|
||||
from stat import ST_MODE
|
||||
|
||||
|
||||
class install_scripts(Command):
|
||||
|
||||
description = "install scripts (Python or otherwise)"
|
||||
|
||||
user_options = [
|
||||
('install-dir=', 'd', "directory to install scripts to"),
|
||||
('build-dir=','b', "build directory (where to install from)"),
|
||||
('force', 'f', "force installation (overwrite existing files)"),
|
||||
('skip-build', None, "skip the build steps"),
|
||||
]
|
||||
|
||||
boolean_options = ['force', 'skip-build']
|
||||
|
||||
def initialize_options(self):
|
||||
self.install_dir = None
|
||||
self.force = 0
|
||||
self.build_dir = None
|
||||
self.skip_build = None
|
||||
|
||||
def finalize_options(self):
|
||||
self.set_undefined_options('build', ('build_scripts', 'build_dir'))
|
||||
self.set_undefined_options('install',
|
||||
('install_scripts', 'install_dir'),
|
||||
('force', 'force'),
|
||||
('skip_build', 'skip_build'),
|
||||
)
|
||||
|
||||
def run(self):
|
||||
if not self.skip_build:
|
||||
self.run_command('build_scripts')
|
||||
self.outfiles = self.copy_tree(self.build_dir, self.install_dir)
|
||||
if os.name == 'posix':
|
||||
# Set the executable bits (owner, group, and world) on
|
||||
# all the scripts we just installed.
|
||||
for file in self.get_outputs():
|
||||
if self.dry_run:
|
||||
log.info("changing mode of %s", file)
|
||||
else:
|
||||
mode = ((os.stat(file)[ST_MODE]) | 0o555) & 0o7777
|
||||
log.info("changing mode of %s to %o", file, mode)
|
||||
os.chmod(file, mode)
|
||||
|
||||
def get_inputs(self):
|
||||
return self.distribution.scripts or []
|
||||
|
||||
def get_outputs(self):
|
||||
return self.outfiles or []
|
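
# Sketch (not part of the original command): the permission arithmetic used
# in run() above -- OR in read/execute bits for owner, group, and world,
# then mask down to the permission bits. The starting mode is hypothetical.
if __name__ == '__main__':
    before = 0o644
    after = (before | 0o555) & 0o7777
    print(oct(before), '->', oct(after))  # 0o644 -> 0o755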
@@ -0,0 +1,30 @@
import sys


def _pythonlib_compat():
    """
    On Python 3.7 and earlier, distutils would include the Python
    library. See pypa/distutils#9.
    """
    from distutils import sysconfig
    if not sysconfig.get_config_var('Py_ENABLED_SHARED'):
        return

    yield 'python{}.{}{}'.format(
        sys.hexversion >> 24,
        (sys.hexversion >> 16) & 0xff,
        sysconfig.get_config_var('ABIFLAGS'),
    )


def compose(f1, f2):
    return lambda *args, **kwargs: f1(f2(*args, **kwargs))


pythonlib = (
    compose(list, _pythonlib_compat)
    if sys.version_info < (3, 8)
    and sys.platform != 'darwin'
    and sys.platform[:3] != 'aix'
    else list
)
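
# Sketch (not part of the original module): 'compose' simply chains two
# callables; on Python >= 3.8 (or on macOS/AIX), 'pythonlib' degenerates to
# the plain 'list' constructor, so calling it yields an empty list.
if __name__ == '__main__':
    print(compose(sorted, set)('banana'))  # ['a', 'b', 'n']
    print(pythonlib())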
@@ -0,0 +1,304 @@
"""distutils.command.register
|
||||
|
||||
Implements the Distutils 'register' command (register with the repository).
|
||||
"""
|
||||
|
||||
# created 2002/10/21, Richard Jones
|
||||
|
||||
import getpass
|
||||
import io
|
||||
import urllib.parse, urllib.request
|
||||
from warnings import warn
|
||||
|
||||
from distutils.core import PyPIRCCommand
|
||||
from distutils.errors import *
|
||||
from distutils import log
|
||||
|
||||
class register(PyPIRCCommand):
|
||||
|
||||
description = ("register the distribution with the Python package index")
|
||||
user_options = PyPIRCCommand.user_options + [
|
||||
('list-classifiers', None,
|
||||
'list the valid Trove classifiers'),
|
||||
        ('strict', None,
         'stop the registration if the metadata is not fully compliant')
]
|
||||
boolean_options = PyPIRCCommand.boolean_options + [
|
||||
'verify', 'list-classifiers', 'strict']
|
||||
|
||||
sub_commands = [('check', lambda self: True)]
|
||||
|
||||
def initialize_options(self):
|
||||
PyPIRCCommand.initialize_options(self)
|
||||
self.list_classifiers = 0
|
||||
self.strict = 0
|
||||
|
||||
def finalize_options(self):
|
||||
PyPIRCCommand.finalize_options(self)
|
||||
# setting options for the `check` subcommand
|
||||
check_options = {'strict': ('register', self.strict),
|
||||
'restructuredtext': ('register', 1)}
|
||||
self.distribution.command_options['check'] = check_options
|
||||
|
||||
def run(self):
|
||||
self.finalize_options()
|
||||
self._set_config()
|
||||
|
||||
# Run sub commands
|
||||
for cmd_name in self.get_sub_commands():
|
||||
self.run_command(cmd_name)
|
||||
|
||||
if self.dry_run:
|
||||
self.verify_metadata()
|
||||
elif self.list_classifiers:
|
||||
self.classifiers()
|
||||
else:
|
||||
self.send_metadata()
|
||||
|
||||
def check_metadata(self):
|
||||
"""Deprecated API."""
|
||||
warn("distutils.command.register.check_metadata is deprecated, \
|
||||
use the check command instead", PendingDeprecationWarning)
|
||||
check = self.distribution.get_command_obj('check')
|
||||
check.ensure_finalized()
|
||||
check.strict = self.strict
|
||||
check.restructuredtext = 1
|
||||
check.run()
|
||||
|
||||
def _set_config(self):
|
||||
''' Reads the configuration file and set attributes.
|
||||
'''
|
||||
config = self._read_pypirc()
|
||||
if config != {}:
|
||||
self.username = config['username']
|
||||
self.password = config['password']
|
||||
self.repository = config['repository']
|
||||
self.realm = config['realm']
|
||||
self.has_config = True
|
||||
else:
|
||||
if self.repository not in ('pypi', self.DEFAULT_REPOSITORY):
|
||||
raise ValueError('%s not found in .pypirc' % self.repository)
|
||||
if self.repository == 'pypi':
|
||||
self.repository = self.DEFAULT_REPOSITORY
|
||||
self.has_config = False
|
||||
|
||||
def classifiers(self):
|
||||
''' Fetch the list of classifiers from the server.
|
||||
'''
|
||||
url = self.repository+'?:action=list_classifiers'
|
||||
response = urllib.request.urlopen(url)
|
||||
log.info(self._read_pypi_response(response))
|
||||
|
||||
def verify_metadata(self):
|
||||
''' Send the metadata to the package index server to be checked.
|
||||
'''
|
||||
# send the info to the server and report the result
|
||||
(code, result) = self.post_to_server(self.build_post_data('verify'))
|
||||
log.info('Server response (%s): %s', code, result)
|
||||
|
||||
def send_metadata(self):
|
||||
''' Send the metadata to the package index server.
|
||||
|
||||
Well, do the following:
|
||||
1. figure who the user is, and then
|
||||
2. send the data as a Basic auth'ed POST.
|
||||
|
||||
First we try to read the username/password from $HOME/.pypirc,
|
||||
which is a ConfigParser-formatted file with a section
|
||||
[distutils] containing username and password entries (both
|
||||
in clear text). Eg:
|
||||
|
||||
[distutils]
|
||||
index-servers =
|
||||
pypi
|
||||
|
||||
[pypi]
|
||||
username: fred
|
||||
password: sekrit
|
||||
|
||||
Otherwise, to figure who the user is, we offer the user three
|
||||
choices:
|
||||
|
||||
1. use existing login,
|
||||
2. register as a new user, or
|
||||
3. set the password to a random string and email the user.
|
||||
|
||||
'''
|
||||
# see if we can short-cut and get the username/password from the
|
||||
# config
|
||||
if self.has_config:
|
||||
choice = '1'
|
||||
username = self.username
|
||||
password = self.password
|
||||
else:
|
||||
choice = 'x'
|
||||
username = password = ''
|
||||
|
||||
# get the user's login info
|
||||
choices = '1 2 3 4'.split()
|
||||
while choice not in choices:
|
||||
self.announce('''\
|
||||
We need to know who you are, so please choose either:
|
||||
1. use your existing login,
|
||||
2. register as a new user,
|
||||
3. have the server generate a new password for you (and email it to you), or
|
||||
4. quit
|
||||
Your selection [default 1]: ''', log.INFO)
|
||||
choice = input()
|
||||
if not choice:
|
||||
choice = '1'
|
||||
elif choice not in choices:
|
||||
print('Please choose one of the four options!')
|
||||
|
||||
if choice == '1':
|
||||
# get the username and password
|
||||
while not username:
|
||||
username = input('Username: ')
|
||||
while not password:
|
||||
password = getpass.getpass('Password: ')
|
||||
|
||||
# set up the authentication
|
||||
auth = urllib.request.HTTPPasswordMgr()
|
||||
host = urllib.parse.urlparse(self.repository)[1]
|
||||
auth.add_password(self.realm, host, username, password)
|
||||
# send the info to the server and report the result
|
||||
code, result = self.post_to_server(self.build_post_data('submit'),
|
||||
auth)
|
||||
self.announce('Server response (%s): %s' % (code, result),
|
||||
log.INFO)
|
||||
|
||||
# possibly save the login
|
||||
if code == 200:
|
||||
if self.has_config:
|
||||
# sharing the password in the distribution instance
|
||||
# so the upload command can reuse it
|
||||
self.distribution.password = password
|
||||
else:
|
||||
self.announce(('I can store your PyPI login so future '
|
||||
'submissions will be faster.'), log.INFO)
|
||||
self.announce('(the login will be stored in %s)' % \
|
||||
self._get_rc_file(), log.INFO)
|
||||
choice = 'X'
|
||||
while choice.lower() not in 'yn':
|
||||
choice = input('Save your login (y/N)?')
|
||||
if not choice:
|
||||
choice = 'n'
|
||||
if choice.lower() == 'y':
|
||||
self._store_pypirc(username, password)
|
||||

        elif choice == '2':
            data = {':action': 'user'}
            data['name'] = data['password'] = data['email'] = ''
            data['confirm'] = None
            while not data['name']:
                data['name'] = input('Username: ')
            while data['password'] != data['confirm']:
                while not data['password']:
                    data['password'] = getpass.getpass('Password: ')
                while not data['confirm']:
                    data['confirm'] = getpass.getpass(' Confirm: ')
                if data['password'] != data['confirm']:
                    data['password'] = ''
                    data['confirm'] = None
                    print("Password and confirm don't match!")
            while not data['email']:
                data['email'] = input('   EMail: ')
            code, result = self.post_to_server(data)
            if code != 200:
                log.info('Server response (%s): %s', code, result)
            else:
                log.info('You will receive an email shortly.')
                log.info(('Follow the instructions in it to '
                          'complete registration.'))
        elif choice == '3':
            data = {':action': 'password_reset'}
            data['email'] = ''
            while not data['email']:
                data['email'] = input('Your email address: ')
            code, result = self.post_to_server(data)
            log.info('Server response (%s): %s', code, result)

    def build_post_data(self, action):
        # figure the data to send - the metadata plus some additional
        # information used by the package server
        meta = self.distribution.metadata
        data = {
            ':action': action,
            'metadata_version' : '1.0',
            'name': meta.get_name(),
            'version': meta.get_version(),
            'summary': meta.get_description(),
            'home_page': meta.get_url(),
            'author': meta.get_contact(),
            'author_email': meta.get_contact_email(),
            'license': meta.get_licence(),
            'description': meta.get_long_description(),
            'keywords': meta.get_keywords(),
            'platform': meta.get_platforms(),
            'classifiers': meta.get_classifiers(),
            'download_url': meta.get_download_url(),
            # PEP 314
            'provides': meta.get_provides(),
            'requires': meta.get_requires(),
            'obsoletes': meta.get_obsoletes(),
        }
        if data['provides'] or data['requires'] or data['obsoletes']:
            data['metadata_version'] = '1.1'
        return data

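    # Editor's sketch (not part of distutils): what build_post_data() yields
    # for a minimal distribution.  'metadata_version' stays at '1.0' unless
    # one of the PEP 314 fields (provides/requires/obsoletes) is non-empty.
    def _example_build_post_data(self):  # hypothetical helper, never called
        data = self.build_post_data('submit')
        assert data[':action'] == 'submit'
        if not (data['provides'] or data['requires'] or data['obsoletes']):
            assert data['metadata_version'] == '1.0'
        return data
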
    def post_to_server(self, data, auth=None):
        ''' Post a query to the server, and return a string response.
        '''
        if 'name' in data:
            self.announce('Registering %s to %s' % (data['name'],
                                                    self.repository),
                          log.INFO)
        # Build up the MIME payload for the urllib2 POST data
        boundary = '--------------GHSKFJDLGDS7543FJKLFHRE75642756743254'
        sep_boundary = '\n--' + boundary
        end_boundary = sep_boundary + '--'
        body = io.StringIO()
        for key, value in data.items():
            # handle multiple entries for the same name
            if type(value) not in (type([]), type( () )):
                value = [value]
            for value in value:
                value = str(value)
                body.write(sep_boundary)
                body.write('\nContent-Disposition: form-data; name="%s"'%key)
                body.write("\n\n")
                body.write(value)
                if value and value[-1] == '\r':
                    body.write('\n')  # write an extra newline (lurve Macs)
        body.write(end_boundary)
        body.write("\n")
        body = body.getvalue().encode("utf-8")

        # build the Request
        headers = {
            'Content-type': 'multipart/form-data; boundary=%s; charset=utf-8'%boundary,
            'Content-length': str(len(body))
        }
        req = urllib.request.Request(self.repository, body, headers)

        # handle HTTP and include the Basic Auth handler
        opener = urllib.request.build_opener(
            urllib.request.HTTPBasicAuthHandler(password_mgr=auth)
        )
        data = ''
        try:
            result = opener.open(req)
        except urllib.error.HTTPError as e:
            if self.show_response:
                data = e.fp.read()
            result = e.code, e.msg
        except urllib.error.URLError as e:
            result = 500, str(e)
        else:
            if self.show_response:
                data = self._read_pypi_response(result)
            result = 200, 'OK'
        if self.show_response:
            msg = '\n'.join(('-' * 75, data, '-' * 75))
            self.announce(msg, log.INFO)
        return result
@@ -0,0 +1,494 @@
"""distutils.command.sdist

Implements the Distutils 'sdist' command (create a source distribution)."""

import os
import sys
from glob import glob
from warnings import warn

from distutils.core import Command
from distutils import dir_util
from distutils import file_util
from distutils import archive_util
from distutils.text_file import TextFile
from distutils.filelist import FileList
from distutils import log
from distutils.util import convert_path
from distutils.errors import DistutilsTemplateError, DistutilsOptionError


def show_formats():
    """Print all possible values for the 'formats' option (used by
    the "--help-formats" command-line option).
    """
    from distutils.fancy_getopt import FancyGetopt
    from distutils.archive_util import ARCHIVE_FORMATS
    formats = []
    for format in ARCHIVE_FORMATS.keys():
        formats.append(("formats=" + format, None,
                        ARCHIVE_FORMATS[format][2]))
    formats.sort()
    FancyGetopt(formats).print_help(
        "List of available source distribution formats:")


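# Editor's sketch (not part of distutils): the valid 'formats' names come
# from distutils.archive_util.ARCHIVE_FORMATS, so the same spellings work on
# the command line, e.g. "python setup.py sdist --formats=gztar,zip".
def _example_list_formats():  # hypothetical helper, never called
    from distutils.archive_util import ARCHIVE_FORMATS
    return sorted(ARCHIVE_FORMATS)  # e.g. ['bztar', 'gztar', 'tar', 'zip', ...]

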
class sdist(Command):

    description = "create a source distribution (tarball, zip file, etc.)"

    def checking_metadata(self):
        """Callable used for the check sub-command.

        Placed here so user_options can view it"""
        return self.metadata_check

    user_options = [
        ('template=', 't',
         "name of manifest template file [default: MANIFEST.in]"),
        ('manifest=', 'm',
         "name of manifest file [default: MANIFEST]"),
        ('use-defaults', None,
         "include the default file set in the manifest "
         "[default; disable with --no-defaults]"),
        ('no-defaults', None,
         "don't include the default file set"),
        ('prune', None,
         "specifically exclude files/directories that should not be "
         "distributed (build tree, RCS/CVS dirs, etc.) "
         "[default; disable with --no-prune]"),
        ('no-prune', None,
         "don't automatically exclude anything"),
        ('manifest-only', 'o',
         "just regenerate the manifest and then stop "
         "(implies --force-manifest)"),
        ('force-manifest', 'f',
         "forcibly regenerate the manifest and carry on as usual. "
         "Deprecated: now the manifest is always regenerated."),
        ('formats=', None,
         "formats for source distribution (comma-separated list)"),
        ('keep-temp', 'k',
         "keep the distribution tree around after creating " +
         "archive file(s)"),
        ('dist-dir=', 'd',
         "directory to put the source distribution archive(s) in "
         "[default: dist]"),
        ('metadata-check', None,
         "Ensure that all required elements of meta-data "
         "are supplied. Warn if any missing. [default]"),
        ('owner=', 'u',
         "Owner name used when creating a tar file [default: current user]"),
        ('group=', 'g',
         "Group name used when creating a tar file [default: current group]"),
        ]

    boolean_options = ['use-defaults', 'prune',
                       'manifest-only', 'force-manifest',
                       'keep-temp', 'metadata-check']

    help_options = [
        ('help-formats', None,
         "list available distribution formats", show_formats),
        ]

    negative_opt = {'no-defaults': 'use-defaults',
                    'no-prune': 'prune' }

    sub_commands = [('check', checking_metadata)]

    READMES = ('README', 'README.txt', 'README.rst')

    def initialize_options(self):
        # 'template' and 'manifest' are, respectively, the names of
        # the manifest template and manifest file.
        self.template = None
        self.manifest = None

        # 'use_defaults': if true, we will include the default file set
        # in the manifest
        self.use_defaults = 1
        self.prune = 1

        self.manifest_only = 0
        self.force_manifest = 0

        self.formats = ['gztar']
        self.keep_temp = 0
        self.dist_dir = None

        self.archive_files = None
        self.metadata_check = 1
        self.owner = None
        self.group = None

    def finalize_options(self):
        if self.manifest is None:
            self.manifest = "MANIFEST"
        if self.template is None:
            self.template = "MANIFEST.in"

        self.ensure_string_list('formats')

        bad_format = archive_util.check_archive_formats(self.formats)
        if bad_format:
            raise DistutilsOptionError(
                "unknown archive format '%s'" % bad_format)

        if self.dist_dir is None:
            self.dist_dir = "dist"

    def run(self):
        # 'filelist' contains the list of files that will make up the
        # manifest
        self.filelist = FileList()

        # Run sub commands
        for cmd_name in self.get_sub_commands():
            self.run_command(cmd_name)

        # Do whatever it takes to get the list of files to process
        # (process the manifest template, read an existing manifest,
        # whatever).  File list is accumulated in 'self.filelist'.
        self.get_file_list()

        # If user just wanted us to regenerate the manifest, stop now.
        if self.manifest_only:
            return

        # Otherwise, go ahead and create the source distribution tarball,
        # or zipfile, or whatever.
        self.make_distribution()

    def check_metadata(self):
        """Deprecated API."""
        warn("distutils.command.sdist.check_metadata is deprecated, \
              use the check command instead", PendingDeprecationWarning)
        check = self.distribution.get_command_obj('check')
        check.ensure_finalized()
        check.run()

    def get_file_list(self):
        """Figure out the list of files to include in the source
        distribution, and put it in 'self.filelist'.  This might involve
        reading the manifest template (and writing the manifest), or just
        reading the manifest, or just using the default file set -- it all
        depends on the user's options.
        """
        # new behavior when using a template:
        # the file list is recalculated every time because
        # even if MANIFEST.in or setup.py are not changed
        # the user might have added some files in the tree that
        # need to be included.
        #
        # This makes --force the default and only behavior with templates.
        template_exists = os.path.isfile(self.template)
        if not template_exists and self._manifest_is_not_generated():
            self.read_manifest()
            self.filelist.sort()
            self.filelist.remove_duplicates()
            return

        if not template_exists:
            self.warn(("manifest template '%s' does not exist " +
                       "(using default file list)") %
                      self.template)
        self.filelist.findall()

        if self.use_defaults:
            self.add_defaults()

        if template_exists:
            self.read_template()

        if self.prune:
            self.prune_file_list()

        self.filelist.sort()
        self.filelist.remove_duplicates()
        self.write_manifest()

    def add_defaults(self):
        """Add all the default files to self.filelist:
          - README or README.txt
          - setup.py
          - test/test*.py
          - all pure Python modules mentioned in setup script
          - all files pointed by package_data (build_py)
          - all files defined in data_files.
          - all files defined as scripts.
          - all C sources listed as part of extensions or C libraries
            in the setup script (doesn't catch C headers!)
        Warns if (README or README.txt) or setup.py are missing; everything
        else is optional.
        """
        self._add_defaults_standards()
        self._add_defaults_optional()
        self._add_defaults_python()
        self._add_defaults_data_files()
        self._add_defaults_ext()
        self._add_defaults_c_libs()
        self._add_defaults_scripts()

    @staticmethod
    def _cs_path_exists(fspath):
        """
        Case-sensitive path existence check

        >>> sdist._cs_path_exists(__file__)
        True
        >>> sdist._cs_path_exists(__file__.upper())
        False
        """
        if not os.path.exists(fspath):
            return False
        # make absolute so we always have a directory
        abspath = os.path.abspath(fspath)
        directory, filename = os.path.split(abspath)
        return filename in os.listdir(directory)

    def _add_defaults_standards(self):
        standards = [self.READMES, self.distribution.script_name]
        for fn in standards:
            if isinstance(fn, tuple):
                alts = fn
                got_it = False
                for fn in alts:
                    if self._cs_path_exists(fn):
                        got_it = True
                        self.filelist.append(fn)
                        break

                if not got_it:
                    self.warn("standard file not found: should have one of " +
                              ', '.join(alts))
            else:
                if self._cs_path_exists(fn):
                    self.filelist.append(fn)
                else:
                    self.warn("standard file '%s' not found" % fn)

    def _add_defaults_optional(self):
        optional = ['test/test*.py', 'setup.cfg']
        for pattern in optional:
            files = filter(os.path.isfile, glob(pattern))
            self.filelist.extend(files)

    def _add_defaults_python(self):
        # build_py is used to get:
        #  - python modules
        #  - files defined in package_data
        build_py = self.get_finalized_command('build_py')

        # getting python files
        if self.distribution.has_pure_modules():
            self.filelist.extend(build_py.get_source_files())

        # getting package_data files
        # (computed in build_py.data_files by build_py.finalize_options)
        for pkg, src_dir, build_dir, filenames in build_py.data_files:
            for filename in filenames:
                self.filelist.append(os.path.join(src_dir, filename))

    def _add_defaults_data_files(self):
        # getting distribution.data_files
        if self.distribution.has_data_files():
            for item in self.distribution.data_files:
                if isinstance(item, str):
                    # plain file
                    item = convert_path(item)
                    if os.path.isfile(item):
                        self.filelist.append(item)
                else:
                    # a (dirname, filenames) tuple
                    dirname, filenames = item
                    for f in filenames:
                        f = convert_path(f)
                        if os.path.isfile(f):
                            self.filelist.append(f)

    def _add_defaults_ext(self):
        if self.distribution.has_ext_modules():
            build_ext = self.get_finalized_command('build_ext')
            self.filelist.extend(build_ext.get_source_files())

    def _add_defaults_c_libs(self):
        if self.distribution.has_c_libraries():
            build_clib = self.get_finalized_command('build_clib')
            self.filelist.extend(build_clib.get_source_files())

    def _add_defaults_scripts(self):
        if self.distribution.has_scripts():
            build_scripts = self.get_finalized_command('build_scripts')
            self.filelist.extend(build_scripts.get_source_files())

    def read_template(self):
        """Read and parse manifest template file named by self.template.

        (usually "MANIFEST.in") The parsing and processing is done by
        'self.filelist', which updates itself accordingly.
        """
        log.info("reading manifest template '%s'", self.template)
        template = TextFile(self.template, strip_comments=1, skip_blanks=1,
                            join_lines=1, lstrip_ws=1, rstrip_ws=1,
                            collapse_join=1)

        try:
            while True:
                line = template.readline()
                if line is None:  # end of file
                    break

                try:
                    self.filelist.process_template_line(line)
                # the call above can raise a DistutilsTemplateError for
                # malformed lines, or a ValueError from the lower-level
                # convert_path function
                except (DistutilsTemplateError, ValueError) as msg:
                    self.warn("%s, line %d: %s" % (template.filename,
                                                   template.current_line,
                                                   msg))
        finally:
            template.close()

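    # Editor's sketch (not part of distutils): a typical MANIFEST.in that
    # read_template() feeds through FileList, one directive per line:
    #
    #     include README.rst LICENSE
    #     recursive-include docs *.rst
    #     graft examples
    #     prune examples/private
    #     global-exclude *.pyc
    #
    # All of these directive names are handled by
    # FileList.process_template_line().
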
    def prune_file_list(self):
        """Prune off branches that might slip into the file list as created
        by 'read_template()', but really don't belong there:
          * the build tree (typically "build")
          * the release tree itself (only an issue if we ran "sdist"
            previously with --keep-temp, or it aborted)
          * any RCS, CVS, .svn, .hg, .git, .bzr, _darcs directories
        """
        build = self.get_finalized_command('build')
        base_dir = self.distribution.get_fullname()

        self.filelist.exclude_pattern(None, prefix=build.build_base)
        self.filelist.exclude_pattern(None, prefix=base_dir)

        if sys.platform == 'win32':
            seps = r'/|\\'
        else:
            seps = '/'

        vcs_dirs = ['RCS', 'CVS', r'\.svn', r'\.hg', r'\.git', r'\.bzr',
                    '_darcs']
        vcs_ptrn = r'(^|%s)(%s)(%s).*' % (seps, '|'.join(vcs_dirs), seps)
        self.filelist.exclude_pattern(vcs_ptrn, is_regex=1)

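    # Editor's note (not part of distutils): on POSIX the pattern above
    # expands to r'(^|/)(RCS|CVS|\.svn|\.hg|\.git|\.bzr|_darcs)(/).*', so a
    # path like 'src/.git/config' is pruned while 'src/gitter.py' survives.
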
    def write_manifest(self):
        """Write the file list in 'self.filelist' (presumably as filled in
        by 'add_defaults()' and 'read_template()') to the manifest file
        named by 'self.manifest'.
        """
        if self._manifest_is_not_generated():
            log.info("not writing to manually maintained "
                     "manifest file '%s'" % self.manifest)
            return

        content = self.filelist.files[:]
        content.insert(0, '# file GENERATED by distutils, do NOT edit')
        self.execute(file_util.write_file, (self.manifest, content),
                     "writing manifest file '%s'" % self.manifest)

    def _manifest_is_not_generated(self):
        # check for special comment used in 3.1.3 and higher
        if not os.path.isfile(self.manifest):
            return False

        fp = open(self.manifest)
        try:
            first_line = fp.readline()
        finally:
            fp.close()
        return first_line != '# file GENERATED by distutils, do NOT edit\n'

    def read_manifest(self):
        """Read the manifest file (named by 'self.manifest') and use it to
        fill in 'self.filelist', the list of files to include in the source
        distribution.
        """
        log.info("reading manifest file '%s'", self.manifest)
        with open(self.manifest) as manifest:
            for line in manifest:
                # ignore comments and blank lines
                line = line.strip()
                if line.startswith('#') or not line:
                    continue
                self.filelist.append(line)

    def make_release_tree(self, base_dir, files):
        """Create the directory tree that will become the source
        distribution archive.  All directories implied by the filenames in
        'files' are created under 'base_dir', and then we hard link or copy
        (if hard linking is unavailable) those files into place.
        Essentially, this duplicates the developer's source tree, but in a
        directory named after the distribution, containing only the files
        to be distributed.
        """
        # Create all the directories under 'base_dir' necessary to
        # put 'files' there; the 'mkpath()' is just so we don't die
        # if the manifest happens to be empty.
        self.mkpath(base_dir)
        dir_util.create_tree(base_dir, files, dry_run=self.dry_run)

        # And walk over the list of files, either making a hard link (if
        # os.link exists) to each one that doesn't already exist in its
        # corresponding location under 'base_dir', or copying each file
        # that's out-of-date in 'base_dir'.  (Usually, all files will be
        # out-of-date, because by default we blow away 'base_dir' when
        # we're done making the distribution archives.)

        if hasattr(os, 'link'):  # can make hard links on this system
            link = 'hard'
            msg = "making hard links in %s..." % base_dir
        else:  # nope, have to copy
            link = None
            msg = "copying files to %s..." % base_dir

        if not files:
            log.warn("no files to distribute -- empty manifest?")
        else:
            log.info(msg)
        for file in files:
            if not os.path.isfile(file):
                log.warn("'%s' not a regular file -- skipping", file)
            else:
                dest = os.path.join(base_dir, file)
                self.copy_file(file, dest, link=link)

        self.distribution.metadata.write_pkg_info(base_dir)

    def make_distribution(self):
        """Create the source distribution(s).  First, we create the release
        tree with 'make_release_tree()'; then, we create all required
        archive files (according to 'self.formats') from the release tree.
        Finally, we clean up by blowing away the release tree (unless
        'self.keep_temp' is true).  The list of archive files created is
        stored so it can be retrieved later by 'get_archive_files()'.
        """
        # Don't warn about missing meta-data here -- should be (and is!)
        # done elsewhere.
        base_dir = self.distribution.get_fullname()
        base_name = os.path.join(self.dist_dir, base_dir)

        self.make_release_tree(base_dir, self.filelist.files)
        archive_files = []  # remember names of files we create
        # tar archive must be created last to avoid overwrite and remove
        if 'tar' in self.formats:
            self.formats.append(self.formats.pop(self.formats.index('tar')))

        for fmt in self.formats:
            file = self.make_archive(base_name, fmt, base_dir=base_dir,
                                     owner=self.owner, group=self.group)
            archive_files.append(file)
            self.distribution.dist_files.append(('sdist', '', file))

        self.archive_files = archive_files

        if not self.keep_temp:
            dir_util.remove_tree(base_dir, dry_run=self.dry_run)

    def get_archive_files(self):
        """Return the list of archive files created when the command
        was run, or None if the command hasn't run yet.
        """
        return self.archive_files
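

# Editor's sketch (not part of distutils): driving the sdist command
# programmatically from a project directory, instead of via
# "python setup.py sdist".
def _example_run_sdist():  # hypothetical helper, never called
    from distutils.dist import Distribution
    dist = Distribution({'name': 'example-pkg', 'version': '0.1'})
    cmd = sdist(dist)
    cmd.formats = ['gztar', 'zip']
    cmd.ensure_finalized()
    cmd.run()  # assumes a real source tree with files to collect
    return cmd.get_archive_files()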
@@ -0,0 +1,214 @@
"""
distutils.command.upload

Implements the Distutils 'upload' subcommand (upload package to a package
index).
"""

import os
import io
import hashlib
from base64 import standard_b64encode
from urllib.request import urlopen, Request, HTTPError
from urllib.parse import urlparse
from distutils.errors import DistutilsError, DistutilsOptionError
from distutils.core import PyPIRCCommand
from distutils.spawn import spawn
from distutils import log


# PyPI Warehouse supports MD5, SHA256, and Blake2 (blake2-256)
# https://bugs.python.org/issue40698
_FILE_CONTENT_DIGESTS = {
    "md5_digest": getattr(hashlib, "md5", None),
    "sha256_digest": getattr(hashlib, "sha256", None),
    "blake2_256_digest": getattr(hashlib, "blake2b", None),
}


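# Editor's sketch (not part of distutils): how the digest table above is
# applied to a file's raw bytes in upload_file() below; constructors that
# this hashlib build doesn't provide are skipped.
def _example_digests(content=b"example bytes"):  # hypothetical, never called
    return {name: cons(content).hexdigest()
            for name, cons in _FILE_CONTENT_DIGESTS.items()
            if cons is not None}

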
class upload(PyPIRCCommand):

    description = "upload binary package to PyPI"

    user_options = PyPIRCCommand.user_options + [
        ('sign', 's',
         'sign files to upload using gpg'),
        ('identity=', 'i', 'GPG identity used to sign files'),
        ]

    boolean_options = PyPIRCCommand.boolean_options + ['sign']

    def initialize_options(self):
        PyPIRCCommand.initialize_options(self)
        self.username = ''
        self.password = ''
        self.show_response = 0
        self.sign = False
        self.identity = None

    def finalize_options(self):
        PyPIRCCommand.finalize_options(self)
        if self.identity and not self.sign:
            raise DistutilsOptionError(
                "Must use --sign for --identity to have meaning"
            )
        config = self._read_pypirc()
        if config != {}:
            self.username = config['username']
            self.password = config['password']
            self.repository = config['repository']
            self.realm = config['realm']

        # getting the password from the distribution
        # if previously set by the register command
        if not self.password and self.distribution.password:
            self.password = self.distribution.password

    def run(self):
        if not self.distribution.dist_files:
            msg = ("Must create and upload files in one command "
                   "(e.g. setup.py sdist upload)")
            raise DistutilsOptionError(msg)
        for command, pyversion, filename in self.distribution.dist_files:
            self.upload_file(command, pyversion, filename)

    def upload_file(self, command, pyversion, filename):
        # Makes sure the repository URL is compliant
        schema, netloc, url, params, query, fragments = \
            urlparse(self.repository)
        if params or query or fragments:
            raise AssertionError("Incompatible url %s" % self.repository)

        if schema not in ('http', 'https'):
            raise AssertionError("unsupported schema " + schema)

        # Sign if requested
        if self.sign:
            gpg_args = ["gpg", "--detach-sign", "-a", filename]
            if self.identity:
                gpg_args[2:2] = ["--local-user", self.identity]
            spawn(gpg_args,
                  dry_run=self.dry_run)

        # Fill in the data - send all the meta-data in case we need to
        # register a new release
        f = open(filename,'rb')
        try:
            content = f.read()
        finally:
            f.close()

        meta = self.distribution.metadata
        data = {
            # action
            ':action': 'file_upload',
            'protocol_version': '1',

            # identify release
            'name': meta.get_name(),
            'version': meta.get_version(),

            # file content
            'content': (os.path.basename(filename),content),
            'filetype': command,
            'pyversion': pyversion,

            # additional meta-data
            'metadata_version': '1.0',
            'summary': meta.get_description(),
            'home_page': meta.get_url(),
            'author': meta.get_contact(),
            'author_email': meta.get_contact_email(),
            'license': meta.get_licence(),
            'description': meta.get_long_description(),
            'keywords': meta.get_keywords(),
            'platform': meta.get_platforms(),
            'classifiers': meta.get_classifiers(),
            'download_url': meta.get_download_url(),
            # PEP 314
            'provides': meta.get_provides(),
            'requires': meta.get_requires(),
            'obsoletes': meta.get_obsoletes(),
        }

        data['comment'] = ''

        # file content digests
        for digest_name, digest_cons in _FILE_CONTENT_DIGESTS.items():
            if digest_cons is None:
                continue
            try:
                data[digest_name] = digest_cons(content).hexdigest()
            except ValueError:
                # hash digest not available or blocked by security policy
                pass

        if self.sign:
            with open(filename + ".asc", "rb") as f:
                data['gpg_signature'] = (os.path.basename(filename) + ".asc",
                                         f.read())

        # set up the authentication
        user_pass = (self.username + ":" + self.password).encode('ascii')
        # The exact encoding of the authentication string is debated.
        # Anyway PyPI only accepts ascii for both username or password.
        auth = "Basic " + standard_b64encode(user_pass).decode('ascii')

        # Build up the MIME payload for the POST data
        boundary = '--------------GHSKFJDLGDS7543FJKLFHRE75642756743254'
        sep_boundary = b'\r\n--' + boundary.encode('ascii')
        end_boundary = sep_boundary + b'--\r\n'
        body = io.BytesIO()
        for key, value in data.items():
            title = '\r\nContent-Disposition: form-data; name="%s"' % key
            # handle multiple entries for the same name
            if not isinstance(value, list):
                value = [value]
            for value in value:
                if type(value) is tuple:
                    title += '; filename="%s"' % value[0]
                    value = value[1]
                else:
                    value = str(value).encode('utf-8')
                body.write(sep_boundary)
                body.write(title.encode('utf-8'))
                body.write(b"\r\n\r\n")
                body.write(value)
        body.write(end_boundary)
        body = body.getvalue()

        msg = "Submitting %s to %s" % (filename, self.repository)
        self.announce(msg, log.INFO)

        # build the Request
        headers = {
            'Content-type': 'multipart/form-data; boundary=%s' % boundary,
            'Content-length': str(len(body)),
            'Authorization': auth,
        }

        request = Request(self.repository, data=body,
                          headers=headers)
        # send the data
        try:
            result = urlopen(request)
            status = result.getcode()
            reason = result.msg
        except HTTPError as e:
            status = e.code
            reason = e.msg
        except OSError as e:
            self.announce(str(e), log.ERROR)
            raise

        if status == 200:
            self.announce('Server response (%s): %s' % (status, reason),
                          log.INFO)
            if self.show_response:
                text = self._read_pypi_response(result)
                msg = '\n'.join(('-' * 75, text, '-' * 75))
                self.announce(msg, log.INFO)
        else:
            msg = 'Upload failed (%s): %s' % (status, reason)
            self.announce(msg, log.ERROR)
            raise DistutilsError(msg)
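
# Editor's note (not part of distutils): each form field written above is
# framed as
#   \r\n--<boundary>\r\nContent-Disposition: form-data; name="<key>"\r\n\r\n<value>
# with an extra 'filename="..."' parameter for the tuple-valued fields
# ('content' and 'gpg_signature').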
@@ -0,0 +1,130 @@
"""distutils.pypirc

Provides the PyPIRCCommand class, the base class for the command classes
that use .pypirc in the distutils.command package.
"""
import os
from configparser import RawConfigParser

from distutils.cmd import Command

DEFAULT_PYPIRC = """\
[distutils]
index-servers =
    pypi

[pypi]
username:%s
password:%s
"""

class PyPIRCCommand(Command):
    """Base command that knows how to handle the .pypirc file
    """
    DEFAULT_REPOSITORY = 'https://upload.pypi.org/legacy/'
    DEFAULT_REALM = 'pypi'
    repository = None
    realm = None

    user_options = [
        ('repository=', 'r',
         "url of repository [default: %s]" % \
            DEFAULT_REPOSITORY),
        ('show-response', None,
         'display full response text from server')]

    boolean_options = ['show-response']

    def _get_rc_file(self):
        """Returns rc file path."""
        return os.path.join(os.path.expanduser('~'), '.pypirc')

    def _store_pypirc(self, username, password):
        """Creates a default .pypirc file."""
        rc = self._get_rc_file()
        with os.fdopen(os.open(rc, os.O_CREAT | os.O_WRONLY, 0o600), 'w') as f:
            f.write(DEFAULT_PYPIRC % (username, password))

    def _read_pypirc(self):
        """Reads the .pypirc file."""
        rc = self._get_rc_file()
        if os.path.exists(rc):
            self.announce('Using PyPI login from %s' % rc)
            repository = self.repository or self.DEFAULT_REPOSITORY

            config = RawConfigParser()
            config.read(rc)
            sections = config.sections()
            if 'distutils' in sections:
                # let's get the list of servers
                index_servers = config.get('distutils', 'index-servers')
                _servers = [server.strip() for server in
                            index_servers.split('\n')
                            if server.strip() != '']
                if _servers == []:
                    # nothing set, let's try to get the default pypi
                    if 'pypi' in sections:
                        _servers = ['pypi']
                    else:
                        # the file is not properly defined, returning
                        # an empty dict
                        return {}
                for server in _servers:
                    current = {'server': server}
                    current['username'] = config.get(server, 'username')

                    # optional params
                    for key, default in (('repository',
                                          self.DEFAULT_REPOSITORY),
                                         ('realm', self.DEFAULT_REALM),
                                         ('password', None)):
                        if config.has_option(server, key):
                            current[key] = config.get(server, key)
                        else:
                            current[key] = default

                    # work around people having "repository" for the "pypi"
                    # section of their config set to the HTTP (rather than
                    # HTTPS) URL
                    if (server == 'pypi' and
                        repository in (self.DEFAULT_REPOSITORY, 'pypi')):
                        current['repository'] = self.DEFAULT_REPOSITORY
                        return current

                    if (current['server'] == repository or
                        current['repository'] == repository):
                        return current
            elif 'server-login' in sections:
                # old format
                server = 'server-login'
                if config.has_option(server, 'repository'):
                    repository = config.get(server, 'repository')
                else:
                    repository = self.DEFAULT_REPOSITORY
                return {'username': config.get(server, 'username'),
                        'password': config.get(server, 'password'),
                        'repository': repository,
                        'server': server,
                        'realm': self.DEFAULT_REALM}

        return {}

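    # Editor's sketch (not part of distutils): a multi-server .pypirc this
    # parser accepts; every name under index-servers gets its own section,
    # and 'repository', 'realm' and 'password' are optional per server.
    #
    #     [distutils]
    #     index-servers =
    #         pypi
    #         internal
    #
    #     [pypi]
    #     username: fred
    #
    #     [internal]
    #     repository: https://pypi.example.internal/   (hypothetical URL)
    #     username: fred
    #     password: sekrit
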
    def _read_pypi_response(self, response):
        """Read and decode a PyPI HTTP response."""
        import cgi
        content_type = response.getheader('content-type', 'text/plain')
        encoding = cgi.parse_header(content_type)[1].get('charset', 'ascii')
        return response.read().decode(encoding)

    def initialize_options(self):
        """Initialize options."""
        self.repository = None
        self.realm = None
        self.show_response = 0

    def finalize_options(self):
        """Finalizes options."""
        if self.repository is None:
            self.repository = self.DEFAULT_REPOSITORY
        if self.realm is None:
            self.realm = self.DEFAULT_REALM
@@ -0,0 +1,249 @@
"""distutils.core

The only module that needs to be imported to use the Distutils; provides
the 'setup' function (which is to be called from the setup script).  Also
indirectly provides the Distribution and Command classes, although they are
really defined in distutils.dist and distutils.cmd.
"""

import os
import sys
import tokenize

from distutils.debug import DEBUG
from distutils.errors import *

# Mainly import these so setup scripts can "from distutils.core import" them.
from distutils.dist import Distribution
from distutils.cmd import Command
from distutils.config import PyPIRCCommand
from distutils.extension import Extension

# This is a barebones help message generated and displayed when the user
# runs the setup script with no arguments at all.  More useful help
# is generated with various --help options: global help, list commands,
# and per-command help.
USAGE = """\
usage: %(script)s [global_opts] cmd1 [cmd1_opts] [cmd2 [cmd2_opts] ...]
   or: %(script)s --help [cmd1 cmd2 ...]
   or: %(script)s --help-commands
   or: %(script)s cmd --help
"""

def gen_usage (script_name):
    script = os.path.basename(script_name)
    return USAGE % vars()


# Some mild magic to control the behaviour of 'setup()' from 'run_setup()'.
_setup_stop_after = None
_setup_distribution = None

# Legal keyword arguments for the setup() function
setup_keywords = ('distclass', 'script_name', 'script_args', 'options',
                  'name', 'version', 'author', 'author_email',
                  'maintainer', 'maintainer_email', 'url', 'license',
                  'description', 'long_description', 'keywords',
                  'platforms', 'classifiers', 'download_url',
                  'requires', 'provides', 'obsoletes',
                  )

# Legal keyword arguments for the Extension constructor
extension_keywords = ('name', 'sources', 'include_dirs',
                      'define_macros', 'undef_macros',
                      'library_dirs', 'libraries', 'runtime_library_dirs',
                      'extra_objects', 'extra_compile_args', 'extra_link_args',
                      'swig_opts', 'export_symbols', 'depends', 'language')

def setup (**attrs):
    """The gateway to the Distutils: do everything your setup script needs
    to do, in a highly flexible and user-driven way.  Briefly: create a
    Distribution instance; find and parse config files; parse the command
    line; run each Distutils command found there, customized by the options
    supplied to 'setup()' (as keyword arguments), in config files, and on
    the command line.

    The Distribution instance might be an instance of a class supplied via
    the 'distclass' keyword argument to 'setup'; if no such class is
    supplied, then the Distribution class (in dist.py) is instantiated.
    All other arguments to 'setup' (except for 'cmdclass') are used to set
    attributes of the Distribution instance.

    The 'cmdclass' argument, if supplied, is a dictionary mapping command
    names to command classes.  Each command encountered on the command line
    will be turned into a command class, which is in turn instantiated; any
    class found in 'cmdclass' is used in place of the default, which is
    (for command 'foo_bar') class 'foo_bar' in module
    'distutils.command.foo_bar'.  The command class must provide a
    'user_options' attribute which is a list of option specifiers for
    'distutils.fancy_getopt'.  Any command-line options between the current
    and the next command are used to set attributes of the current command
    object.

    When the entire command-line has been successfully parsed, calls the
    'run()' method on each command object in turn.  This method will be
    driven entirely by the Distribution object (which each command object
    has a reference to, thanks to its constructor), and the
    command-specific options that became attributes of each command
    object.
    """

    global _setup_stop_after, _setup_distribution

    # Determine the distribution class -- either caller-supplied or
    # our Distribution (see below).
    klass = attrs.get('distclass')
    if klass:
        del attrs['distclass']
    else:
        klass = Distribution

    if 'script_name' not in attrs:
        attrs['script_name'] = os.path.basename(sys.argv[0])
    if 'script_args' not in attrs:
        attrs['script_args'] = sys.argv[1:]

    # Create the Distribution instance, using the remaining arguments
    # (ie. everything except distclass) to initialize it
    try:
        _setup_distribution = dist = klass(attrs)
    except DistutilsSetupError as msg:
        if 'name' not in attrs:
            raise SystemExit("error in setup command: %s" % msg)
        else:
            raise SystemExit("error in %s setup command: %s" % \
                  (attrs['name'], msg))

    if _setup_stop_after == "init":
        return dist

    # Find and parse the config file(s): they will override options from
    # the setup script, but be overridden by the command line.
    dist.parse_config_files()

    if DEBUG:
        print("options (after parsing config files):")
        dist.dump_option_dicts()

    if _setup_stop_after == "config":
        return dist

    # Parse the command line and override config files; any
    # command-line errors are the end user's fault, so turn them into
    # SystemExit to suppress tracebacks.
    try:
        ok = dist.parse_command_line()
    except DistutilsArgError as msg:
        raise SystemExit(gen_usage(dist.script_name) + "\nerror: %s" % msg)

    if DEBUG:
        print("options (after parsing command line):")
        dist.dump_option_dicts()

    if _setup_stop_after == "commandline":
        return dist

    # And finally, run all the commands found on the command line.
    if ok:
        return run_commands(dist)

    return dist

# setup ()


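# Editor's sketch (not part of distutils): the minimal setup script this
# function is designed to be called from.
#
#     # setup.py
#     from distutils.core import setup
#     setup(name='example-pkg',        # hypothetical project
#           version='0.1',
#           py_modules=['example'])
#
# Running "python setup.py sdist" then walks the path described above:
# Distribution creation, config parsing, command-line parsing, run_commands().

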
def run_commands (dist):
    """Given a Distribution object run all the commands,
    raising ``SystemExit`` errors in the case of failure.

    This function assumes that either ``sys.argv`` or ``dist.script_args``
    is already set accordingly.
    """
    try:
        dist.run_commands()
    except KeyboardInterrupt:
        raise SystemExit("interrupted")
    except OSError as exc:
        if DEBUG:
            sys.stderr.write("error: %s\n" % (exc,))
            raise
        else:
            raise SystemExit("error: %s" % (exc,))

    except (DistutilsError,
            CCompilerError) as msg:
        if DEBUG:
            raise
        else:
            raise SystemExit("error: " + str(msg))

    return dist


def run_setup (script_name, script_args=None, stop_after="run"):
|
||||
"""Run a setup script in a somewhat controlled environment, and
|
||||
return the Distribution instance that drives things. This is useful
|
||||
if you need to find out the distribution meta-data (passed as
|
||||
keyword args from 'script' to 'setup()', or the contents of the
|
||||
config files or command-line.
|
||||
|
||||
'script_name' is a file that will be read and run with 'exec()';
|
||||
'sys.argv[0]' will be replaced with 'script' for the duration of the
|
||||
call. 'script_args' is a list of strings; if supplied,
|
||||
'sys.argv[1:]' will be replaced by 'script_args' for the duration of
|
||||
the call.
|
||||
|
||||
'stop_after' tells 'setup()' when to stop processing; possible
|
||||
values:
|
||||
init
|
||||
stop after the Distribution instance has been created and
|
||||
populated with the keyword arguments to 'setup()'
|
||||
config
|
||||
stop after config files have been parsed (and their data
|
||||
stored in the Distribution instance)
|
||||
commandline
|
||||
stop after the command-line ('sys.argv[1:]' or 'script_args')
|
||||
have been parsed (and the data stored in the Distribution)
|
||||
run [default]
|
||||
stop after all commands have been run (the same as if 'setup()'
|
||||
had been called in the usual way
|
||||
|
||||
Returns the Distribution instance, which provides all information
|
||||
used to drive the Distutils.
|
||||
"""
|
||||
if stop_after not in ('init', 'config', 'commandline', 'run'):
|
||||
raise ValueError("invalid value for 'stop_after': %r" % (stop_after,))
|
||||
|
||||
global _setup_stop_after, _setup_distribution
|
||||
_setup_stop_after = stop_after
|
||||
|
||||
save_argv = sys.argv.copy()
|
||||
g = {'__file__': script_name, '__name__': '__main__'}
|
||||
try:
|
||||
try:
|
||||
sys.argv[0] = script_name
|
||||
if script_args is not None:
|
||||
sys.argv[1:] = script_args
|
||||
# tokenize.open supports automatic encoding detection
|
||||
with tokenize.open(script_name) as f:
|
||||
code = f.read().replace(r'\r\n', r'\n')
|
||||
exec(code, g)
|
||||
finally:
|
||||
sys.argv = save_argv
|
||||
_setup_stop_after = None
|
||||
except SystemExit:
|
||||
# Hmm, should we do something if exiting with a non-zero code
|
||||
# (ie. error)?
|
||||
pass
|
||||
|
||||
if _setup_distribution is None:
|
||||
raise RuntimeError(("'distutils.core.setup()' was never called -- "
|
||||
"perhaps '%s' is not a Distutils setup script?") % \
|
||||
script_name)
|
||||
|
||||
# I wonder if the setup script's namespace -- g and l -- would be of
|
||||
# any interest to callers?
|
||||
#print "_setup_distribution:", _setup_distribution
|
||||
return _setup_distribution
|
||||
|
||||
# run_setup ()
|
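

# Editor's sketch (not part of distutils): introspecting a setup script's
# meta-data without running any of its commands.
def _example_introspect():  # hypothetical helper, never called
    dist = run_setup('setup.py', stop_after='init')
    return dist.metadata.name, dist.metadata.version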
@@ -0,0 +1,362 @@
"""distutils.cygwinccompiler

Provides the CygwinCCompiler class, a subclass of UnixCCompiler that
handles the Cygwin port of the GNU C compiler to Windows.  It also contains
the Mingw32CCompiler class which handles the mingw32 port of GCC (same as
cygwin in no-cygwin mode).
"""

# problems:
#
# * if you use a msvc compiled python version (1.5.2)
#   1. you have to insert a __GNUC__ section in its config.h
#   2. you have to generate an import library for its dll
#      - create a def-file for python??.dll
#      - create an import library using
#        dlltool --dllname python15.dll --def python15.def \
#          --output-lib libpython15.a
#
#   see also http://starship.python.net/crew/kernr/mingw32/Notes.html
#
# * We put export_symbols in a def-file, and don't use
#   --export-all-symbols because it didn't work reliably in some
#   tested configurations.  And because other windows compilers also
#   need their symbols specified, this is no serious problem.
#
# tested configurations:
#
# * cygwin gcc 2.91.57/ld 2.9.4/dllwrap 0.2.4 works
#   (after patching python's config.h and for C++ some other include files)
#   see also http://starship.python.net/crew/kernr/mingw32/Notes.html
# * mingw32 gcc 2.95.2/ld 2.9.4/dllwrap 0.2.4 works
#   (ld doesn't support -shared, so we use dllwrap)
# * cygwin gcc 2.95.2/ld 2.10.90/dllwrap 2.10.90 works now
#   - its dllwrap doesn't work, there is a bug in binutils 2.10.90
#     see also http://sources.redhat.com/ml/cygwin/2000-06/msg01274.html
#   - using gcc -mdll instead of dllwrap doesn't work without -static,
#     because it tries to link against dlls instead of their import
#     libraries (if it finds the dll first).
#     By specifying -static we force ld to link against the import
#     libraries; this is the Windows standard, and the necessary symbols
#     are normally not in the dlls.
#     *** only the version of June 2000 shows these problems
# * cygwin gcc 3.2/ld 2.13.90 works
#   (ld supports -shared)
# * mingw gcc 3.2/ld 2.13 works
#   (ld supports -shared)
# * llvm-mingw with Clang 11 works
#   (lld supports -shared)

import os
import sys
import copy
import shlex
import warnings
from subprocess import check_output

from distutils.unixccompiler import UnixCCompiler
from distutils.file_util import write_file
from distutils.errors import (DistutilsExecError, CCompilerError,
                              CompileError, UnknownFileError)
from distutils.version import LooseVersion, suppress_known_deprecation

def get_msvcr():
    """Include the appropriate MSVC runtime library if Python was built
    with MSVC 7.0 or later.
    """
    msc_pos = sys.version.find('MSC v.')
    if msc_pos != -1:
        msc_ver = sys.version[msc_pos+6:msc_pos+10]
        if msc_ver == '1300':
            # MSVC 7.0
            return ['msvcr70']
        elif msc_ver == '1310':
            # MSVC 7.1
            return ['msvcr71']
        elif msc_ver == '1400':
            # VS2005 / MSVC 8.0
            return ['msvcr80']
        elif msc_ver == '1500':
            # VS2008 / MSVC 9.0
            return ['msvcr90']
        elif msc_ver == '1600':
            # VS2010 / MSVC 10.0
            return ['msvcr100']
        elif msc_ver == '1700':
            # VS2012 / MSVC 11.0
            return ['msvcr110']
        elif msc_ver == '1800':
            # VS2013 / MSVC 12.0
            return ['msvcr120']
        elif 1900 <= int(msc_ver) < 2000:
            # VS2015 / MSVC 14.0
            return ['ucrt', 'vcruntime140']
        else:
            raise ValueError("Unknown MS Compiler version %s " % msc_ver)


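# Editor's sketch (not part of distutils): on an MSVC build, sys.version
# looks like "3.10.4 ... [MSC v.1929 64 bit (AMD64)]"; the slicing above
# extracts '1929', which get_msvcr() maps to ['ucrt', 'vcruntime140'].
def _example_msc_ver(version='3.10.4 [MSC v.1929 64 bit (AMD64)]'):
    # hypothetical helper, never called
    pos = version.find('MSC v.')
    return version[pos+6:pos+10] if pos != -1 else None

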
class CygwinCCompiler(UnixCCompiler):
    """ Handles the Cygwin port of the GNU C compiler to Windows.
    """
    compiler_type = 'cygwin'
    obj_extension = ".o"
    static_lib_extension = ".a"
    shared_lib_extension = ".dll"
    static_lib_format = "lib%s%s"
    shared_lib_format = "%s%s"
    exe_extension = ".exe"

    def __init__(self, verbose=0, dry_run=0, force=0):

        UnixCCompiler.__init__(self, verbose, dry_run, force)

        status, details = check_config_h()
        self.debug_print("Python's GCC status: %s (details: %s)" %
                         (status, details))
        if status is not CONFIG_H_OK:
            self.warn(
                "Python's pyconfig.h doesn't seem to support your compiler. "
                "Reason: %s. "
                "Compiling may fail because of undefined preprocessor macros."
                % details)

        self.cc = os.environ.get('CC', 'gcc')
        self.cxx = os.environ.get('CXX', 'g++')

        self.linker_dll = self.cc
        shared_option = "-shared"

        self.set_executables(compiler='%s -mcygwin -O -Wall' % self.cc,
                             compiler_so='%s -mcygwin -mdll -O -Wall' % self.cc,
                             compiler_cxx='%s -mcygwin -O -Wall' % self.cxx,
                             linker_exe='%s -mcygwin' % self.cc,
                             linker_so=('%s -mcygwin %s' %
                                        (self.linker_dll, shared_option)))

        # Include the appropriate MSVC runtime library if Python was built
        # with MSVC 7.0 or later.
        self.dll_libraries = get_msvcr()

    @property
    def gcc_version(self):
        # Older numpy depended on this existing to check for ancient
        # gcc versions.  This doesn't make much sense with clang etc so
        # just hardcode to something recent.
        # https://github.com/numpy/numpy/pull/20333
        warnings.warn(
            "gcc_version attribute of CygwinCCompiler is deprecated. "
            "Instead of returning actual gcc version a fixed value 11.2.0 is returned.",
            DeprecationWarning,
            stacklevel=2,
        )
        with suppress_known_deprecation():
            return LooseVersion("11.2.0")

    def _compile(self, obj, src, ext, cc_args, extra_postargs, pp_opts):
        """Compiles the source by spawning GCC and windres if needed."""
        if ext == '.rc' or ext == '.res':
            # gcc needs '.res' and '.rc' compiled to object files !!!
            try:
                self.spawn(["windres", "-i", src, "-o", obj])
            except DistutilsExecError as msg:
                raise CompileError(msg)
        else:  # for other files use the C-compiler
            try:
                self.spawn(self.compiler_so + cc_args + [src, '-o', obj] +
                           extra_postargs)
            except DistutilsExecError as msg:
                raise CompileError(msg)

    def link(self, target_desc, objects, output_filename, output_dir=None,
             libraries=None, library_dirs=None, runtime_library_dirs=None,
             export_symbols=None, debug=0, extra_preargs=None,
             extra_postargs=None, build_temp=None, target_lang=None):
        """Link the objects."""
        # use separate copies, so we can modify the lists
        extra_preargs = copy.copy(extra_preargs or [])
        libraries = copy.copy(libraries or [])
        objects = copy.copy(objects or [])

        # Additional libraries
        libraries.extend(self.dll_libraries)

        # handle export symbols by creating a def-file
        # with executables this only works with gcc/ld as linker
        if ((export_symbols is not None) and
            (target_desc != self.EXECUTABLE or self.linker_dll == "gcc")):
            # (The linker doesn't do anything if output is up-to-date.
            # So it would probably be better to check if we really need this,
            # but for this we had to insert some unchanged parts of
            # UnixCCompiler, and this is not what we want.)

            # we want to put some files in the same directory as the
            # object files are; build_temp doesn't help much
            # where are the object files
            temp_dir = os.path.dirname(objects[0])
            # name of dll to give the helper files the same base name
            (dll_name, dll_extension) = os.path.splitext(
                os.path.basename(output_filename))

            # generate the filenames for these files
            def_file = os.path.join(temp_dir, dll_name + ".def")
            lib_file = os.path.join(temp_dir, 'lib' + dll_name + ".a")

            # Generate .def file
            contents = [
                "LIBRARY %s" % os.path.basename(output_filename),
                "EXPORTS"]
            for sym in export_symbols:
                contents.append(sym)
            self.execute(write_file, (def_file, contents),
                         "writing %s" % def_file)

            # next, add options for the def-file and for creating import
            # libraries

            # doesn't work: bfd_close build\...\libfoo.a: Invalid operation
            #extra_preargs.extend(["-Wl,--out-implib,%s" % lib_file])
            # for gcc/ld the def-file is passed like any other object file
            objects.append(def_file)

        #end: if ((export_symbols is not None) and
        #        (target_desc != self.EXECUTABLE or self.linker_dll == "gcc")):

        # whoever wants symbols and a many times larger output file
        # should explicitly switch debug mode on;
        # otherwise we let ld strip the output file
        # (On my machine: 10KiB < stripped_file < ??100KiB
        #   unstripped_file = stripped_file + XXX KiB
        #  ( XXX=254 for a typical python extension))
        if not debug:
            extra_preargs.append("-s")

        UnixCCompiler.link(self, target_desc, objects, output_filename,
                           output_dir, libraries, library_dirs,
                           runtime_library_dirs,
                           None,  # export_symbols, we do this in our def-file
                           debug, extra_preargs, extra_postargs, build_temp,
                           target_lang)

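    # Editor's note (not part of distutils): for an extension module "spam"
    # exporting only its init function, the generated .def file would read
    # something like
    #
    #     LIBRARY spam.pyd
    #     EXPORTS
    #     PyInit_spam
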
    # -- Miscellaneous methods -----------------------------------------

    def object_filenames(self, source_filenames, strip_dir=0, output_dir=''):
        """Adds support for rc and res files."""
        if output_dir is None:
            output_dir = ''
        obj_names = []
        for src_name in source_filenames:
            # use normcase to make sure '.rc' is really '.rc' and not '.RC'
            base, ext = os.path.splitext(os.path.normcase(src_name))
            if ext not in (self.src_extensions + ['.rc','.res']):
                raise UnknownFileError("unknown file type '%s' (from '%s')" % \
                      (ext, src_name))
            if strip_dir:
                base = os.path.basename(base)
            if ext in ('.res', '.rc'):
                # these need to be compiled to object files
                obj_names.append(os.path.join(output_dir,
                                              base + ext + self.obj_extension))
            else:
                obj_names.append(os.path.join(output_dir,
                                              base + self.obj_extension))
        return obj_names

# the same as cygwin plus some additional parameters
class Mingw32CCompiler(CygwinCCompiler):
    """ Handles the Mingw32 port of the GNU C compiler to Windows.
    """
    compiler_type = 'mingw32'

    def __init__(self, verbose=0, dry_run=0, force=0):

        CygwinCCompiler.__init__ (self, verbose, dry_run, force)

        shared_option = "-shared"

        if is_cygwincc(self.cc):
            raise CCompilerError(
                'Cygwin gcc cannot be used with --compiler=mingw32')

        self.set_executables(compiler='%s -O -Wall' % self.cc,
                             compiler_so='%s -mdll -O -Wall' % self.cc,
                             compiler_cxx='%s -O -Wall' % self.cxx,
                             linker_exe='%s' % self.cc,
                             linker_so='%s %s'
                                       % (self.linker_dll, shared_option))

        # Maybe we should also append -mthreads, but then the finished
        # dlls need another dll (mingwm10.dll see Mingw32 docs)
        # (-mthreads: Support thread-safe exception handling on `Mingw32')

        # no additional libraries needed
        self.dll_libraries=[]

        # Include the appropriate MSVC runtime library if Python was built
        # with MSVC 7.0 or later.
        self.dll_libraries = get_msvcr()

# Because these compilers aren't configured in Python's pyconfig.h file by
# default, we should at least warn the user if he is using an unmodified
# version.

CONFIG_H_OK = "ok"
CONFIG_H_NOTOK = "not ok"
CONFIG_H_UNCERTAIN = "uncertain"

def check_config_h():
    """Check if the current Python installation appears amenable to building
    extensions with GCC.

    Returns a tuple (status, details), where 'status' is one of the following
    constants:

    - CONFIG_H_OK: all is well, go ahead and compile
    - CONFIG_H_NOTOK: doesn't look good
    - CONFIG_H_UNCERTAIN: not sure -- unable to read pyconfig.h

    'details' is a human-readable string explaining the situation.

    Note there are two ways to conclude "OK": either 'sys.version' contains
    the string "GCC" (implying that this Python was built with GCC), or the
    installed "pyconfig.h" contains the string "__GNUC__".
    """

    # XXX since this function also checks sys.version, it's not strictly a
    # "pyconfig.h" check -- should probably be renamed...

    from distutils import sysconfig

    # if sys.version contains GCC then python was compiled with GCC, and the
    # pyconfig.h file should be OK
    if "GCC" in sys.version:
        return CONFIG_H_OK, "sys.version mentions 'GCC'"

    # Clang would also work
    if "Clang" in sys.version:
        return CONFIG_H_OK, "sys.version mentions 'Clang'"

    # let's see if __GNUC__ is mentioned in python.h
    fn = sysconfig.get_config_h_filename()
    try:
        config_h = open(fn)
        try:
            if "__GNUC__" in config_h.read():
                return CONFIG_H_OK, "'%s' mentions '__GNUC__'" % fn
            else:
                return CONFIG_H_NOTOK, "'%s' does not mention '__GNUC__'" % fn
        finally:
            config_h.close()
    except OSError as exc:
        return (CONFIG_H_UNCERTAIN,
                "couldn't read '%s': %s" % (fn, exc.strerror))

def is_cygwincc(cc):
|
||||
'''Try to determine if the compiler that would be used is from cygwin.'''
|
||||
out_string = check_output(shlex.split(cc) + ['-dumpmachine'])
|
||||
return out_string.strip().endswith(b'cygwin')
|
||||
|
||||
|
||||
get_versions = None
|
||||
"""
|
||||
A stand-in for the previous get_versions() function to prevent failures
|
||||
when monkeypatched. See pypa/setuptools#2969.
|
||||
"""
|
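Together, check_config_h() and is_cygwincc() let a build front-end probe the toolchain before committing to one of these compiler classes. A minimal sketch, assuming the module is importable under its usual distutils path and that a gcc executable is on PATH (is_cygwincc() spawns 'gcc -dumpmachine'):

from distutils.cygwinccompiler import check_config_h, is_cygwincc

status, details = check_config_h()
print("pyconfig.h check: %s (%s)" % (status, details))
print("default gcc is Cygwin's:", is_cygwincc("gcc"))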
@ -0,0 +1,5 @@
import os

# If DISTUTILS_DEBUG is anything other than the empty string, we run in
# debug mode.
DEBUG = os.environ.get('DISTUTILS_DEBUG')
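A quick sketch of the flag in action (using the stdlib import path); the variable only needs to be non-empty, and it is read once, at import time:

import os
os.environ['DISTUTILS_DEBUG'] = '1'   # any non-empty value enables debug mode

from distutils.debug import DEBUG
print(bool(DEBUG))                    # -> True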
@ -0,0 +1,92 @@
"""distutils.dep_util

Utility functions for simple, timestamp-based dependency of files
and groups of files; also, functions based entirely on such
timestamp dependency analysis."""

import os
from distutils.errors import DistutilsFileError


def newer(source, target):
    """Return true if 'source' exists and is more recently modified than
    'target', or if 'source' exists and 'target' doesn't.  Return false if
    both exist and 'target' is the same age or younger than 'source'.
    Raise DistutilsFileError if 'source' does not exist.
    """
    if not os.path.exists(source):
        raise DistutilsFileError("file '%s' does not exist" %
                                 os.path.abspath(source))
    if not os.path.exists(target):
        return 1

    from stat import ST_MTIME
    mtime1 = os.stat(source)[ST_MTIME]
    mtime2 = os.stat(target)[ST_MTIME]

    return mtime1 > mtime2

# newer ()


def newer_pairwise(sources, targets):
    """Walk two filename lists in parallel, testing if each source is newer
    than its corresponding target.  Return a pair of lists (sources,
    targets) where source is newer than target, according to the semantics
    of 'newer()'.
    """
    if len(sources) != len(targets):
        raise ValueError("'sources' and 'targets' must be same length")

    # build a pair of lists (sources, targets) where source is newer
    n_sources = []
    n_targets = []
    for i in range(len(sources)):
        if newer(sources[i], targets[i]):
            n_sources.append(sources[i])
            n_targets.append(targets[i])

    return (n_sources, n_targets)

# newer_pairwise ()


def newer_group(sources, target, missing='error'):
    """Return true if 'target' is out-of-date with respect to any file
    listed in 'sources'.  In other words, if 'target' exists and is newer
    than every file in 'sources', return false; otherwise return true.
    'missing' controls what we do when a source file is missing; the
    default ("error") is to blow up with an OSError from inside 'stat()';
    if it is "ignore", we silently drop any missing source files; if it is
    "newer", any missing source files make us assume that 'target' is
    out-of-date (this is handy in "dry-run" mode: it'll make you pretend to
    carry out commands that wouldn't work because inputs are missing, but
    that doesn't matter because you're not actually going to run the
    commands).
    """
    # If the target doesn't even exist, then it's definitely out-of-date.
    if not os.path.exists(target):
        return 1

    # Otherwise we have to find out the hard way: if *any* source file
    # is more recent than 'target', then 'target' is out-of-date and
    # we can immediately return true.  If we fall through to the end
    # of the loop, then 'target' is up-to-date and we return false.
    from stat import ST_MTIME
    target_mtime = os.stat(target)[ST_MTIME]
    for source in sources:
        if not os.path.exists(source):
            if missing == 'error':      # blow up when we stat() the file
                pass
            elif missing == 'ignore':   # missing source dropped from
                continue                #  target's dependency list
            elif missing == 'newer':    # missing source means target is
                return 1                #  out-of-date

        source_mtime = os.stat(source)[ST_MTIME]
        if source_mtime > target_mtime:
            return 1
    else:
        return 0

# newer_group ()
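A short sketch of how these helpers drive make-style rebuild decisions (the file names here are hypothetical):

from distutils.dep_util import newer, newer_group

if newer('spam.c', 'spam.o'):          # source changed after last compile?
    print('spam.o is out of date')

# missing='newer' treats absent sources as grounds for a rebuild, which is
# the dry-run-friendly behaviour described in the docstring above.
if newer_group(['spam.o', 'eggs.o'], 'prog', missing='newer'):
    print('prog must be relinked')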
@ -0,0 +1,210 @@
"""distutils.dir_util

Utility functions for manipulating directories and directory trees."""

import os
import errno
from distutils.errors import DistutilsFileError, DistutilsInternalError
from distutils import log

# cache used by mkpath() -- in addition to cheapening redundant calls,
# eliminates redundant "creating /foo/bar/baz" messages in dry-run mode
_path_created = {}

# I don't use os.makedirs because a) it's new to Python 1.5.2, and
# b) it blows up if the directory already exists (I want to silently
# succeed in that case).
def mkpath(name, mode=0o777, verbose=1, dry_run=0):
    """Create a directory and any missing ancestor directories.

    If the directory already exists (or if 'name' is the empty string, which
    means the current directory, which of course exists), then do nothing.
    Raise DistutilsFileError if unable to create some directory along the way
    (eg. some sub-path exists, but is a file rather than a directory).
    If 'verbose' is true, print a one-line summary of each mkdir to stdout.
    Return the list of directories actually created.
    """

    global _path_created

    # Detect a common bug -- name is None
    if not isinstance(name, str):
        raise DistutilsInternalError(
              "mkpath: 'name' must be a string (got %r)" % (name,))

    # XXX what's the better way to handle verbosity? print as we create
    # each directory in the path (the current behaviour), or only announce
    # the creation of the whole path? (quite easy to do the latter since
    # we're not using a recursive algorithm)

    name = os.path.normpath(name)
    created_dirs = []
    if os.path.isdir(name) or name == '':
        return created_dirs
    if _path_created.get(os.path.abspath(name)):
        return created_dirs

    (head, tail) = os.path.split(name)
    tails = [tail]                      # stack of lone dirs to create

    while head and tail and not os.path.isdir(head):
        (head, tail) = os.path.split(head)
        tails.insert(0, tail)           # push next higher dir onto stack

    # now 'head' contains the deepest directory that already exists
    # (that is, the child of 'head' in 'name' is the highest directory
    # that does *not* exist)
    for d in tails:
        #print "head = %s, d = %s: " % (head, d),
        head = os.path.join(head, d)
        abs_head = os.path.abspath(head)

        if _path_created.get(abs_head):
            continue

        if verbose >= 1:
            log.info("creating %s", head)

        if not dry_run:
            try:
                os.mkdir(head, mode)
            except OSError as exc:
                if not (exc.errno == errno.EEXIST and os.path.isdir(head)):
                    raise DistutilsFileError(
                          "could not create '%s': %s" % (head, exc.args[-1]))
            created_dirs.append(head)

        _path_created[abs_head] = 1
    return created_dirs


def create_tree(base_dir, files, mode=0o777, verbose=1, dry_run=0):
    """Create all the empty directories under 'base_dir' needed to put 'files'
    there.

    'base_dir' is just the name of a directory which doesn't necessarily
    exist yet; 'files' is a list of filenames to be interpreted relative to
    'base_dir'.  'base_dir' + the directory portion of every file in 'files'
    will be created if it doesn't already exist.  'mode', 'verbose' and
    'dry_run' flags are as for 'mkpath()'.
    """
    # First get the list of directories to create
    need_dir = set()
    for file in files:
        need_dir.add(os.path.join(base_dir, os.path.dirname(file)))

    # Now create them
    for dir in sorted(need_dir):
        mkpath(dir, mode, verbose=verbose, dry_run=dry_run)


def copy_tree(src, dst, preserve_mode=1, preserve_times=1,
              preserve_symlinks=0, update=0, verbose=1, dry_run=0):
    """Copy an entire directory tree 'src' to a new location 'dst'.

    Both 'src' and 'dst' must be directory names.  If 'src' is not a
    directory, raise DistutilsFileError.  If 'dst' does not exist, it is
    created with 'mkpath()'.  The end result of the copy is that every
    file in 'src' is copied to 'dst', and directories under 'src' are
    recursively copied to 'dst'.  Return the list of files that were
    copied or might have been copied, using their output name.  The
    return value is unaffected by 'update' or 'dry_run': it is simply
    the list of all files under 'src', with the names changed to be
    under 'dst'.

    'preserve_mode' and 'preserve_times' are the same as for
    'copy_file'; note that they only apply to regular files, not to
    directories.  If 'preserve_symlinks' is true, symlinks will be
    copied as symlinks (on platforms that support them!); otherwise
    (the default), the destination of the symlink will be copied.
    'update' and 'verbose' are the same as for 'copy_file'.
    """
    from distutils.file_util import copy_file

    if not dry_run and not os.path.isdir(src):
        raise DistutilsFileError(
              "cannot copy tree '%s': not a directory" % src)
    try:
        names = os.listdir(src)
    except OSError as e:
        if dry_run:
            names = []
        else:
            raise DistutilsFileError(
                  "error listing files in '%s': %s" % (src, e.strerror))

    if not dry_run:
        mkpath(dst, verbose=verbose)

    outputs = []

    for n in names:
        src_name = os.path.join(src, n)
        dst_name = os.path.join(dst, n)

        if n.startswith('.nfs'):
            # skip NFS rename files
            continue

        if preserve_symlinks and os.path.islink(src_name):
            link_dest = os.readlink(src_name)
            if verbose >= 1:
                log.info("linking %s -> %s", dst_name, link_dest)
            if not dry_run:
                os.symlink(link_dest, dst_name)
            outputs.append(dst_name)

        elif os.path.isdir(src_name):
            outputs.extend(
                copy_tree(src_name, dst_name, preserve_mode,
                          preserve_times, preserve_symlinks, update,
                          verbose=verbose, dry_run=dry_run))
        else:
            copy_file(src_name, dst_name, preserve_mode,
                      preserve_times, update, verbose=verbose,
                      dry_run=dry_run)
            outputs.append(dst_name)

    return outputs

def _build_cmdtuple(path, cmdtuples):
    """Helper for remove_tree()."""
    for f in os.listdir(path):
        real_f = os.path.join(path, f)
        if os.path.isdir(real_f) and not os.path.islink(real_f):
            _build_cmdtuple(real_f, cmdtuples)
        else:
            cmdtuples.append((os.remove, real_f))
    cmdtuples.append((os.rmdir, path))

def remove_tree(directory, verbose=1, dry_run=0):
    """Recursively remove an entire directory tree.

    Any errors are ignored (apart from being reported to stdout if 'verbose'
    is true).
    """
    global _path_created

    if verbose >= 1:
        log.info("removing '%s' (and everything under it)", directory)
    if dry_run:
        return
    cmdtuples = []
    _build_cmdtuple(directory, cmdtuples)
    for cmd in cmdtuples:
        try:
            cmd[0](cmd[1])
            # remove dir from cache if it's already there
            abspath = os.path.abspath(cmd[1])
            if abspath in _path_created:
                del _path_created[abspath]
        except OSError as exc:
            log.warn("error removing %s: %s", directory, exc)

def ensure_relative(path):
    """Take the full path 'path', and make it a relative path.

    This is useful to make 'path' the second argument to os.path.join().
    """
    drive, path = os.path.splitdrive(path)
    if path[0:1] == os.sep:
        path = drive + path[1:]
    return path
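These functions compose: mkpath() populates the _path_created cache that remove_tree() later invalidates, and copy_tree() recurses through copy_file(). A sketch with made-up paths (it assumes a 'src' directory exists):

from distutils.dir_util import mkpath, copy_tree, remove_tree

mkpath('build/tmp/deep')               # creates every missing ancestor
outputs = copy_tree('src', 'build/src', update=1)
print(outputs)                         # names of files now under build/src
remove_tree('build/tmp', verbose=0)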
1257
utils/python-venv/Lib/site-packages/setuptools/_distutils/dist.py
Normal file
1257
utils/python-venv/Lib/site-packages/setuptools/_distutils/dist.py
Normal file
File diff suppressed because it is too large
@ -0,0 +1,97 @@
"""distutils.errors

Provides exceptions used by the Distutils modules.  Note that Distutils
modules may raise standard exceptions; in particular, SystemExit is
usually raised for errors that are obviously the end-user's fault
(eg. bad command-line arguments).

This module is safe to use in "from ... import *" mode; it only exports
symbols whose names start with "Distutils" and end with "Error"."""

class DistutilsError (Exception):
    """The root of all Distutils evil."""
    pass

class DistutilsModuleError (DistutilsError):
    """Unable to load an expected module, or to find an expected class
    within some module (in particular, command modules and classes)."""
    pass

class DistutilsClassError (DistutilsError):
    """Some command class (or possibly distribution class, if anyone
    feels a need to subclass Distribution) is found not to be holding
    up its end of the bargain, ie. implementing some part of the
    "command" interface."""
    pass

class DistutilsGetoptError (DistutilsError):
    """The option table provided to 'fancy_getopt()' is bogus."""
    pass

class DistutilsArgError (DistutilsError):
    """Raised by fancy_getopt in response to getopt.error -- ie. an
    error in the command line usage."""
    pass

class DistutilsFileError (DistutilsError):
    """Any problems in the filesystem: expected file not found, etc.
    Typically this is for problems that we detect before OSError
    could be raised."""
    pass

class DistutilsOptionError (DistutilsError):
    """Syntactic/semantic errors in command options, such as use of
    mutually conflicting options, or inconsistent options,
    badly-spelled values, etc.  No distinction is made between option
    values originating in the setup script, the command line, config
    files, or what-have-you -- but if we *know* something originated in
    the setup script, we'll raise DistutilsSetupError instead."""
    pass

class DistutilsSetupError (DistutilsError):
    """For errors that can be definitely blamed on the setup script,
    such as invalid keyword arguments to 'setup()'."""
    pass

class DistutilsPlatformError (DistutilsError):
    """We don't know how to do something on the current platform (but
    we do know how to do it on some platform) -- eg. trying to compile
    C files on a platform not supported by a CCompiler subclass."""
    pass

class DistutilsExecError (DistutilsError):
    """Any problems executing an external program (such as the C
    compiler, when compiling C files)."""
    pass

class DistutilsInternalError (DistutilsError):
    """Internal inconsistencies or impossibilities (obviously, this
    should never be seen if the code is working!)."""
    pass

class DistutilsTemplateError (DistutilsError):
    """Syntax error in a file list template."""

class DistutilsByteCompileError(DistutilsError):
    """Byte compile error."""

# Exception classes used by the CCompiler implementation classes
class CCompilerError (Exception):
    """Some compile/link operation failed."""

class PreprocessError (CCompilerError):
    """Failure to preprocess one or more C/C++ files."""

class CompileError (CCompilerError):
    """Failure to compile one or more C/C++ source files."""

class LibError (CCompilerError):
    """Failure to create a static library from one or more C/C++ object
    files."""

class LinkError (CCompilerError):
    """Failure to link one or more C/C++ object files into an executable
    or shared library file."""

class UnknownFileError (CCompilerError):
    """Attempt to process an unknown file type."""
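Because everything here derives from DistutilsError, callers can trap the whole family with a single handler; a small sketch:

from distutils.errors import DistutilsError, DistutilsFileError

try:
    raise DistutilsFileError("expected input file is missing")
except DistutilsError as exc:
    print("build problem:", exc)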
@ -0,0 +1,240 @@
"""distutils.extension

Provides the Extension class, used to describe C/C++ extension
modules in setup scripts."""

import os
import warnings

# This class is really only used by the "build_ext" command, so it might
# make sense to put it in distutils.command.build_ext.  However, that
# module is already big enough, and I want to make this class a bit more
# complex to simplify some common cases ("foo" module in "foo.c") and do
# better error-checking ("foo.c" actually exists).
#
# Also, putting this in build_ext.py means every setup script would have to
# import that large-ish module (indirectly, through distutils.core) in
# order to do anything.

class Extension:
    """Just a collection of attributes that describes an extension
    module and everything needed to build it (hopefully in a portable
    way, but there are hooks that let you be as unportable as you need).

    Instance attributes:
      name : string
        the full name of the extension, including any packages -- ie.
        *not* a filename or pathname, but Python dotted name
      sources : [string]
        list of source filenames, relative to the distribution root
        (where the setup script lives), in Unix form (slash-separated)
        for portability.  Source files may be C, C++, SWIG (.i),
        platform-specific resource files, or whatever else is recognized
        by the "build_ext" command as source for a Python extension.
      include_dirs : [string]
        list of directories to search for C/C++ header files (in Unix
        form for portability)
      define_macros : [(name : string, value : string|None)]
        list of macros to define; each macro is defined using a 2-tuple,
        where 'value' is either the string to define it to or None to
        define it without a particular value (equivalent of "#define
        FOO" in source or -DFOO on Unix C compiler command line)
      undef_macros : [string]
        list of macros to undefine explicitly
      library_dirs : [string]
        list of directories to search for C/C++ libraries at link time
      libraries : [string]
        list of library names (not filenames or paths) to link against
      runtime_library_dirs : [string]
        list of directories to search for C/C++ libraries at run time
        (for shared extensions, this is when the extension is loaded)
      extra_objects : [string]
        list of extra files to link with (eg. object files not implied
        by 'sources', static library that must be explicitly specified,
        binary resource files, etc.)
      extra_compile_args : [string]
        any extra platform- and compiler-specific information to use
        when compiling the source files in 'sources'.  For platforms and
        compilers where "command line" makes sense, this is typically a
        list of command-line arguments, but for other platforms it could
        be anything.
      extra_link_args : [string]
        any extra platform- and compiler-specific information to use
        when linking object files together to create the extension (or
        to create a new static Python interpreter).  Similar
        interpretation as for 'extra_compile_args'.
      export_symbols : [string]
        list of symbols to be exported from a shared extension.  Not
        used on all platforms, and not generally necessary for Python
        extensions, which typically export exactly one symbol: "init" +
        extension_name.
      swig_opts : [string]
        any extra options to pass to SWIG if a source file has the .i
        extension.
      depends : [string]
        list of files that the extension depends on
      language : string
        extension language (i.e. "c", "c++", "objc"). Will be detected
        from the source extensions if not provided.
      optional : boolean
        specifies that a build failure in the extension should not abort the
        build process, but simply not install the failing extension.
    """

    # When adding arguments to this constructor, be sure to update
    # setup_keywords in core.py.
    def __init__(self, name, sources,
                 include_dirs=None,
                 define_macros=None,
                 undef_macros=None,
                 library_dirs=None,
                 libraries=None,
                 runtime_library_dirs=None,
                 extra_objects=None,
                 extra_compile_args=None,
                 extra_link_args=None,
                 export_symbols=None,
                 swig_opts=None,
                 depends=None,
                 language=None,
                 optional=None,
                 **kw  # To catch unknown keywords
                 ):
        if not isinstance(name, str):
            raise AssertionError("'name' must be a string")
        if not (isinstance(sources, list) and
                all(isinstance(v, str) for v in sources)):
            raise AssertionError("'sources' must be a list of strings")

        self.name = name
        self.sources = sources
        self.include_dirs = include_dirs or []
        self.define_macros = define_macros or []
        self.undef_macros = undef_macros or []
        self.library_dirs = library_dirs or []
        self.libraries = libraries or []
        self.runtime_library_dirs = runtime_library_dirs or []
        self.extra_objects = extra_objects or []
        self.extra_compile_args = extra_compile_args or []
        self.extra_link_args = extra_link_args or []
        self.export_symbols = export_symbols or []
        self.swig_opts = swig_opts or []
        self.depends = depends or []
        self.language = language
        self.optional = optional

        # If there are unknown keyword options, warn about them
        if len(kw) > 0:
            options = [repr(option) for option in kw]
            options = ', '.join(sorted(options))
            msg = "Unknown Extension options: %s" % options
            warnings.warn(msg)

    def __repr__(self):
        return '<%s.%s(%r) at %#x>' % (
            self.__class__.__module__,
            self.__class__.__qualname__,
            self.name,
            id(self))


def read_setup_file(filename):
    """Reads a Setup file and returns Extension instances."""
    from distutils.sysconfig import (parse_makefile, expand_makefile_vars,
                                     _variable_rx)

    from distutils.text_file import TextFile
    from distutils.util import split_quoted

    # First pass over the file to gather "VAR = VALUE" assignments.
    vars = parse_makefile(filename)

    # Second pass to gobble up the real content: lines of the form
    #   <module> ... [<sourcefile> ...] [<cpparg> ...] [<library> ...]
    file = TextFile(filename,
                    strip_comments=1, skip_blanks=1, join_lines=1,
                    lstrip_ws=1, rstrip_ws=1)
    try:
        extensions = []

        while True:
            line = file.readline()
            if line is None:                # eof
                break
            if _variable_rx.match(line):    # VAR=VALUE, handled in first pass
                continue

            if line[0] == line[-1] == "*":
                file.warn("'%s' lines not handled yet" % line)
                continue

            line = expand_makefile_vars(line, vars)
            words = split_quoted(line)

            # NB. this parses a slightly different syntax than the old
            # makesetup script: here, there must be exactly one extension per
            # line, and it must be the first word of the line.  I have no idea
            # why the old syntax supported multiple extensions per line, as
            # they all wind up being the same.

            module = words[0]
            ext = Extension(module, [])
            append_next_word = None

            for word in words[1:]:
                if append_next_word is not None:
                    append_next_word.append(word)
                    append_next_word = None
                    continue

                suffix = os.path.splitext(word)[1]
                switch = word[0:2] ; value = word[2:]

                if suffix in (".c", ".cc", ".cpp", ".cxx", ".c++", ".m", ".mm"):
                    # hmm, should we do something about C vs. C++ sources?
                    # or leave it up to the CCompiler implementation to
                    # worry about?
                    ext.sources.append(word)
                elif switch == "-I":
                    ext.include_dirs.append(value)
                elif switch == "-D":
                    equals = value.find("=")
                    if equals == -1:        # bare "-DFOO" -- no value
                        ext.define_macros.append((value, None))
                    else:                   # "-DFOO=blah"
                        ext.define_macros.append((value[0:equals],
                                                  value[equals+2:]))
                elif switch == "-U":
                    ext.undef_macros.append(value)
                elif switch == "-C":        # only here 'cause makesetup has it!
                    ext.extra_compile_args.append(word)
                elif switch == "-l":
                    ext.libraries.append(value)
                elif switch == "-L":
                    ext.library_dirs.append(value)
                elif switch == "-R":
                    ext.runtime_library_dirs.append(value)
                elif word == "-rpath":
                    append_next_word = ext.runtime_library_dirs
                elif word == "-Xlinker":
                    append_next_word = ext.extra_link_args
                elif word == "-Xcompiler":
                    append_next_word = ext.extra_compile_args
                elif switch == "-u":
                    ext.extra_link_args.append(word)
                    if not value:
                        append_next_word = ext.extra_link_args
                elif suffix in (".a", ".so", ".sl", ".o", ".dylib"):
                    # NB. a really faithful emulation of makesetup would
                    # append a .o file to extra_objects only if it
                    # had a slash in it; otherwise, it would s/.o/.c/
                    # and append it to sources.  Hmmmm.
                    ext.extra_objects.append(word)
                else:
                    file.warn("unrecognized argument '%s'" % word)

            extensions.append(ext)
    finally:
        file.close()

    return extensions
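In a setup script the class is used declaratively and handed to setup(); a minimal sketch with hypothetical module and file names:

from distutils.core import setup, Extension

demo = Extension('demo', sources=['demo.c'],
                 define_macros=[('NDEBUG', None)],
                 libraries=['m'])
setup(name='demo', version='1.0', ext_modules=[demo])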
@ -0,0 +1,457 @@
"""distutils.fancy_getopt

Wrapper around the standard getopt module that provides the following
additional features:
  * short and long options are tied together
  * options have help strings, so fancy_getopt could potentially
    create a complete usage summary
  * options set attributes of a passed-in object
"""

import sys, string, re
import getopt
from distutils.errors import *

# Much like command_re in distutils.core, this is close to but not quite
# the same as a Python NAME -- except, in the spirit of most GNU
# utilities, we use '-' in place of '_'.  (The spirit of LISP lives on!)
# The similarities to NAME are again not a coincidence...
longopt_pat = r'[a-zA-Z](?:[a-zA-Z0-9-]*)'
longopt_re = re.compile(r'^%s$' % longopt_pat)

# For recognizing "negative alias" options, eg. "quiet=!verbose"
neg_alias_re = re.compile("^(%s)=!(%s)$" % (longopt_pat, longopt_pat))

# This is used to translate long options to legitimate Python identifiers
# (for use as attributes of some object).
longopt_xlate = str.maketrans('-', '_')


class FancyGetopt:
    """Wrapper around the standard 'getopt()' module that provides some
    handy extra functionality:
      * short and long options are tied together
      * options have help strings, and help text can be assembled
        from them
      * options set attributes of a passed-in object
      * boolean options can have "negative aliases" -- eg. if
        --quiet is the "negative alias" of --verbose, then "--quiet"
        on the command line sets 'verbose' to false
    """

    def __init__(self, option_table=None):
        # The option table is (currently) a list of tuples.  The
        # tuples may have 3 or four values:
        #   (long_option, short_option, help_string [, repeatable])
        # if an option takes an argument, its long_option should have '='
        # appended; short_option should just be a single character, no ':'
        # in any case.  If a long_option doesn't have a corresponding
        # short_option, short_option should be None.  All option tuples
        # must have long options.
        self.option_table = option_table

        # 'option_index' maps long option names to entries in the option
        # table (ie. those 3-tuples).
        self.option_index = {}
        if self.option_table:
            self._build_index()

        # 'alias' records (duh) alias options; {'foo': 'bar'} means
        # --foo is an alias for --bar
        self.alias = {}

        # 'negative_alias' keeps track of options that are the boolean
        # opposite of some other option
        self.negative_alias = {}

        # These keep track of the information in the option table.  We
        # don't actually populate these structures until we're ready to
        # parse the command-line, since the 'option_table' passed in here
        # isn't necessarily the final word.
        self.short_opts = []
        self.long_opts = []
        self.short2long = {}
        self.attr_name = {}
        self.takes_arg = {}

        # And 'option_order' is filled up in 'getopt()'; it records the
        # original order of options (and their values) on the command-line,
        # but expands short options, converts aliases, etc.
        self.option_order = []

    def _build_index(self):
        self.option_index.clear()
        for option in self.option_table:
            self.option_index[option[0]] = option

    def set_option_table(self, option_table):
        self.option_table = option_table
        self._build_index()

    def add_option(self, long_option, short_option=None, help_string=None):
        if long_option in self.option_index:
            raise DistutilsGetoptError(
                "option conflict: already an option '%s'" % long_option)
        else:
            option = (long_option, short_option, help_string)
            self.option_table.append(option)
            self.option_index[long_option] = option

    def has_option(self, long_option):
        """Return true if the option table for this parser has an
        option with long name 'long_option'."""
        return long_option in self.option_index

    def get_attr_name(self, long_option):
        """Translate long option name 'long_option' to the form it
        has as an attribute of some object: ie., translate hyphens
        to underscores."""
        return long_option.translate(longopt_xlate)

    def _check_alias_dict(self, aliases, what):
        assert isinstance(aliases, dict)
        for (alias, opt) in aliases.items():
            if alias not in self.option_index:
                raise DistutilsGetoptError(("invalid %s '%s': "
                       "option '%s' not defined") % (what, alias, alias))
            if opt not in self.option_index:
                raise DistutilsGetoptError(("invalid %s '%s': "
                       "aliased option '%s' not defined") % (what, alias, opt))

    def set_aliases(self, alias):
        """Set the aliases for this option parser."""
        self._check_alias_dict(alias, "alias")
        self.alias = alias

    def set_negative_aliases(self, negative_alias):
        """Set the negative aliases for this option parser.
        'negative_alias' should be a dictionary mapping option names to
        option names, both the key and value must already be defined
        in the option table."""
        self._check_alias_dict(negative_alias, "negative alias")
        self.negative_alias = negative_alias

    def _grok_option_table(self):
        """Populate the various data structures that keep tabs on the
        option table.  Called by 'getopt()' before it can do anything
        worthwhile.
        """
        self.long_opts = []
        self.short_opts = []
        self.short2long.clear()
        self.repeat = {}

        for option in self.option_table:
            if len(option) == 3:
                long, short, help = option
                repeat = 0
            elif len(option) == 4:
                long, short, help, repeat = option
            else:
                # the option table is part of the code, so simply
                # assert that it is correct
                raise ValueError("invalid option tuple: %r" % (option,))

            # Type- and value-check the option names
            if not isinstance(long, str) or len(long) < 2:
                raise DistutilsGetoptError(("invalid long option '%s': "
                       "must be a string of length >= 2") % long)

            if (not ((short is None) or
                     (isinstance(short, str) and len(short) == 1))):
                raise DistutilsGetoptError("invalid short option '%s': "
                       "must be a single character or None" % short)

            self.repeat[long] = repeat
            self.long_opts.append(long)

            if long[-1] == '=':             # option takes an argument?
                if short: short = short + ':'
                long = long[0:-1]
                self.takes_arg[long] = 1
            else:
                # Is option a "negative alias" for some other option (eg.
                # "quiet" == "!verbose")?
                alias_to = self.negative_alias.get(long)
                if alias_to is not None:
                    if self.takes_arg[alias_to]:
                        raise DistutilsGetoptError(
                            "invalid negative alias '%s': "
                            "aliased option '%s' takes a value"
                            % (long, alias_to))

                    self.long_opts[-1] = long  # XXX redundant?!
                self.takes_arg[long] = 0

            # If this is an alias option, make sure its "takes arg" flag is
            # the same as the option it's aliased to.
            alias_to = self.alias.get(long)
            if alias_to is not None:
                if self.takes_arg[long] != self.takes_arg[alias_to]:
                    raise DistutilsGetoptError(
                        "invalid alias '%s': inconsistent with "
                        "aliased option '%s' (one of them takes a value, "
                        "the other doesn't)"
                        % (long, alias_to))

            # Now enforce some bondage on the long option name, so we can
            # later translate it to an attribute name on some object.  Have
            # to do this a bit late to make sure we've removed any trailing
            # '='.
            if not longopt_re.match(long):
                raise DistutilsGetoptError(
                    "invalid long option name '%s' "
                    "(must be letters, numbers, hyphens only)" % long)

            self.attr_name[long] = self.get_attr_name(long)
            if short:
                self.short_opts.append(short)
                self.short2long[short[0]] = long

    def getopt(self, args=None, object=None):
        """Parse command-line options in args. Store as attributes on object.

        If 'args' is None or not supplied, uses 'sys.argv[1:]'.  If
        'object' is None or not supplied, creates a new OptionDummy
        object, stores option values there, and returns a tuple (args,
        object).  If 'object' is supplied, it is modified in place and
        'getopt()' just returns 'args'; in both cases, the returned
        'args' is a modified copy of the passed-in 'args' list, which
        is left untouched.
        """
        if args is None:
            args = sys.argv[1:]
        if object is None:
            object = OptionDummy()
            created_object = True
        else:
            created_object = False

        self._grok_option_table()

        short_opts = ' '.join(self.short_opts)
        try:
            opts, args = getopt.getopt(args, short_opts, self.long_opts)
        except getopt.error as msg:
            raise DistutilsArgError(msg)

        for opt, val in opts:
            if len(opt) == 2 and opt[0] == '-':   # it's a short option
                opt = self.short2long[opt[1]]
            else:
                assert len(opt) > 2 and opt[:2] == '--'
                opt = opt[2:]

            alias = self.alias.get(opt)
            if alias:
                opt = alias

            if not self.takes_arg[opt]:     # boolean option?
                assert val == '', "boolean option can't have value"
                alias = self.negative_alias.get(opt)
                if alias:
                    opt = alias
                    val = 0
                else:
                    val = 1

            attr = self.attr_name[opt]
            # The only repeating option at the moment is 'verbose'.
            # It has a negative option -q quiet, which should set verbose = 0.
            if val and self.repeat.get(attr) is not None:
                val = getattr(object, attr, 0) + 1
            setattr(object, attr, val)
            self.option_order.append((opt, val))

        # for opts
        if created_object:
            return args, object
        else:
            return args

    def get_option_order(self):
        """Returns the list of (option, value) tuples processed by the
        previous run of 'getopt()'.  Raises RuntimeError if
        'getopt()' hasn't been called yet.
        """
        if self.option_order is None:
            raise RuntimeError("'getopt()' hasn't been called yet")
        else:
            return self.option_order

    def generate_help(self, header=None):
        """Generate help text (a list of strings, one per suggested line of
        output) from the option table for this FancyGetopt object.
        """
        # Blithely assume the option table is good: probably wouldn't call
        # 'generate_help()' unless you've already called 'getopt()'.

        # First pass: determine maximum length of long option names
        max_opt = 0
        for option in self.option_table:
            long = option[0]
            short = option[1]
            l = len(long)
            if long[-1] == '=':
                l = l - 1
            if short is not None:
                l = l + 5                   # " (-x)" where short == 'x'
            if l > max_opt:
                max_opt = l

        opt_width = max_opt + 2 + 2 + 2     # room for indent + dashes + gutter

        # Typical help block looks like this:
        #   --foo       controls foonabulation
        # Help block for longest option looks like this:
        #   --flimflam  set the flim-flam level
        # and with wrapped text:
        #   --flimflam  set the flim-flam level (must be between
        #               0 and 100, except on Tuesdays)
        # Options with short names will have the short name shown (but
        # it doesn't contribute to max_opt):
        #   --foo (-f)  controls foonabulation
        # If adding the short option would make the left column too wide,
        # we push the explanation off to the next line
        #   --flimflam (-l)
        #               set the flim-flam level
        # Important parameters:
        #   - 2 spaces before option block start lines
        #   - 2 dashes for each long option name
        #   - min. 2 spaces between option and explanation (gutter)
        #   - 5 characters (incl. space) for short option name

        # Now generate lines of help text.  (If 80 columns were good enough
        # for Jesus, then 78 columns are good enough for me!)
        line_width = 78
        text_width = line_width - opt_width
        big_indent = ' ' * opt_width
        if header:
            lines = [header]
        else:
            lines = ['Option summary:']

        for option in self.option_table:
            long, short, help = option[:3]
            text = wrap_text(help, text_width)
            if long[-1] == '=':
                long = long[0:-1]

            # Case 1: no short option at all (makes life easy)
            if short is None:
                if text:
                    lines.append("  --%-*s  %s" % (max_opt, long, text[0]))
                else:
                    lines.append("  --%-*s  " % (max_opt, long))

            # Case 2: we have a short option, so we have to include it
            # just after the long option
            else:
                opt_names = "%s (-%s)" % (long, short)
                if text:
                    lines.append("  --%-*s  %s" %
                                 (max_opt, opt_names, text[0]))
                else:
                    lines.append("  --%-*s" % opt_names)

            for l in text[1:]:
                lines.append(big_indent + l)
        return lines

    def print_help(self, header=None, file=None):
        if file is None:
            file = sys.stdout
        for line in self.generate_help(header):
            file.write(line + "\n")


def fancy_getopt(options, negative_opt, object, args):
    parser = FancyGetopt(options)
    parser.set_negative_aliases(negative_opt)
    return parser.getopt(args, object)


WS_TRANS = {ord(_wschar): ' ' for _wschar in string.whitespace}


def wrap_text(text, width):
    """wrap_text(text : string, width : int) -> [string]

    Split 'text' into multiple lines of no more than 'width' characters
    each, and return the list of strings that results.
    """
    if text is None:
        return []
    if len(text) <= width:
        return [text]

    text = text.expandtabs()
    text = text.translate(WS_TRANS)
    chunks = re.split(r'( +|-+)', text)
    chunks = [ch for ch in chunks if ch]  # ' - ' results in empty strings
    lines = []

    while chunks:
        cur_line = []                   # list of chunks (to-be-joined)
        cur_len = 0                     # length of current line

        while chunks:
            l = len(chunks[0])
            if cur_len + l <= width:    # can squeeze (at least) this chunk in
                cur_line.append(chunks[0])
                del chunks[0]
                cur_len = cur_len + l
            else:                       # this line is full
                # drop last chunk if all space
                if cur_line and cur_line[-1][0] == ' ':
                    del cur_line[-1]
                break

        if chunks:                      # any chunks left to process?
            # if the current line is still empty, then we had a single
            # chunk that's too big to fit on a line -- so we break
            # down and break it up at the line width
            if cur_len == 0:
                cur_line.append(chunks[0][0:width])
                chunks[0] = chunks[0][width:]

            # all-whitespace chunks at the end of a line can be discarded
            # (and we know from the re.split above that if a chunk has
            # *any* whitespace, it is *all* whitespace)
            if chunks[0][0] == ' ':
                del chunks[0]

        # and store this line in the list-of-all-lines -- as a single
        # string, of course!
        lines.append(''.join(cur_line))

    return lines


def translate_longopt(opt):
    """Convert a long option name to a valid Python identifier by
    changing "-" to "_".
    """
    return opt.translate(longopt_xlate)


class OptionDummy:
    """Dummy class just used as a place to hold command-line option
    values as instance attributes."""

    def __init__(self, options=[]):
        """Create a new OptionDummy instance.  The attributes listed in
        'options' will be initialized to None."""
        for opt in options:
            setattr(self, opt, None)


if __name__ == "__main__":
    text = """\
Tra-la-la, supercalifragilisticexpialidocious.
How *do* you spell that odd word, anyways?
(Someone ask Mary -- she'll know [or she'll
say, "How should I know?"].)"""

    for w in (10, 20, 30, 40):
        print("width: %d" % w)
        print("\n".join(wrap_text(text, w)))
        print()
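A sketch of the parser in use (the option table here is invented): a trailing '=' marks an option that takes a value, and parsed values land as attributes on the returned dummy object.

from distutils.fancy_getopt import FancyGetopt

table = [('verbose', 'v', 'run verbosely'),
         ('output=', 'o', 'write results to FILE')]
parser = FancyGetopt(table)
args, opts = parser.getopt(['-v', '--output', 'out.txt', 'extra'])
print(opts.verbose, opts.output, args)   # -> 1 out.txt ['extra']
parser.print_help('Usage: demo [options]')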
@ -0,0 +1,238 @@
"""distutils.file_util

Utility functions for operating on single files.
"""

import os
from distutils.errors import DistutilsFileError
from distutils import log

# for generating verbose output in 'copy_file()'
_copy_action = {None:   'copying',
                'hard': 'hard linking',
                'sym':  'symbolically linking'}


def _copy_file_contents(src, dst, buffer_size=16*1024):
    """Copy the file 'src' to 'dst'; both must be filenames.  Any error
    opening either file, reading from 'src', or writing to 'dst', raises
    DistutilsFileError.  Data is read/written in chunks of 'buffer_size'
    bytes (default 16k).  No attempt is made to handle anything apart from
    regular files.
    """
    # Stolen from shutil module in the standard library, but with
    # custom error-handling added.
    fsrc = None
    fdst = None
    try:
        try:
            fsrc = open(src, 'rb')
        except OSError as e:
            raise DistutilsFileError("could not open '%s': %s" % (src, e.strerror))

        if os.path.exists(dst):
            try:
                os.unlink(dst)
            except OSError as e:
                raise DistutilsFileError(
                      "could not delete '%s': %s" % (dst, e.strerror))

        try:
            fdst = open(dst, 'wb')
        except OSError as e:
            raise DistutilsFileError(
                  "could not create '%s': %s" % (dst, e.strerror))

        while True:
            try:
                buf = fsrc.read(buffer_size)
            except OSError as e:
                raise DistutilsFileError(
                      "could not read from '%s': %s" % (src, e.strerror))

            if not buf:
                break

            try:
                fdst.write(buf)
            except OSError as e:
                raise DistutilsFileError(
                      "could not write to '%s': %s" % (dst, e.strerror))
    finally:
        if fdst:
            fdst.close()
        if fsrc:
            fsrc.close()

def copy_file(src, dst, preserve_mode=1, preserve_times=1, update=0,
              link=None, verbose=1, dry_run=0):
    """Copy a file 'src' to 'dst'.  If 'dst' is a directory, then 'src' is
    copied there with the same name; otherwise, it must be a filename.  (If
    the file exists, it will be ruthlessly clobbered.)  If 'preserve_mode'
    is true (the default), the file's mode (type and permission bits, or
    whatever is analogous on the current platform) is copied.  If
    'preserve_times' is true (the default), the last-modified and
    last-access times are copied as well.  If 'update' is true, 'src' will
    only be copied if 'dst' does not exist, or if 'dst' does exist but is
    older than 'src'.

    'link' allows you to make hard links (os.link) or symbolic links
    (os.symlink) instead of copying: set it to "hard" or "sym"; if it is
    None (the default), files are copied.  Don't set 'link' on systems that
    don't support it: 'copy_file()' doesn't check if hard or symbolic
    linking is available.  If hardlink fails, falls back to
    _copy_file_contents().

    Under Mac OS, uses the native file copy function in macostools; on
    other systems, uses '_copy_file_contents()' to copy file contents.

    Return a tuple (dest_name, copied): 'dest_name' is the actual name of
    the output file, and 'copied' is true if the file was copied (or would
    have been copied, if 'dry_run' true).
    """
    # XXX if the destination file already exists, we clobber it if
    # copying, but blow up if linking.  Hmmm.  And I don't know what
    # macostools.copyfile() does.  Should definitely be consistent, and
    # should probably blow up if destination exists and we would be
    # changing it (ie. it's not already a hard/soft link to src OR
    # (not update) and (src newer than dst).

    from distutils.dep_util import newer
    from stat import ST_ATIME, ST_MTIME, ST_MODE, S_IMODE

    if not os.path.isfile(src):
        raise DistutilsFileError(
              "can't copy '%s': doesn't exist or not a regular file" % src)

    if os.path.isdir(dst):
        dir = dst
        dst = os.path.join(dst, os.path.basename(src))
    else:
        dir = os.path.dirname(dst)

    if update and not newer(src, dst):
        if verbose >= 1:
            log.debug("not copying %s (output up-to-date)", src)
        return (dst, 0)

    try:
        action = _copy_action[link]
    except KeyError:
        raise ValueError("invalid value '%s' for 'link' argument" % link)

    if verbose >= 1:
        if os.path.basename(dst) == os.path.basename(src):
            log.info("%s %s -> %s", action, src, dir)
        else:
            log.info("%s %s -> %s", action, src, dst)

    if dry_run:
        return (dst, 1)

    # If linking (hard or symbolic), use the appropriate system call
    # (Unix only, of course, but that's the caller's responsibility)
    elif link == 'hard':
        if not (os.path.exists(dst) and os.path.samefile(src, dst)):
            try:
                os.link(src, dst)
                return (dst, 1)
            except OSError:
                # If hard linking fails, fall back on copying file
                # (some special filesystems don't support hard linking
                #  even under Unix, see issue #8876).
                pass
    elif link == 'sym':
        if not (os.path.exists(dst) and os.path.samefile(src, dst)):
            os.symlink(src, dst)
            return (dst, 1)

    # Otherwise (non-Mac, not linking), copy the file contents and
    # (optionally) copy the times and mode.
    _copy_file_contents(src, dst)
    if preserve_mode or preserve_times:
        st = os.stat(src)

        # According to David Ascher <da@ski.org>, utime() should be done
        # before chmod() (at least under NT).
        if preserve_times:
            os.utime(dst, (st[ST_ATIME], st[ST_MTIME]))
        if preserve_mode:
            os.chmod(dst, S_IMODE(st[ST_MODE]))

    return (dst, 1)


# XXX I suspect this is Unix-specific -- need porting help!
def move_file (src, dst,
               verbose=1,
               dry_run=0):

    """Move a file 'src' to 'dst'.  If 'dst' is a directory, the file will
    be moved into it with the same name; otherwise, 'src' is just renamed
    to 'dst'.  Return the new full name of the file.

    Handles cross-device moves on Unix using 'copy_file()'.  What about
    other systems???
    """
    from os.path import exists, isfile, isdir, basename, dirname
    import errno

    if verbose >= 1:
        log.info("moving %s -> %s", src, dst)

    if dry_run:
        return dst

    if not isfile(src):
        raise DistutilsFileError("can't move '%s': not a regular file" % src)

    if isdir(dst):
        dst = os.path.join(dst, basename(src))
    elif exists(dst):
        raise DistutilsFileError(
              "can't move '%s': destination '%s' already exists" %
              (src, dst))

    if not isdir(dirname(dst)):
        raise DistutilsFileError(
              "can't move '%s': destination '%s' not a valid path" %
              (src, dst))

    copy_it = False
    try:
        os.rename(src, dst)
    except OSError as e:
        (num, msg) = e.args
        if num == errno.EXDEV:
            copy_it = True
        else:
            raise DistutilsFileError(
                  "couldn't move '%s' to '%s': %s" % (src, dst, msg))

    if copy_it:
        copy_file(src, dst, verbose=verbose)
        try:
            os.unlink(src)
        except OSError as e:
            (num, msg) = e.args
            try:
                os.unlink(dst)
            except OSError:
                pass
            raise DistutilsFileError(
                  "couldn't move '%s' to '%s' by copy/delete: "
                  "delete '%s' failed: %s"
                  % (src, dst, src, msg))
    return dst


def write_file (filename, contents):
    """Create a file with the specified name and write 'contents' (a
    sequence of strings without line terminators) to it.
    """
    f = open(filename, "w")
    try:
        for line in contents:
            f.write(line + "\n")
    finally:
        f.close()
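A sketch of the public entry points (paths are hypothetical): copy_file() returns the destination name plus a copied flag, and move_file() falls back to copy-and-delete when os.rename() crosses devices.

from distutils.dir_util import mkpath
from distutils.file_util import write_file, copy_file, move_file

mkpath('backup'); mkpath('archive')      # destination dirs must exist
write_file('notes.txt', ['line one', 'line two'])
dst, copied = copy_file('notes.txt', 'backup/notes.txt', update=1)
print(dst, copied)                       # -> backup/notes.txt 1
move_file('notes.txt', 'archive/notes.txt')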
@ -0,0 +1,355 @@
|
||||
"""distutils.filelist
|
||||
|
||||
Provides the FileList class, used for poking about the filesystem
|
||||
and building lists of files.
|
||||
"""
|
||||
|
||||
import os
|
||||
import re
|
||||
import fnmatch
|
||||
import functools
|
||||
|
||||
from distutils.util import convert_path
|
||||
from distutils.errors import DistutilsTemplateError, DistutilsInternalError
|
||||
from distutils import log
|
||||
|
||||
|
||||
class FileList:
|
||||
"""A list of files built by on exploring the filesystem and filtered by
|
||||
applying various patterns to what we find there.
|
||||
|
||||
Instance attributes:
|
||||
dir
|
||||
directory from which files will be taken -- only used if
|
||||
'allfiles' not supplied to constructor
|
||||
files
|
||||
list of filenames currently being built/filtered/manipulated
|
||||
allfiles
|
||||
complete list of files under consideration (ie. without any
|
||||
filtering applied)
|
||||
"""
|
||||
|
||||
def __init__(self, warn=None, debug_print=None):
|
||||
# ignore argument to FileList, but keep them for backwards
|
||||
# compatibility
|
||||
self.allfiles = None
|
||||
self.files = []
|
||||
|
||||
def set_allfiles(self, allfiles):
|
||||
self.allfiles = allfiles
|
||||
|
||||
def findall(self, dir=os.curdir):
|
||||
self.allfiles = findall(dir)
|
||||
|
||||
def debug_print(self, msg):
|
||||
"""Print 'msg' to stdout if the global DEBUG (taken from the
|
||||
DISTUTILS_DEBUG environment variable) flag is true.
|
||||
"""
|
||||
from distutils.debug import DEBUG
|
||||
if DEBUG:
|
||||
print(msg)
|
||||
|
||||
# Collection methods
|
||||
|
||||
def append(self, item):
|
||||
self.files.append(item)
|
||||
|
||||
def extend(self, items):
|
||||
self.files.extend(items)
|
||||
|
||||
def sort(self):
|
||||
# Not a strict lexical sort!
|
||||
sortable_files = sorted(map(os.path.split, self.files))
|
||||
self.files = []
|
||||
for sort_tuple in sortable_files:
|
||||
self.files.append(os.path.join(*sort_tuple))
|
||||
|
||||
# Other miscellaneous utility methods
|
||||
|
||||
def remove_duplicates(self):
|
||||
# Assumes list has been sorted!
|
||||
for i in range(len(self.files) - 1, 0, -1):
|
||||
if self.files[i] == self.files[i - 1]:
|
||||
del self.files[i]
|
||||
|
||||
# "File template" methods
|
||||
|
||||
def _parse_template_line(self, line):
|
||||
words = line.split()
|
||||
action = words[0]
|
||||
|
||||
patterns = dir = dir_pattern = None
|
||||
|
||||
if action in ('include', 'exclude',
|
||||
'global-include', 'global-exclude'):
|
||||
if len(words) < 2:
|
||||
raise DistutilsTemplateError(
|
||||
"'%s' expects <pattern1> <pattern2> ..." % action)
|
||||
patterns = [convert_path(w) for w in words[1:]]
|
||||
elif action in ('recursive-include', 'recursive-exclude'):
|
||||
if len(words) < 3:
|
||||
raise DistutilsTemplateError(
|
||||
"'%s' expects <dir> <pattern1> <pattern2> ..." % action)
|
||||
dir = convert_path(words[1])
|
||||
patterns = [convert_path(w) for w in words[2:]]
|
||||
elif action in ('graft', 'prune'):
|
||||
if len(words) != 2:
|
||||
raise DistutilsTemplateError(
|
||||
"'%s' expects a single <dir_pattern>" % action)
|
||||
dir_pattern = convert_path(words[1])
|
||||
else:
|
||||
raise DistutilsTemplateError("unknown action '%s'" % action)
|
||||
|
||||
return (action, patterns, dir, dir_pattern)

    def process_template_line(self, line):
        # Parse the line: split it up, make sure the right number of words
        # is there, and return the relevant words.  'action' is always
        # defined: it's the first word of the line.  Which of the other
        # three are defined depends on the action; it'll be either
        # patterns, (dir and patterns), or (dir_pattern).
        (action, patterns, dir, dir_pattern) = self._parse_template_line(line)

        # OK, now we know that the action is valid and we have the
        # right number of words on the line for that action -- so we
        # can proceed with minimal error-checking.
        if action == 'include':
            self.debug_print("include " + ' '.join(patterns))
            for pattern in patterns:
                if not self.include_pattern(pattern, anchor=1):
                    log.warn("warning: no files found matching '%s'",
                             pattern)

        elif action == 'exclude':
            self.debug_print("exclude " + ' '.join(patterns))
            for pattern in patterns:
                if not self.exclude_pattern(pattern, anchor=1):
                    log.warn(("warning: no previously-included files "
                              "found matching '%s'"), pattern)

        elif action == 'global-include':
            self.debug_print("global-include " + ' '.join(patterns))
            for pattern in patterns:
                if not self.include_pattern(pattern, anchor=0):
                    log.warn(("warning: no files found matching '%s' "
                              "anywhere in distribution"), pattern)

        elif action == 'global-exclude':
            self.debug_print("global-exclude " + ' '.join(patterns))
            for pattern in patterns:
                if not self.exclude_pattern(pattern, anchor=0):
                    log.warn(("warning: no previously-included files matching "
                              "'%s' found anywhere in distribution"),
                             pattern)

        elif action == 'recursive-include':
            self.debug_print("recursive-include %s %s" %
                             (dir, ' '.join(patterns)))
            for pattern in patterns:
                if not self.include_pattern(pattern, prefix=dir):
                    msg = (
                        "warning: no files found matching '%s' "
                        "under directory '%s'"
                    )
                    log.warn(msg, pattern, dir)

        elif action == 'recursive-exclude':
            self.debug_print("recursive-exclude %s %s" %
                             (dir, ' '.join(patterns)))
            for pattern in patterns:
                if not self.exclude_pattern(pattern, prefix=dir):
                    log.warn(("warning: no previously-included files matching "
                              "'%s' found under directory '%s'"),
                             pattern, dir)

        elif action == 'graft':
            self.debug_print("graft " + dir_pattern)
            if not self.include_pattern(None, prefix=dir_pattern):
                log.warn("warning: no directories found matching '%s'",
                         dir_pattern)

        elif action == 'prune':
            self.debug_print("prune " + dir_pattern)
            if not self.exclude_pattern(None, prefix=dir_pattern):
                log.warn(("no previously-included directories found "
                          "matching '%s'"), dir_pattern)
        else:
            raise DistutilsInternalError(
                "this cannot happen: invalid action '%s'" % action)

    # Filtering/selection methods

    def include_pattern(self, pattern, anchor=1, prefix=None, is_regex=0):
        """Select strings (presumably filenames) from 'self.files' that
        match 'pattern', a Unix-style wildcard (glob) pattern.  Patterns
        are not quite the same as implemented by the 'fnmatch' module: '*'
        and '?' match non-special characters, where "special" is platform-
        dependent: slash on Unix; colon, slash, and backslash on
        DOS/Windows; and colon on Mac OS.

        If 'anchor' is true (the default), then the pattern match is more
        stringent: "*.py" will match "foo.py" but not "foo/bar.py".  If
        'anchor' is false, both of these will match.

        If 'prefix' is supplied, then only filenames starting with 'prefix'
        (itself a pattern) and ending with 'pattern', with anything in between
        them, will match.  'anchor' is ignored in this case.

        If 'is_regex' is true, 'anchor' and 'prefix' are ignored, and
        'pattern' is assumed to be either a string containing a regex or a
        regex object -- no translation is done, the regex is just compiled
        and used as-is.

        Selected strings will be added to self.files.

        Return True if files are found, False otherwise.
        """
        # XXX docstring lying about what the special chars are?
        files_found = False
        pattern_re = translate_pattern(pattern, anchor, prefix, is_regex)
        self.debug_print("include_pattern: applying regex r'%s'" %
                         pattern_re.pattern)

        # delayed loading of allfiles list
        if self.allfiles is None:
            self.findall()

        for name in self.allfiles:
            if pattern_re.search(name):
                self.debug_print(" adding " + name)
                self.files.append(name)
                files_found = True
        return files_found

    def exclude_pattern(
            self, pattern, anchor=1, prefix=None, is_regex=0):
        """Remove strings (presumably filenames) from 'files' that match
        'pattern'.  Other parameters are the same as for
        'include_pattern()', above.
        The list 'self.files' is modified in place.
        Return True if files are found, False otherwise.
        """
        files_found = False
        pattern_re = translate_pattern(pattern, anchor, prefix, is_regex)
        self.debug_print("exclude_pattern: applying regex r'%s'" %
                         pattern_re.pattern)
        for i in range(len(self.files)-1, -1, -1):
            if pattern_re.search(self.files[i]):
                self.debug_print(" removing " + self.files[i])
                del self.files[i]
                files_found = True
        return files_found


# Utility functions

def _find_all_simple(path):
    """
    Find all files under 'path'
    """
    all_unique = _UniqueDirs.filter(os.walk(path, followlinks=True))
    results = (
        os.path.join(base, file)
        for base, dirs, files in all_unique
        for file in files
    )
    return filter(os.path.isfile, results)


class _UniqueDirs(set):
    """
    Exclude previously-seen dirs from walk results,
    avoiding infinite recursion.
    Ref https://bugs.python.org/issue44497.
    """
    def __call__(self, walk_item):
        """
        Given an item from an os.walk result, determine
        if the item represents a unique dir for this instance
        and if not, prevent further traversal.
        """
        base, dirs, files = walk_item
        stat = os.stat(base)
        candidate = stat.st_dev, stat.st_ino
        found = candidate in self
        if found:
            del dirs[:]
        self.add(candidate)
        return not found

    @classmethod
    def filter(cls, items):
        return filter(cls(), items)


def findall(dir=os.curdir):
    """
    Find all files under 'dir' and return the list of full filenames.
    Unless dir is '.', return full filenames with dir prepended.
    """
    files = _find_all_simple(dir)
    if dir == os.curdir:
        make_rel = functools.partial(os.path.relpath, start=dir)
        files = map(make_rel, files)
    return list(files)


def glob_to_re(pattern):
    """Translate a shell-like glob pattern to a regular expression; return
    a string containing the regex.  Differs from 'fnmatch.translate()' in
    that '*' does not match "special characters" (which are
    platform-specific).
    """
    pattern_re = fnmatch.translate(pattern)

    # '?' and '*' in the glob pattern become '.' and '.*' in the RE, which
    # IMHO is wrong -- '?' and '*' aren't supposed to match slash in Unix,
    # and by extension they shouldn't match such "special characters" under
    # any OS.  So change all non-escaped dots in the RE to match any
    # character except the special characters (currently: just os.sep).
    sep = os.sep
    if os.sep == '\\':
        # we're using a regex to manipulate a regex, so we need
        # to escape the backslash twice
        sep = r'\\\\'
    escaped = r'\1[^%s]' % sep
    pattern_re = re.sub(r'((?<!\\)(\\\\)*)\.', escaped, pattern_re)
    return pattern_re
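
# Illustrative sketch (editor's addition, not part of the upstream module):
# unlike fnmatch.translate, glob_to_re keeps '*' and '?' from crossing the
# path separator.  On a POSIX system (os.sep == '/'):
#
#   >>> import re
#   >>> bool(re.match(glob_to_re('*.py'), 'foo.py'))
#   True
#   >>> bool(re.match(glob_to_re('*.py'), 'foo/bar.py'))
#   False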


def translate_pattern(pattern, anchor=1, prefix=None, is_regex=0):
    """Translate a shell-like wildcard pattern to a compiled regular
    expression.  Return the compiled regex.  If 'is_regex' true,
    then 'pattern' is directly compiled to a regex (if it's a string)
    or just returned as-is (assumes it's a regex object).
    """
    if is_regex:
        if isinstance(pattern, str):
            return re.compile(pattern)
        else:
            return pattern

    # ditch start and end characters
    start, _, end = glob_to_re('_').partition('_')

    if pattern:
        pattern_re = glob_to_re(pattern)
        assert pattern_re.startswith(start) and pattern_re.endswith(end)
    else:
        pattern_re = ''

    if prefix is not None:
        prefix_re = glob_to_re(prefix)
        assert prefix_re.startswith(start) and prefix_re.endswith(end)
        prefix_re = prefix_re[len(start): len(prefix_re) - len(end)]
        sep = os.sep
        if os.sep == '\\':
            sep = r'\\'
        pattern_re = pattern_re[len(start): len(pattern_re) - len(end)]
        pattern_re = r'%s\A%s%s.*%s%s' % (
            start, prefix_re, sep, pattern_re, end)
    else:  # no prefix -- respect anchor flag
        if anchor:
            pattern_re = r'%s\A%s' % (start, pattern_re[len(start):])

    return re.compile(pattern_re)
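
# Illustrative sketch (editor's addition, not part of the upstream module):
# the anchored and prefix modes of translate_pattern, as exercised by
# include_pattern()/exclude_pattern() above (POSIX paths shown).
#
#   >>> bool(translate_pattern('*.py', anchor=1).search('setup.py'))
#   True
#   >>> bool(translate_pattern('*.py', anchor=1).search('src/setup.py'))
#   False
#   >>> # with a prefix, the pattern may match anywhere under that directory
#   >>> bool(translate_pattern('*.rst', prefix='docs').search('docs/api/index.rst'))
#   True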
@ -0,0 +1,81 @@
"""A simple log mechanism styled after PEP 282."""

# The class here is styled after PEP 282 so that it could later be
# replaced with a standard Python logging implementation.

import sys

DEBUG = 1
INFO = 2
WARN = 3
ERROR = 4
FATAL = 5


class Log:

    def __init__(self, threshold=WARN):
        self.threshold = threshold

    def _log(self, level, msg, args):
        if level not in (DEBUG, INFO, WARN, ERROR, FATAL):
            raise ValueError('%s wrong log level' % str(level))

        if level >= self.threshold:
            if args:
                msg = msg % args
            if level in (WARN, ERROR, FATAL):
                stream = sys.stderr
            else:
                stream = sys.stdout
            try:
                stream.write('%s\n' % msg)
            except UnicodeEncodeError:
                # emulate backslashreplace error handler
                encoding = stream.encoding
                msg = msg.encode(encoding, "backslashreplace").decode(encoding)
                stream.write('%s\n' % msg)
            stream.flush()

    def log(self, level, msg, *args):
        self._log(level, msg, args)

    def debug(self, msg, *args):
        self._log(DEBUG, msg, args)

    def info(self, msg, *args):
        self._log(INFO, msg, args)

    def warn(self, msg, *args):
        self._log(WARN, msg, args)

    def error(self, msg, *args):
        self._log(ERROR, msg, args)

    def fatal(self, msg, *args):
        self._log(FATAL, msg, args)


_global_log = Log()
log = _global_log.log
debug = _global_log.debug
info = _global_log.info
warn = _global_log.warn
error = _global_log.error
fatal = _global_log.fatal


def set_threshold(level):
    # return the old threshold for use from tests
    old = _global_log.threshold
    _global_log.threshold = level
    return old


def set_verbosity(v):
    if v <= 0:
        set_threshold(WARN)
    elif v == 1:
        set_threshold(INFO)
    elif v >= 2:
        set_threshold(DEBUG)
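
# Illustrative sketch (editor's addition, not part of the upstream module):
# the module-level aliases above act as a process-wide logger whose
# threshold can be raised or lowered.
#
#   >>> old = set_threshold(INFO)
#   >>> info("copying %s -> %s", "a.txt", "build/")  # written to stdout
#   copying a.txt -> build/
#   >>> debug("hidden: DEBUG is below the INFO threshold")
#   >>> _ = set_threshold(old)  # restore the previous threshold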
@ -0,0 +1,788 @@
"""distutils.msvc9compiler

Contains MSVCCompiler, an implementation of the abstract CCompiler class
for Microsoft Visual Studio 2008.

The module is compatible with VS 2005 and VS 2008. You can find legacy support
for older versions of VS in distutils.msvccompiler.
"""

# Written by Perry Stoll
# hacked by Robin Becker and Thomas Heller to do a better job of
#   finding DevStudio (through the registry)
# ported to VS2005 and VS 2008 by Christian Heimes

import os
import subprocess
import sys
import re

from distutils.errors import DistutilsExecError, DistutilsPlatformError, \
                             CompileError, LibError, LinkError
from distutils.ccompiler import CCompiler, gen_lib_options
from distutils import log
from distutils.util import get_platform

import winreg

RegOpenKeyEx = winreg.OpenKeyEx
RegEnumKey = winreg.EnumKey
RegEnumValue = winreg.EnumValue
RegError = winreg.error

HKEYS = (winreg.HKEY_USERS,
         winreg.HKEY_CURRENT_USER,
         winreg.HKEY_LOCAL_MACHINE,
         winreg.HKEY_CLASSES_ROOT)

NATIVE_WIN64 = (sys.platform == 'win32' and sys.maxsize > 2**32)
if NATIVE_WIN64:
    # Visual C++ is a 32-bit application, so we need to look in
    # the corresponding registry branch, if we're running a
    # 64-bit Python on Win64
    VS_BASE = r"Software\Wow6432Node\Microsoft\VisualStudio\%0.1f"
    WINSDK_BASE = r"Software\Wow6432Node\Microsoft\Microsoft SDKs\Windows"
    NET_BASE = r"Software\Wow6432Node\Microsoft\.NETFramework"
else:
    VS_BASE = r"Software\Microsoft\VisualStudio\%0.1f"
    WINSDK_BASE = r"Software\Microsoft\Microsoft SDKs\Windows"
    NET_BASE = r"Software\Microsoft\.NETFramework"

# A map keyed by get_platform() return values to values accepted by
# 'vcvarsall.bat'.  Note a cross-compile may combine these (eg, 'x86_amd64' is
# the param to cross-compile on x86 targeting amd64.)
PLAT_TO_VCVARS = {
    'win32': 'x86',
    'win-amd64': 'amd64',
}
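
# Illustrative sketch (editor's addition, not part of the upstream module):
# initialize() below turns these entries into the argument passed to
# vcvarsall.bat.  A native build uses the target's own entry; a
# win32 -> win-amd64 cross-compile joins host and target with '_':
#
#   >>> PLAT_TO_VCVARS['win-amd64']
#   'amd64'
#   >>> PLAT_TO_VCVARS['win32'] + '_' + PLAT_TO_VCVARS['win-amd64']
#   'x86_amd64'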

class Reg:
    """Helper class to read values from the registry
    """

    def get_value(cls, path, key):
        for base in HKEYS:
            d = cls.read_values(base, path)
            if d and key in d:
                return d[key]
        raise KeyError(key)
    get_value = classmethod(get_value)

    def read_keys(cls, base, key):
        """Return list of registry keys."""
        try:
            handle = RegOpenKeyEx(base, key)
        except RegError:
            return None
        L = []
        i = 0
        while True:
            try:
                k = RegEnumKey(handle, i)
            except RegError:
                break
            L.append(k)
            i += 1
        return L
    read_keys = classmethod(read_keys)

    def read_values(cls, base, key):
        """Return dict of registry keys and values.

        All names are converted to lowercase.
        """
        try:
            handle = RegOpenKeyEx(base, key)
        except RegError:
            return None
        d = {}
        i = 0
        while True:
            try:
                name, value, type = RegEnumValue(handle, i)
            except RegError:
                break
            name = name.lower()
            d[cls.convert_mbcs(name)] = cls.convert_mbcs(value)
            i += 1
        return d
    read_values = classmethod(read_values)

    def convert_mbcs(s):
        dec = getattr(s, "decode", None)
        if dec is not None:
            try:
                s = dec("mbcs")
            except UnicodeError:
                pass
        return s
    convert_mbcs = staticmethod(convert_mbcs)

class MacroExpander:

    def __init__(self, version):
        self.macros = {}
        self.vsbase = VS_BASE % version
        self.load_macros(version)

    def set_macro(self, macro, path, key):
        self.macros["$(%s)" % macro] = Reg.get_value(path, key)

    def load_macros(self, version):
        self.set_macro("VCInstallDir", self.vsbase + r"\Setup\VC", "productdir")
        self.set_macro("VSInstallDir", self.vsbase + r"\Setup\VS", "productdir")
        self.set_macro("FrameworkDir", NET_BASE, "installroot")
        try:
            if version >= 8.0:
                self.set_macro("FrameworkSDKDir", NET_BASE,
                               "sdkinstallrootv2.0")
            else:
                raise KeyError("sdkinstallrootv2.0")
        except KeyError:
            raise DistutilsPlatformError(
            """Python was built with Visual Studio 2008;
extensions must be built with a compiler that can generate compatible binaries.
Visual Studio 2008 was not found on this system. If you have Cygwin installed,
you can try compiling with MingW32, by passing "-c mingw32" to setup.py.""")

        if version >= 9.0:
            self.set_macro("FrameworkVersion", self.vsbase, "clr version")
            self.set_macro("WindowsSdkDir", WINSDK_BASE, "currentinstallfolder")
        else:
            p = r"Software\Microsoft\NET Framework Setup\Product"
            for base in HKEYS:
                try:
                    h = RegOpenKeyEx(base, p)
                except RegError:
                    continue
                key = RegEnumKey(h, 0)
                d = Reg.get_value(base, r"%s\%s" % (p, key))
                self.macros["$(FrameworkVersion)"] = d["version"]

    def sub(self, s):
        for k, v in self.macros.items():
            s = s.replace(k, v)
        return s

def get_build_version():
    """Return the version of MSVC that was used to build Python.

    For Python 2.3 and up, the version number is included in
    sys.version.  For earlier versions, assume the compiler is MSVC 6.
    """
    prefix = "MSC v."
    i = sys.version.find(prefix)
    if i == -1:
        return 6
    i = i + len(prefix)
    s, rest = sys.version[i:].split(" ", 1)
    majorVersion = int(s[:-2]) - 6
    if majorVersion >= 13:
        # v13 was skipped and should be v14
        majorVersion += 1
    minorVersion = int(s[2:3]) / 10.0
    # I don't think paths are affected by minor version in version 6
    if majorVersion == 6:
        minorVersion = 0
    if majorVersion >= 6:
        return majorVersion + minorVersion
    # else we don't know what version of the compiler this is
    return None

def normalize_and_reduce_paths(paths):
    """Return a list of normalized paths with duplicates removed.

    The current order of paths is maintained.
    """
    # Paths are normalized so things like:  /a and /a/ aren't both preserved.
    reduced_paths = []
    for p in paths:
        np = os.path.normpath(p)
        # XXX(nnorwitz): O(n**2), if reduced_paths gets long perhaps use a set.
        if np not in reduced_paths:
            reduced_paths.append(np)
    return reduced_paths
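
# Illustrative sketch (editor's addition, not part of the upstream module):
# on Windows, normpath() collapses trailing separators and '.' components,
# so equivalent spellings of a directory survive deduplication only once.
#
#   >>> normalize_and_reduce_paths(['C:\\tools', 'C:\\tools\\', 'C:\\tools\\.'])
#   ['C:\\tools']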

def removeDuplicates(variable):
    """Remove duplicate values of an environment variable.
    """
    oldList = variable.split(os.pathsep)
    newList = []
    for i in oldList:
        if i not in newList:
            newList.append(i)
    newVariable = os.pathsep.join(newList)
    return newVariable

def find_vcvarsall(version):
    """Find the vcvarsall.bat file

    At first it tries to find the productdir of VS 2008 in the registry. If
    that fails it falls back to the VS90COMNTOOLS env var.
    """
    vsbase = VS_BASE % version
    try:
        productdir = Reg.get_value(r"%s\Setup\VC" % vsbase,
                                   "productdir")
    except KeyError:
        log.debug("Unable to find productdir in registry")
        productdir = None

    if not productdir or not os.path.isdir(productdir):
        toolskey = "VS%0.f0COMNTOOLS" % version
        toolsdir = os.environ.get(toolskey, None)

        if toolsdir and os.path.isdir(toolsdir):
            productdir = os.path.join(toolsdir, os.pardir, os.pardir, "VC")
            productdir = os.path.abspath(productdir)
            if not os.path.isdir(productdir):
                log.debug("%s is not a valid directory" % productdir)
                return None
        else:
            log.debug("Env var %s is not set or invalid" % toolskey)
    if not productdir:
        log.debug("No productdir found")
        return None
    vcvarsall = os.path.join(productdir, "vcvarsall.bat")
    if os.path.isfile(vcvarsall):
        return vcvarsall
    log.debug("Unable to find vcvarsall.bat")
    return None

def query_vcvarsall(version, arch="x86"):
    """Launch vcvarsall.bat and read the settings from its environment
    """
    vcvarsall = find_vcvarsall(version)
    interesting = {"include", "lib", "libpath", "path"}
    result = {}

    if vcvarsall is None:
        raise DistutilsPlatformError("Unable to find vcvarsall.bat")
    log.debug("Calling 'vcvarsall.bat %s' (version=%s)", arch, version)
    popen = subprocess.Popen('"%s" %s & set' % (vcvarsall, arch),
                             stdout=subprocess.PIPE,
                             stderr=subprocess.PIPE)
    try:
        stdout, stderr = popen.communicate()
        if popen.wait() != 0:
            raise DistutilsPlatformError(stderr.decode("mbcs"))

        stdout = stdout.decode("mbcs")
        for line in stdout.split("\n"):
            line = Reg.convert_mbcs(line)
            if '=' not in line:
                continue
            line = line.strip()
            key, value = line.split('=', 1)
            key = key.lower()
            if key in interesting:
                if value.endswith(os.pathsep):
                    value = value[:-1]
                result[key] = removeDuplicates(value)

    finally:
        popen.stdout.close()
        popen.stderr.close()

    if len(result) != len(interesting):
        raise ValueError(str(list(result.keys())))

    return result
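
# Illustrative sketch (editor's addition, not part of the upstream module):
# on a hypothetical Windows machine with VS 2008 installed, the result
# contains exactly the four lowercased variables named in 'interesting',
# each run through removeDuplicates():
#
#   >>> env = query_vcvarsall(9.0, 'x86')
#   >>> sorted(env)
#   ['include', 'lib', 'libpath', 'path']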

# More globals
VERSION = get_build_version()
# MACROS = MacroExpander(VERSION)

class MSVCCompiler(CCompiler):
    """Concrete class that implements an interface to Microsoft Visual C++,
       as defined by the CCompiler abstract class."""

    compiler_type = 'msvc'

    # Just set this so CCompiler's constructor doesn't barf.  We currently
    # don't use the 'set_executables()' bureaucracy provided by CCompiler,
    # as it really isn't necessary for this sort of single-compiler class.
    # Would be nice to have a consistent interface with UnixCCompiler,
    # though, so it's worth thinking about.
    executables = {}

    # Private class data (need to distinguish C from C++ source for compiler)
    _c_extensions = ['.c']
    _cpp_extensions = ['.cc', '.cpp', '.cxx']
    _rc_extensions = ['.rc']
    _mc_extensions = ['.mc']

    # Needed for the filename generation methods provided by the
    # base class, CCompiler.
    src_extensions = (_c_extensions + _cpp_extensions +
                      _rc_extensions + _mc_extensions)
    res_extension = '.res'
    obj_extension = '.obj'
    static_lib_extension = '.lib'
    shared_lib_extension = '.dll'
    static_lib_format = shared_lib_format = '%s%s'
    exe_extension = '.exe'

    def __init__(self, verbose=0, dry_run=0, force=0):
        CCompiler.__init__(self, verbose, dry_run, force)
        self.__version = VERSION
        self.__root = r"Software\Microsoft\VisualStudio"
        # self.__macros = MACROS
        self.__paths = []
        # target platform (.plat_name is consistent with 'bdist')
        self.plat_name = None
        self.__arch = None  # deprecated name
        self.initialized = False

    def initialize(self, plat_name=None):
        # multi-init means we would need to check platform same each time...
        assert not self.initialized, "don't init multiple times"
        if self.__version < 8.0:
            raise DistutilsPlatformError(
                "VC %0.1f is not supported by this module" % self.__version)
        if plat_name is None:
            plat_name = get_platform()
        # sanity check for platforms to prevent obscure errors later.
        ok_plats = 'win32', 'win-amd64'
        if plat_name not in ok_plats:
            raise DistutilsPlatformError("--plat-name must be one of %s" %
                                         (ok_plats,))

        if "DISTUTILS_USE_SDK" in os.environ and "MSSdk" in os.environ and self.find_exe("cl.exe"):
            # Assume that the SDK set up everything alright; don't try to be
            # smarter
            self.cc = "cl.exe"
            self.linker = "link.exe"
            self.lib = "lib.exe"
            self.rc = "rc.exe"
            self.mc = "mc.exe"
        else:
            # On x86, 'vcvars32.bat amd64' creates an env that doesn't work;
            # to cross compile, you use 'x86_amd64'.
            # On AMD64, 'vcvars32.bat amd64' is a native build env; to cross
            # compile use 'x86' (ie, it runs the x86 compiler directly)
            if plat_name == get_platform() or plat_name == 'win32':
                # native build or cross-compile to win32
                plat_spec = PLAT_TO_VCVARS[plat_name]
            else:
                # cross compile from win32 -> some 64bit
                plat_spec = PLAT_TO_VCVARS[get_platform()] + '_' + \
                            PLAT_TO_VCVARS[plat_name]

            vc_env = query_vcvarsall(VERSION, plat_spec)

            self.__paths = vc_env['path'].split(os.pathsep)
            os.environ['lib'] = vc_env['lib']
            os.environ['include'] = vc_env['include']

            if len(self.__paths) == 0:
                raise DistutilsPlatformError("Python was built with %s, "
                       "and extensions need to be built with the same "
                       "version of the compiler, but it isn't installed."
                       % self.__product)

            self.cc = self.find_exe("cl.exe")
            self.linker = self.find_exe("link.exe")
            self.lib = self.find_exe("lib.exe")
            self.rc = self.find_exe("rc.exe")   # resource compiler
            self.mc = self.find_exe("mc.exe")   # message compiler
            #self.set_path_env_var('lib')
            #self.set_path_env_var('include')

        # extend the MSVC path with the current path
        try:
            for p in os.environ['path'].split(';'):
                self.__paths.append(p)
        except KeyError:
            pass
        self.__paths = normalize_and_reduce_paths(self.__paths)
        os.environ['path'] = ";".join(self.__paths)

        self.preprocess_options = None
        if self.__arch == "x86":
            self.compile_options = ['/nologo', '/O2', '/MD', '/W3',
                                    '/DNDEBUG']
            self.compile_options_debug = ['/nologo', '/Od', '/MDd', '/W3',
                                          '/Z7', '/D_DEBUG']
        else:
            # Win64
            self.compile_options = ['/nologo', '/O2', '/MD', '/W3', '/GS-',
                                    '/DNDEBUG']
            self.compile_options_debug = ['/nologo', '/Od', '/MDd', '/W3', '/GS-',
                                          '/Z7', '/D_DEBUG']

        self.ldflags_shared = ['/DLL', '/nologo', '/INCREMENTAL:NO']
        if self.__version >= 7:
            self.ldflags_shared_debug = [
                '/DLL', '/nologo', '/INCREMENTAL:no', '/DEBUG'
                ]
        self.ldflags_static = ['/nologo']

        self.initialized = True

    # -- Worker methods ------------------------------------------------

    def object_filenames(self,
                         source_filenames,
                         strip_dir=0,
                         output_dir=''):
        # Copied from ccompiler.py, extended to return .res as 'object'-file
        # for .rc input file
        if output_dir is None:
            output_dir = ''
        obj_names = []
        for src_name in source_filenames:
            (base, ext) = os.path.splitext(src_name)
            base = os.path.splitdrive(base)[1]  # Chop off the drive
            base = base[os.path.isabs(base):]   # If abs, chop off leading /
            if ext not in self.src_extensions:
                # Better to raise an exception instead of silently continuing
                # and later complain about sources and targets having
                # different lengths
                raise CompileError("Don't know how to compile %s" % src_name)
            if strip_dir:
                base = os.path.basename(base)
            if ext in self._rc_extensions:
                obj_names.append(os.path.join(output_dir,
                                              base + self.res_extension))
            elif ext in self._mc_extensions:
                obj_names.append(os.path.join(output_dir,
                                              base + self.res_extension))
            else:
                obj_names.append(os.path.join(output_dir,
                                              base + self.obj_extension))
        return obj_names


    def compile(self, sources,
                output_dir=None, macros=None, include_dirs=None, debug=0,
                extra_preargs=None, extra_postargs=None, depends=None):

        if not self.initialized:
            self.initialize()
        compile_info = self._setup_compile(output_dir, macros, include_dirs,
                                           sources, depends, extra_postargs)
        macros, objects, extra_postargs, pp_opts, build = compile_info

        compile_opts = extra_preargs or []
        compile_opts.append('/c')
        if debug:
            compile_opts.extend(self.compile_options_debug)
        else:
            compile_opts.extend(self.compile_options)

        for obj in objects:
            try:
                src, ext = build[obj]
            except KeyError:
                continue
            if debug:
                # pass the full pathname to MSVC in debug mode,
                # this allows the debugger to find the source file
                # without asking the user to browse for it
                src = os.path.abspath(src)

            if ext in self._c_extensions:
                input_opt = "/Tc" + src
            elif ext in self._cpp_extensions:
                input_opt = "/Tp" + src
            elif ext in self._rc_extensions:
                # compile .RC to .RES file
                input_opt = src
                output_opt = "/fo" + obj
                try:
                    self.spawn([self.rc] + pp_opts +
                               [output_opt] + [input_opt])
                except DistutilsExecError as msg:
                    raise CompileError(msg)
                continue
            elif ext in self._mc_extensions:
                # Compile .MC to .RC file to .RES file.
                #   * '-h dir' specifies the directory for the
                #     generated include file
                #   * '-r dir' specifies the target directory of the
                #     generated RC file and the binary message resource
                #     it includes
                #
                # For now (since there are no options to change this),
                # we use the source-directory for the include file and
                # the build directory for the RC file and message
                # resources. This works at least for win32all.
                h_dir = os.path.dirname(src)
                rc_dir = os.path.dirname(obj)
                try:
                    # first compile .MC to .RC and .H file
                    self.spawn([self.mc] +
                               ['-h', h_dir, '-r', rc_dir] + [src])
                    base, _ = os.path.splitext(os.path.basename(src))
                    rc_file = os.path.join(rc_dir, base + '.rc')
                    # then compile .RC to .RES file
                    self.spawn([self.rc] +
                               ["/fo" + obj] + [rc_file])

                except DistutilsExecError as msg:
                    raise CompileError(msg)
                continue
            else:
                # how to handle this file?
                raise CompileError("Don't know how to compile %s to %s"
                                   % (src, obj))

            output_opt = "/Fo" + obj
            try:
                self.spawn([self.cc] + compile_opts + pp_opts +
                           [input_opt, output_opt] +
                           extra_postargs)
            except DistutilsExecError as msg:
                raise CompileError(msg)

        return objects


    def create_static_lib(self,
                          objects,
                          output_libname,
                          output_dir=None,
                          debug=0,
                          target_lang=None):

        if not self.initialized:
            self.initialize()
        (objects, output_dir) = self._fix_object_args(objects, output_dir)
        output_filename = self.library_filename(output_libname,
                                                output_dir=output_dir)

        if self._need_link(objects, output_filename):
            lib_args = objects + ['/OUT:' + output_filename]
            if debug:
                pass  # XXX what goes here?
            try:
                self.spawn([self.lib] + lib_args)
            except DistutilsExecError as msg:
                raise LibError(msg)
        else:
            log.debug("skipping %s (up-to-date)", output_filename)


    def link(self,
             target_desc,
             objects,
             output_filename,
             output_dir=None,
             libraries=None,
             library_dirs=None,
             runtime_library_dirs=None,
             export_symbols=None,
             debug=0,
             extra_preargs=None,
             extra_postargs=None,
             build_temp=None,
             target_lang=None):

        if not self.initialized:
            self.initialize()
        (objects, output_dir) = self._fix_object_args(objects, output_dir)
        fixed_args = self._fix_lib_args(libraries, library_dirs,
                                        runtime_library_dirs)
        (libraries, library_dirs, runtime_library_dirs) = fixed_args

        if runtime_library_dirs:
            self.warn("I don't know what to do with 'runtime_library_dirs': "
                      + str(runtime_library_dirs))

        lib_opts = gen_lib_options(self,
                                   library_dirs, runtime_library_dirs,
                                   libraries)
        if output_dir is not None:
            output_filename = os.path.join(output_dir, output_filename)

        if self._need_link(objects, output_filename):
            if target_desc == CCompiler.EXECUTABLE:
                if debug:
                    ldflags = self.ldflags_shared_debug[1:]
                else:
                    ldflags = self.ldflags_shared[1:]
            else:
                if debug:
                    ldflags = self.ldflags_shared_debug
                else:
                    ldflags = self.ldflags_shared

            export_opts = []
            for sym in (export_symbols or []):
                export_opts.append("/EXPORT:" + sym)

            ld_args = (ldflags + lib_opts + export_opts +
                       objects + ['/OUT:' + output_filename])

            # The MSVC linker generates .lib and .exp files, which cannot be
            # suppressed by any linker switches. The .lib files may even be
            # needed! Make sure they are generated in the temporary build
            # directory. Since they have different names for debug and release
            # builds, they can go into the same directory.
            build_temp = os.path.dirname(objects[0])
            if export_symbols is not None:
                (dll_name, dll_ext) = os.path.splitext(
                    os.path.basename(output_filename))
                implib_file = os.path.join(
                    build_temp,
                    self.library_filename(dll_name))
                ld_args.append('/IMPLIB:' + implib_file)

            self.manifest_setup_ldargs(output_filename, build_temp, ld_args)

            if extra_preargs:
                ld_args[:0] = extra_preargs
            if extra_postargs:
                ld_args.extend(extra_postargs)

            self.mkpath(os.path.dirname(output_filename))
            try:
                self.spawn([self.linker] + ld_args)
            except DistutilsExecError as msg:
                raise LinkError(msg)

            # embed the manifest
            # XXX - this is somewhat fragile - if mt.exe fails, distutils
            # will still consider the DLL up-to-date, but it will not have a
            # manifest.  Maybe we should link to a temp file?  OTOH, that
            # implies a build environment error that shouldn't go undetected.
            mfinfo = self.manifest_get_embed_info(target_desc, ld_args)
            if mfinfo is not None:
                mffilename, mfid = mfinfo
                out_arg = '-outputresource:%s;%s' % (output_filename, mfid)
                try:
                    self.spawn(['mt.exe', '-nologo', '-manifest',
                                mffilename, out_arg])
                except DistutilsExecError as msg:
                    raise LinkError(msg)
        else:
            log.debug("skipping %s (up-to-date)", output_filename)

    def manifest_setup_ldargs(self, output_filename, build_temp, ld_args):
        # If we need a manifest at all, an embedded manifest is recommended.
        # See MSDN article titled
        # "How to: Embed a Manifest Inside a C/C++ Application"
        # (currently at http://msdn2.microsoft.com/en-us/library/ms235591(VS.80).aspx)
        # Ask the linker to generate the manifest in the temp dir, so
        # we can check it, and possibly embed it, later.
        temp_manifest = os.path.join(
                build_temp,
                os.path.basename(output_filename) + ".manifest")
        ld_args.append('/MANIFESTFILE:' + temp_manifest)

    def manifest_get_embed_info(self, target_desc, ld_args):
        # If a manifest should be embedded, return a tuple of
        # (manifest_filename, resource_id).  Returns None if no manifest
        # should be embedded.  See http://bugs.python.org/issue7833 for why
        # we want to avoid any manifest for extension modules if we can.
        for arg in ld_args:
            if arg.startswith("/MANIFESTFILE:"):
                temp_manifest = arg.split(":", 1)[1]
                break
        else:
            # no /MANIFESTFILE so nothing to do.
            return None
        if target_desc == CCompiler.EXECUTABLE:
            # by default, executables always get the manifest with the
            # CRT referenced.
            mfid = 1
        else:
            # Extension modules try and avoid any manifest if possible.
            mfid = 2
            temp_manifest = self._remove_visual_c_ref(temp_manifest)
        if temp_manifest is None:
            return None
        return temp_manifest, mfid

    def _remove_visual_c_ref(self, manifest_file):
        try:
            # Remove references to the Visual C runtime, so they will
            # fall through to the Visual C dependency of Python.exe.
            # This way, when installed for a restricted user (e.g.
            # runtimes are not in WinSxS folder, but in Python's own
            # folder), the runtimes do not need to be in every folder
            # with .pyd's.
            # Returns either the filename of the modified manifest or
            # None if no manifest should be embedded.
            manifest_f = open(manifest_file)
            try:
                manifest_buf = manifest_f.read()
            finally:
                manifest_f.close()
            pattern = re.compile(
                r"""<assemblyIdentity.*?name=("|')Microsoft\."""
                r"""VC\d{2}\.CRT("|').*?(/>|</assemblyIdentity>)""",
                re.DOTALL)
            manifest_buf = re.sub(pattern, "", manifest_buf)
            pattern = r"<dependentAssembly>\s*</dependentAssembly>"
            manifest_buf = re.sub(pattern, "", manifest_buf)
            # Now see if any other assemblies are referenced - if not, we
            # don't want a manifest embedded.
            pattern = re.compile(
                r"""<assemblyIdentity.*?name=(?:"|')(.+?)(?:"|')"""
                r""".*?(?:/>|</assemblyIdentity>)""", re.DOTALL)
            if re.search(pattern, manifest_buf) is None:
                return None

            manifest_f = open(manifest_file, 'w')
            try:
                manifest_f.write(manifest_buf)
                return manifest_file
            finally:
                manifest_f.close()
        except OSError:
            pass

    # -- Miscellaneous methods -----------------------------------------
    # These are all used by the 'gen_lib_options()' function, in
    # ccompiler.py.

    def library_dir_option(self, dir):
        return "/LIBPATH:" + dir

    def runtime_library_dir_option(self, dir):
        raise DistutilsPlatformError(
            "don't know how to set runtime library search path for MSVC++")

    def library_option(self, lib):
        return self.library_filename(lib)


    def find_library_file(self, dirs, lib, debug=0):
        # Prefer a debugging library if found (and requested), but deal
        # with it if we don't have one.
        if debug:
            try_names = [lib + "_d", lib]
        else:
            try_names = [lib]
        for dir in dirs:
            for name in try_names:
                libfile = os.path.join(dir, self.library_filename(name))
                if os.path.exists(libfile):
                    return libfile
        else:
            # Oops, didn't find it in *any* of 'dirs'
            return None

    # Helper methods for using the MSVC registry settings

    def find_exe(self, exe):
        """Return path to an MSVC executable program.

        Tries to find the program in several places: first, one of the
        MSVC program search paths from the registry; next, the directories
        in the PATH environment variable.  If any of those work, return an
        absolute path that is known to exist.  If none of them work, just
        return the original program name, 'exe'.
        """
        for p in self.__paths:
            fn = os.path.join(os.path.abspath(p), exe)
            if os.path.isfile(fn):
                return fn

        # didn't find it; try existing path
        for p in os.environ['Path'].split(';'):
            fn = os.path.join(os.path.abspath(p), exe)
            if os.path.isfile(fn):
                return fn

        return exe
@ -0,0 +1,643 @@
"""distutils.msvccompiler

Contains MSVCCompiler, an implementation of the abstract CCompiler class
for Microsoft Visual Studio.
"""

# Written by Perry Stoll
# hacked by Robin Becker and Thomas Heller to do a better job of
#   finding DevStudio (through the registry)

import sys, os
from distutils.errors import \
     DistutilsExecError, DistutilsPlatformError, \
     CompileError, LibError, LinkError
from distutils.ccompiler import \
     CCompiler, gen_lib_options
from distutils import log

_can_read_reg = False
try:
    import winreg

    _can_read_reg = True
    hkey_mod = winreg

    RegOpenKeyEx = winreg.OpenKeyEx
    RegEnumKey = winreg.EnumKey
    RegEnumValue = winreg.EnumValue
    RegError = winreg.error

except ImportError:
    try:
        import win32api
        import win32con
        _can_read_reg = True
        hkey_mod = win32con

        RegOpenKeyEx = win32api.RegOpenKeyEx
        RegEnumKey = win32api.RegEnumKey
        RegEnumValue = win32api.RegEnumValue
        RegError = win32api.error
    except ImportError:
        log.info("Warning: Can't read registry to find the "
                 "necessary compiler setting\n"
                 "Make sure that Python modules winreg, "
                 "win32api or win32con are installed.")

if _can_read_reg:
    HKEYS = (hkey_mod.HKEY_USERS,
             hkey_mod.HKEY_CURRENT_USER,
             hkey_mod.HKEY_LOCAL_MACHINE,
             hkey_mod.HKEY_CLASSES_ROOT)

def read_keys(base, key):
    """Return list of registry keys."""
    try:
        handle = RegOpenKeyEx(base, key)
    except RegError:
        return None
    L = []
    i = 0
    while True:
        try:
            k = RegEnumKey(handle, i)
        except RegError:
            break
        L.append(k)
        i += 1
    return L

def read_values(base, key):
    """Return dict of registry keys and values.

    All names are converted to lowercase.
    """
    try:
        handle = RegOpenKeyEx(base, key)
    except RegError:
        return None
    d = {}
    i = 0
    while True:
        try:
            name, value, type = RegEnumValue(handle, i)
        except RegError:
            break
        name = name.lower()
        d[convert_mbcs(name)] = convert_mbcs(value)
        i += 1
    return d

def convert_mbcs(s):
    dec = getattr(s, "decode", None)
    if dec is not None:
        try:
            s = dec("mbcs")
        except UnicodeError:
            pass
    return s

class MacroExpander:
    def __init__(self, version):
        self.macros = {}
        self.load_macros(version)

    def set_macro(self, macro, path, key):
        for base in HKEYS:
            d = read_values(base, path)
            if d:
                self.macros["$(%s)" % macro] = d[key]
                break

    def load_macros(self, version):
        vsbase = r"Software\Microsoft\VisualStudio\%0.1f" % version
        self.set_macro("VCInstallDir", vsbase + r"\Setup\VC", "productdir")
        self.set_macro("VSInstallDir", vsbase + r"\Setup\VS", "productdir")
        net = r"Software\Microsoft\.NETFramework"
        self.set_macro("FrameworkDir", net, "installroot")
        try:
            if version > 7.0:
                self.set_macro("FrameworkSDKDir", net, "sdkinstallrootv1.1")
            else:
                self.set_macro("FrameworkSDKDir", net, "sdkinstallroot")
        except KeyError:
            raise DistutilsPlatformError(
            """Python was built with Visual Studio 2003;
extensions must be built with a compiler that can generate compatible binaries.
Visual Studio 2003 was not found on this system. If you have Cygwin installed,
you can try compiling with MingW32, by passing "-c mingw32" to setup.py.""")

        p = r"Software\Microsoft\NET Framework Setup\Product"
        for base in HKEYS:
            try:
                h = RegOpenKeyEx(base, p)
            except RegError:
                continue
            key = RegEnumKey(h, 0)
            d = read_values(base, r"%s\%s" % (p, key))
            self.macros["$(FrameworkVersion)"] = d["version"]

    def sub(self, s):
        for k, v in self.macros.items():
            s = s.replace(k, v)
        return s

def get_build_version():
    """Return the version of MSVC that was used to build Python.

    For Python 2.3 and up, the version number is included in
    sys.version.  For earlier versions, assume the compiler is MSVC 6.
    """
    prefix = "MSC v."
    i = sys.version.find(prefix)
    if i == -1:
        return 6
    i = i + len(prefix)
    s, rest = sys.version[i:].split(" ", 1)
    majorVersion = int(s[:-2]) - 6
    if majorVersion >= 13:
        # v13 was skipped and should be v14
        majorVersion += 1
    minorVersion = int(s[2:3]) / 10.0
    # I don't think paths are affected by minor version in version 6
    if majorVersion == 6:
        minorVersion = 0
    if majorVersion >= 6:
        return majorVersion + minorVersion
    # else we don't know what version of the compiler this is
    return None

def get_build_architecture():
    """Return the processor architecture.

    Possible results are "Intel" or "AMD64".
    """

    prefix = " bit ("
    i = sys.version.find(prefix)
    if i == -1:
        return "Intel"
    j = sys.version.find(")", i)
    return sys.version[i+len(prefix):j]
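
# Illustrative sketch (editor's addition, not part of the upstream module):
# the architecture is sliced out of sys.version's build tag, i.e. the text
# between ' bit (' and the closing parenthesis.
#
#   >>> tag = "[MSC v.1500 64 bit (AMD64)]"  # hypothetical sys.version tail
#   >>> i = tag.find(" bit (")
#   >>> tag[i + len(" bit ("):tag.find(")", i)]
#   'AMD64'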

def normalize_and_reduce_paths(paths):
    """Return a list of normalized paths with duplicates removed.

    The current order of paths is maintained.
    """
    # Paths are normalized so things like:  /a and /a/ aren't both preserved.
    reduced_paths = []
    for p in paths:
        np = os.path.normpath(p)
        # XXX(nnorwitz): O(n**2), if reduced_paths gets long perhaps use a set.
        if np not in reduced_paths:
            reduced_paths.append(np)
    return reduced_paths


class MSVCCompiler(CCompiler):
    """Concrete class that implements an interface to Microsoft Visual C++,
       as defined by the CCompiler abstract class."""

    compiler_type = 'msvc'

    # Just set this so CCompiler's constructor doesn't barf.  We currently
    # don't use the 'set_executables()' bureaucracy provided by CCompiler,
    # as it really isn't necessary for this sort of single-compiler class.
    # Would be nice to have a consistent interface with UnixCCompiler,
    # though, so it's worth thinking about.
    executables = {}

    # Private class data (need to distinguish C from C++ source for compiler)
    _c_extensions = ['.c']
    _cpp_extensions = ['.cc', '.cpp', '.cxx']
    _rc_extensions = ['.rc']
    _mc_extensions = ['.mc']

    # Needed for the filename generation methods provided by the
    # base class, CCompiler.
    src_extensions = (_c_extensions + _cpp_extensions +
                      _rc_extensions + _mc_extensions)
    res_extension = '.res'
    obj_extension = '.obj'
    static_lib_extension = '.lib'
    shared_lib_extension = '.dll'
    static_lib_format = shared_lib_format = '%s%s'
    exe_extension = '.exe'

    def __init__(self, verbose=0, dry_run=0, force=0):
        CCompiler.__init__(self, verbose, dry_run, force)
        self.__version = get_build_version()
        self.__arch = get_build_architecture()
        if self.__arch == "Intel":
            # x86
            if self.__version >= 7:
                self.__root = r"Software\Microsoft\VisualStudio"
                self.__macros = MacroExpander(self.__version)
            else:
                self.__root = r"Software\Microsoft\Devstudio"
            self.__product = "Visual Studio version %s" % self.__version
        else:
            # Win64. Assume this was built with the platform SDK
            self.__product = "Microsoft SDK compiler %s" % (self.__version + 6)

        self.initialized = False

    def initialize(self):
        self.__paths = []
        if "DISTUTILS_USE_SDK" in os.environ and "MSSdk" in os.environ and self.find_exe("cl.exe"):
            # Assume that the SDK set up everything alright; don't try to be
            # smarter
            self.cc = "cl.exe"
            self.linker = "link.exe"
            self.lib = "lib.exe"
            self.rc = "rc.exe"
            self.mc = "mc.exe"
        else:
            self.__paths = self.get_msvc_paths("path")

            if len(self.__paths) == 0:
                raise DistutilsPlatformError("Python was built with %s, "
                       "and extensions need to be built with the same "
                       "version of the compiler, but it isn't installed."
                       % self.__product)

            self.cc = self.find_exe("cl.exe")
            self.linker = self.find_exe("link.exe")
            self.lib = self.find_exe("lib.exe")
            self.rc = self.find_exe("rc.exe")   # resource compiler
            self.mc = self.find_exe("mc.exe")   # message compiler
            self.set_path_env_var('lib')
            self.set_path_env_var('include')

        # extend the MSVC path with the current path
        try:
            for p in os.environ['path'].split(';'):
                self.__paths.append(p)
        except KeyError:
            pass
        self.__paths = normalize_and_reduce_paths(self.__paths)
        os.environ['path'] = ";".join(self.__paths)

        self.preprocess_options = None
        if self.__arch == "Intel":
            self.compile_options = ['/nologo', '/O2', '/MD', '/W3', '/GX',
                                    '/DNDEBUG']
            self.compile_options_debug = ['/nologo', '/Od', '/MDd', '/W3', '/GX',
                                          '/Z7', '/D_DEBUG']
        else:
            # Win64
            self.compile_options = ['/nologo', '/O2', '/MD', '/W3', '/GS-',
                                    '/DNDEBUG']
            self.compile_options_debug = ['/nologo', '/Od', '/MDd', '/W3', '/GS-',
                                          '/Z7', '/D_DEBUG']

        self.ldflags_shared = ['/DLL', '/nologo', '/INCREMENTAL:NO']
        if self.__version >= 7:
            self.ldflags_shared_debug = [
                '/DLL', '/nologo', '/INCREMENTAL:no', '/DEBUG'
                ]
        else:
            self.ldflags_shared_debug = [
                '/DLL', '/nologo', '/INCREMENTAL:no', '/pdb:None', '/DEBUG'
                ]
        self.ldflags_static = ['/nologo']

        self.initialized = True

    # -- Worker methods ------------------------------------------------

    def object_filenames(self,
                         source_filenames,
                         strip_dir=0,
                         output_dir=''):
        # Copied from ccompiler.py, extended to return .res as 'object'-file
        # for .rc input file
        if output_dir is None:
            output_dir = ''
        obj_names = []
        for src_name in source_filenames:
            (base, ext) = os.path.splitext(src_name)
            base = os.path.splitdrive(base)[1]  # Chop off the drive
            base = base[os.path.isabs(base):]   # If abs, chop off leading /
            if ext not in self.src_extensions:
                # Better to raise an exception instead of silently continuing
                # and later complain about sources and targets having
                # different lengths
                raise CompileError("Don't know how to compile %s" % src_name)
            if strip_dir:
                base = os.path.basename(base)
            if ext in self._rc_extensions:
                obj_names.append(os.path.join(output_dir,
                                              base + self.res_extension))
            elif ext in self._mc_extensions:
                obj_names.append(os.path.join(output_dir,
                                              base + self.res_extension))
            else:
                obj_names.append(os.path.join(output_dir,
                                              base + self.obj_extension))
        return obj_names


    def compile(self, sources,
                output_dir=None, macros=None, include_dirs=None, debug=0,
                extra_preargs=None, extra_postargs=None, depends=None):

        if not self.initialized:
            self.initialize()
        compile_info = self._setup_compile(output_dir, macros, include_dirs,
                                           sources, depends, extra_postargs)
        macros, objects, extra_postargs, pp_opts, build = compile_info

        compile_opts = extra_preargs or []
        compile_opts.append('/c')
        if debug:
            compile_opts.extend(self.compile_options_debug)
        else:
            compile_opts.extend(self.compile_options)

        for obj in objects:
            try:
                src, ext = build[obj]
            except KeyError:
                continue
            if debug:
                # pass the full pathname to MSVC in debug mode,
                # this allows the debugger to find the source file
                # without asking the user to browse for it
                src = os.path.abspath(src)

            if ext in self._c_extensions:
                input_opt = "/Tc" + src
            elif ext in self._cpp_extensions:
                input_opt = "/Tp" + src
            elif ext in self._rc_extensions:
                # compile .RC to .RES file
                input_opt = src
                output_opt = "/fo" + obj
                try:
                    self.spawn([self.rc] + pp_opts +
                               [output_opt] + [input_opt])
                except DistutilsExecError as msg:
                    raise CompileError(msg)
                continue
            elif ext in self._mc_extensions:
                # Compile .MC to .RC file to .RES file.
                #   * '-h dir' specifies the directory for the
                #     generated include file
                #   * '-r dir' specifies the target directory of the
                #     generated RC file and the binary message resource
                #     it includes
                #
                # For now (since there are no options to change this),
                # we use the source-directory for the include file and
                # the build directory for the RC file and message
                # resources. This works at least for win32all.
                h_dir = os.path.dirname(src)
                rc_dir = os.path.dirname(obj)
                try:
                    # first compile .MC to .RC and .H file
                    self.spawn([self.mc] +
                               ['-h', h_dir, '-r', rc_dir] + [src])
                    base, _ = os.path.splitext(os.path.basename(src))
                    rc_file = os.path.join(rc_dir, base + '.rc')
                    # then compile .RC to .RES file
                    self.spawn([self.rc] +
                               ["/fo" + obj] + [rc_file])

                except DistutilsExecError as msg:
                    raise CompileError(msg)
                continue
            else:
                # how to handle this file?
                raise CompileError("Don't know how to compile %s to %s"
                                   % (src, obj))

            output_opt = "/Fo" + obj
            try:
                self.spawn([self.cc] + compile_opts + pp_opts +
                           [input_opt, output_opt] +
                           extra_postargs)
            except DistutilsExecError as msg:
                raise CompileError(msg)

        return objects


    def create_static_lib(self,
                          objects,
                          output_libname,
                          output_dir=None,
                          debug=0,
                          target_lang=None):

        if not self.initialized:
            self.initialize()
        (objects, output_dir) = self._fix_object_args(objects, output_dir)
        output_filename = self.library_filename(output_libname,
                                                output_dir=output_dir)

        if self._need_link(objects, output_filename):
            lib_args = objects + ['/OUT:' + output_filename]
            if debug:
                pass  # XXX what goes here?
            try:
                self.spawn([self.lib] + lib_args)
            except DistutilsExecError as msg:
                raise LibError(msg)
        else:
            log.debug("skipping %s (up-to-date)", output_filename)


    def link(self,
             target_desc,
             objects,
             output_filename,
             output_dir=None,
             libraries=None,
             library_dirs=None,
             runtime_library_dirs=None,
             export_symbols=None,
             debug=0,
             extra_preargs=None,
             extra_postargs=None,
             build_temp=None,
             target_lang=None):

        if not self.initialized:
            self.initialize()
        (objects, output_dir) = self._fix_object_args(objects, output_dir)
        fixed_args = self._fix_lib_args(libraries, library_dirs,
                                        runtime_library_dirs)
        (libraries, library_dirs, runtime_library_dirs) = fixed_args

        if runtime_library_dirs:
            self.warn("I don't know what to do with 'runtime_library_dirs': "
                      + str(runtime_library_dirs))

        lib_opts = gen_lib_options(self,
                                   library_dirs, runtime_library_dirs,
                                   libraries)
        if output_dir is not None:
            output_filename = os.path.join(output_dir, output_filename)

        if self._need_link(objects, output_filename):
            if target_desc == CCompiler.EXECUTABLE:
                if debug:
                    ldflags = self.ldflags_shared_debug[1:]
                else:
                    ldflags = self.ldflags_shared[1:]
            else:
                if debug:
                    ldflags = self.ldflags_shared_debug
                else:
                    ldflags = self.ldflags_shared

            export_opts = []
            for sym in (export_symbols or []):
                export_opts.append("/EXPORT:" + sym)

            ld_args = (ldflags + lib_opts + export_opts +
                       objects + ['/OUT:' + output_filename])

            # The MSVC linker generates .lib and .exp files, which cannot be
            # suppressed by any linker switches. The .lib files may even be
            # needed! Make sure they are generated in the temporary build
            # directory. Since they have different names for debug and release
            # builds, they can go into the same directory.
            if export_symbols is not None:
                (dll_name, dll_ext) = os.path.splitext(
                    os.path.basename(output_filename))
                implib_file = os.path.join(
                    os.path.dirname(objects[0]),
                    self.library_filename(dll_name))
                ld_args.append('/IMPLIB:' + implib_file)

            if extra_preargs:
                ld_args[:0] = extra_preargs
            if extra_postargs:
                ld_args.extend(extra_postargs)

            self.mkpath(os.path.dirname(output_filename))
            try:
                self.spawn([self.linker] + ld_args)
            except DistutilsExecError as msg:
                raise LinkError(msg)

        else:
            log.debug("skipping %s (up-to-date)", output_filename)


    # -- Miscellaneous methods -----------------------------------------
    # These are all used by the 'gen_lib_options()' function, in
    # ccompiler.py.

    def library_dir_option(self, dir):
        return "/LIBPATH:" + dir

    def runtime_library_dir_option(self, dir):
        raise DistutilsPlatformError(
            "don't know how to set runtime library search path for MSVC++")

    def library_option(self, lib):
        return self.library_filename(lib)


    def find_library_file(self, dirs, lib, debug=0):
        # Prefer a debugging library if found (and requested), but deal
        # with it if we don't have one.
        if debug:
            try_names = [lib + "_d", lib]
        else:
            try_names = [lib]
        for dir in dirs:
            for name in try_names:
                libfile = os.path.join(dir, self.library_filename(name))
                if os.path.exists(libfile):
                    return libfile
        else:
            # Oops, didn't find it in *any* of 'dirs'
            return None

    # Helper methods for using the MSVC registry settings

    def find_exe(self, exe):
        """Return path to an MSVC executable program.

        Tries to find the program in several places: first, one of the
        MSVC program search paths from the registry; next, the directories
        in the PATH environment variable.  If any of those work, return an
        absolute path that is known to exist.  If none of them work, just
        return the original program name, 'exe'.
        """
        for p in self.__paths:
            fn = os.path.join(os.path.abspath(p), exe)
            if os.path.isfile(fn):
                return fn

        # didn't find it; try existing path
        for p in os.environ['Path'].split(';'):
            fn = os.path.join(os.path.abspath(p), exe)
            if os.path.isfile(fn):
                return fn

        return exe

    def get_msvc_paths(self, path, platform='x86'):
        """Get a list of devstudio directories (include, lib or path).

        Return a list of strings.  The list will be empty if unable to
        access the registry or appropriate registry keys not found.
        """
        if not _can_read_reg:
            return []

        path = path + " dirs"
        if self.__version >= 7:
            key = (r"%s\%0.1f\VC\VC_OBJECTS_PLATFORM_INFO\Win32\Directories"
                   % (self.__root, self.__version))
        else:
            key = (r"%s\6.0\Build System\Components\Platforms"
                   r"\Win32 (%s)\Directories" % (self.__root, platform))

        for base in HKEYS:
            d = read_values(base, key)
            if d:
                if self.__version >= 7:
                    return self.__macros.sub(d[path]).split(";")
                else:
                    return d[path].split(";")
        # MSVC 6 seems to create the registry entries we need only when
        # the GUI is run.
        if self.__version == 6:
            for base in HKEYS:
                if read_values(base, r"%s\6.0" % self.__root) is not None:
                    self.warn("It seems you have Visual Studio 6 installed, "
                        "but the expected registry settings are not present.\n"
                        "You must at least run the Visual Studio GUI once "
                        "so that these entries are created.")
                    break
        return []

    def set_path_env_var(self, name):
        """Set environment variable 'name' to an MSVC path type value.

        This is equivalent to a SET command prior to execution of spawned
        commands.
        """

        if name == "lib":
            p = self.get_msvc_paths("library")
        else:
            p = self.get_msvc_paths(name)
        if p:
            os.environ[name] = ';'.join(p)


if get_build_version() >= 8.0:
    log.debug("Importing new compiler from distutils.msvc9compiler")
|
||||
OldMSVCCompiler = MSVCCompiler
|
||||
from distutils.msvc9compiler import MSVCCompiler
|
||||
# get_build_architecture not really relevant now we support cross-compile
|
||||
from distutils.msvc9compiler import MacroExpander
|
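# A minimal sketch (not part of the vendored module; names are illustrative)
# of how the 'link' method above assembles the MSVC linker command line:
#
#   export_opts = ['/EXPORT:' + sym for sym in export_symbols or []]
#   ld_args = ldflags + lib_opts + export_opts + objects + ['/OUT:' + output_filename]
#
# e.g. for objects=['spam.obj'], export_symbols=['PyInit_spam'] and
# output_filename='spam.pyd', the tail of ld_args is:
#   ['/EXPORT:PyInit_spam', 'spam.obj', '/OUT:spam.pyd']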
@ -0,0 +1,19 @@
import sys
import subprocess


def __optim_args_from_interpreter_flags():
    """Return a list of command-line arguments reproducing the current
    optimization settings in sys.flags."""
    args = []
    value = sys.flags.optimize
    if value > 0:
        args.append("-" + "O" * value)
    return args


_optim_args_from_interpreter_flags = getattr(
    subprocess,
    "_optim_args_from_interpreter_flags",
    __optim_args_from_interpreter_flags,
)
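# Illustration of the shim above on a plain CPython interpreter:
#   sys.flags.optimize == 0  ->  []
#   sys.flags.optimize == 1  ->  ['-O']
#   sys.flags.optimize == 2  ->  ['-OO']
# so a re-spawned child process can be launched with the parent's
# optimization level.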
@ -0,0 +1,7 @@
def aix_platform(osname, version, release):
    try:
        import _aix_support
        return _aix_support.aix_platform()
    except ImportError:
        pass
    return "%s-%s.%s" % (osname, version, release)
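# Example of the fallback branch (when '_aix_support' is unavailable):
#   aix_platform('aix', '7', '2')  ->  'aix-7.2'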
@ -0,0 +1,106 @@
"""distutils.spawn

Provides the 'spawn()' function, a front-end to various platform-
specific functions for launching another program in a sub-process.
Also provides the 'find_executable()' to search the path for a given
executable name.
"""

import sys
import os
import subprocess

from distutils.errors import DistutilsPlatformError, DistutilsExecError
from distutils.debug import DEBUG
from distutils import log


def spawn(cmd, search_path=1, verbose=0, dry_run=0, env=None):
    """Run another program, specified as a command list 'cmd', in a new process.

    'cmd' is just the argument list for the new process, ie.
    cmd[0] is the program to run and cmd[1:] are the rest of its arguments.
    There is no way to run a program with a name different from that of its
    executable.

    If 'search_path' is true (the default), the system's executable
    search path will be used to find the program; otherwise, cmd[0]
    must be the exact path to the executable.  If 'dry_run' is true,
    the command will not actually be run.

    Raise DistutilsExecError if running the program fails in any way; just
    return on success.
    """
    # cmd is documented as a list, but just in case some code passes a tuple
    # in, protect our %-formatting code against horrible death
    cmd = list(cmd)

    log.info(subprocess.list2cmdline(cmd))
    if dry_run:
        return

    if search_path:
        executable = find_executable(cmd[0])
        if executable is not None:
            cmd[0] = executable

    env = env if env is not None else dict(os.environ)

    if sys.platform == 'darwin':
        from distutils.util import MACOSX_VERSION_VAR, get_macosx_target_ver
        macosx_target_ver = get_macosx_target_ver()
        if macosx_target_ver:
            env[MACOSX_VERSION_VAR] = macosx_target_ver

    try:
        proc = subprocess.Popen(cmd, env=env)
        proc.wait()
        exitcode = proc.returncode
    except OSError as exc:
        if not DEBUG:
            cmd = cmd[0]
        raise DistutilsExecError(
            "command %r failed: %s" % (cmd, exc.args[-1])) from exc

    if exitcode:
        if not DEBUG:
            cmd = cmd[0]
        raise DistutilsExecError(
            "command %r failed with exit code %s" % (cmd, exitcode))


def find_executable(executable, path=None):
    """Tries to find 'executable' in the directories listed in 'path'.

    'path' is a string listing directories separated by 'os.pathsep';
    it defaults to os.environ['PATH'].  Returns the complete filename
    or None if not found.
    """
    _, ext = os.path.splitext(executable)
    if (sys.platform == 'win32') and (ext != '.exe'):
        executable = executable + '.exe'

    if os.path.isfile(executable):
        return executable

    if path is None:
        path = os.environ.get('PATH', None)
        if path is None:
            try:
                path = os.confstr("CS_PATH")
            except (AttributeError, ValueError):
                # os.confstr() or CS_PATH is not available
                path = os.defpath
        # bpo-35755: Don't use os.defpath if the PATH environment variable is
        # set to an empty string

    # PATH='' doesn't match, whereas PATH=':' looks in the current directory
    if not path:
        return None

    paths = path.split(os.pathsep)
    for p in paths:
        f = os.path.join(p, executable)
        if os.path.isfile(f):
            # the file exists, we have a shot at spawn working
            return f
    return None
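# A minimal usage sketch (hypothetical command; not part of the module):
#
#   from distutils.spawn import spawn, find_executable
#
#   find_executable('python')            # absolute path, or None if not found
#   spawn(['python', '--version'])       # raises DistutilsExecError on failure
#   spawn(['python', '--version'], dry_run=1)   # only logs the command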
@ -0,0 +1,567 @@
"""Provide access to Python's configuration information.  The specific
configuration variables available depend heavily on the platform and
configuration.  The values may be retrieved using
get_config_var(name), and the list of variables is available via
get_config_vars().keys().  Additional convenience functions are also
available.

Written by:   Fred L. Drake, Jr.
Email:        <fdrake@acm.org>
"""

import _imp
import os
import re
import sys
import sysconfig

from .errors import DistutilsPlatformError

IS_PYPY = '__pypy__' in sys.builtin_module_names

# These are needed in a couple of spots, so just compute them once.
PREFIX = os.path.normpath(sys.prefix)
EXEC_PREFIX = os.path.normpath(sys.exec_prefix)
BASE_PREFIX = os.path.normpath(sys.base_prefix)
BASE_EXEC_PREFIX = os.path.normpath(sys.base_exec_prefix)

# Path to the base directory of the project. On Windows the binary may
# live in project/PCbuild/win32 or project/PCbuild/amd64.
# set for cross builds
if "_PYTHON_PROJECT_BASE" in os.environ:
    project_base = os.path.abspath(os.environ["_PYTHON_PROJECT_BASE"])
else:
    if sys.executable:
        project_base = os.path.dirname(os.path.abspath(sys.executable))
    else:
        # sys.executable can be empty if argv[0] has been changed and Python is
        # unable to retrieve the real program name
        project_base = os.getcwd()


# python_build: (Boolean) if true, we're either building Python or
# building an extension with an un-installed Python, so we use
# different (hard-wired) directories.
def _is_python_source_dir(d):
    for fn in ("Setup", "Setup.local"):
        if os.path.isfile(os.path.join(d, "Modules", fn)):
            return True
    return False


_sys_home = getattr(sys, '_home', None)

if os.name == 'nt':
    def _fix_pcbuild(d):
        if d and os.path.normcase(d).startswith(
                os.path.normcase(os.path.join(PREFIX, "PCbuild"))):
            return PREFIX
        return d
    project_base = _fix_pcbuild(project_base)
    _sys_home = _fix_pcbuild(_sys_home)


def _python_build():
    if _sys_home:
        return _is_python_source_dir(_sys_home)
    return _is_python_source_dir(project_base)


python_build = _python_build()


# Calculate the build qualifier flags if they are defined.  Adding the flags
# to the include and lib directories only makes sense for an installation, not
# an in-source build.
build_flags = ''
try:
    if not python_build:
        build_flags = sys.abiflags
except AttributeError:
    # It's not a configure-based build, so the sys module doesn't have
    # this attribute, which is fine.
    pass


def get_python_version():
    """Return a string containing the major and minor Python version,
    leaving off the patchlevel.  Sample return values could be '1.5'
    or '2.2'.
    """
    return '%d.%d' % sys.version_info[:2]


def get_python_inc(plat_specific=0, prefix=None):
    """Return the directory containing installed Python header files.

    If 'plat_specific' is false (the default), this is the path to the
    non-platform-specific header files, i.e. Python.h and so on;
    otherwise, this is the path to platform-specific header files
    (namely pyconfig.h).

    If 'prefix' is supplied, use it instead of sys.base_prefix or
    sys.base_exec_prefix -- i.e., ignore 'plat_specific'.
    """
    if prefix is None:
        prefix = plat_specific and BASE_EXEC_PREFIX or BASE_PREFIX
    if os.name == "posix":
        if IS_PYPY and sys.version_info < (3, 8):
            return os.path.join(prefix, 'include')
        if python_build:
            # Assume the executable is in the build directory.  The
            # pyconfig.h file should be in the same directory.  Since
            # the build directory may not be the source directory, we
            # must use "srcdir" from the makefile to find the "Include"
            # directory.
            if plat_specific:
                return _sys_home or project_base
            else:
                incdir = os.path.join(get_config_var('srcdir'), 'Include')
                return os.path.normpath(incdir)
        implementation = 'pypy' if IS_PYPY else 'python'
        python_dir = implementation + get_python_version() + build_flags
        return os.path.join(prefix, "include", python_dir)
    elif os.name == "nt":
        if python_build:
            # Include both the include and PC dir to ensure we can find
            # pyconfig.h
            return (os.path.join(prefix, "include") + os.path.pathsep +
                    os.path.join(prefix, "PC"))
        return os.path.join(prefix, "include")
    else:
        raise DistutilsPlatformError(
            "I don't know where Python installs its C header files "
            "on platform '%s'" % os.name)


# allow this behavior to be monkey-patched. Ref pypa/distutils#2.
def _posix_lib(standard_lib, libpython, early_prefix, prefix):
    if standard_lib:
        return libpython
    else:
        return os.path.join(libpython, "site-packages")


def get_python_lib(plat_specific=0, standard_lib=0, prefix=None):
    """Return the directory containing the Python library (standard or
    site additions).

    If 'plat_specific' is true, return the directory containing
    platform-specific modules, i.e. any module from a non-pure-Python
    module distribution; otherwise, return the platform-shared library
    directory.  If 'standard_lib' is true, return the directory
    containing standard Python library modules; otherwise, return the
    directory for site-specific modules.

    If 'prefix' is supplied, use it instead of sys.base_prefix or
    sys.base_exec_prefix -- i.e., ignore 'plat_specific'.
    """

    if IS_PYPY and sys.version_info < (3, 8):
        # PyPy-specific schema
        if prefix is None:
            prefix = PREFIX
        if standard_lib:
            return os.path.join(prefix, "lib-python", sys.version[0])
        return os.path.join(prefix, 'site-packages')

    early_prefix = prefix

    if prefix is None:
        if standard_lib:
            prefix = plat_specific and BASE_EXEC_PREFIX or BASE_PREFIX
        else:
            prefix = plat_specific and EXEC_PREFIX or PREFIX

    if os.name == "posix":
        if plat_specific or standard_lib:
            # Platform-specific modules (any module from a non-pure-Python
            # module distribution) or standard Python library modules.
            libdir = getattr(sys, "platlibdir", "lib")
        else:
            # Pure Python
            libdir = "lib"
        implementation = 'pypy' if IS_PYPY else 'python'
        libpython = os.path.join(prefix, libdir,
                                 implementation + get_python_version())
        return _posix_lib(standard_lib, libpython, early_prefix, prefix)
    elif os.name == "nt":
        if standard_lib:
            return os.path.join(prefix, "Lib")
        else:
            return os.path.join(prefix, "Lib", "site-packages")
    else:
        raise DistutilsPlatformError(
            "I don't know where Python installs its library "
            "on platform '%s'" % os.name)
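# Usage sketch (return values are installation-specific; a hypothetical
# POSIX install under /usr is shown):
#
#   get_python_inc()                 # e.g. '/usr/include/python3.10'
#   get_python_lib()                 # e.g. '/usr/lib/python3.10/site-packages'
#   get_python_lib(standard_lib=1)   # e.g. '/usr/lib/python3.10'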
def customize_compiler(compiler):
    """Do any platform-specific customization of a CCompiler instance.

    Mainly needed on Unix, so we can plug in the information that
    varies across Unices and is stored in Python's Makefile.
    """
    if compiler.compiler_type == "unix":
        if sys.platform == "darwin":
            # Perform first-time customization of compiler-related
            # config vars on OS X now that we know we need a compiler.
            # This is primarily to support Pythons from binary
            # installers.  The kind and paths to build tools on
            # the user system may vary significantly from the system
            # that Python itself was built on.  Also the user OS
            # version and build tools may not support the same set
            # of CPU architectures for universal builds.
            global _config_vars
            # Use get_config_var() to ensure _config_vars is initialized.
            if not get_config_var('CUSTOMIZED_OSX_COMPILER'):
                import _osx_support
                _osx_support.customize_compiler(_config_vars)
                _config_vars['CUSTOMIZED_OSX_COMPILER'] = 'True'

        (cc, cxx, cflags, ccshared, ldshared, shlib_suffix, ar, ar_flags) = \
            get_config_vars('CC', 'CXX', 'CFLAGS',
                            'CCSHARED', 'LDSHARED', 'SHLIB_SUFFIX', 'AR', 'ARFLAGS')

        if 'CC' in os.environ:
            newcc = os.environ['CC']
            if ('LDSHARED' not in os.environ
                    and ldshared.startswith(cc)):
                # If CC is overridden, use that as the default
                # command for LDSHARED as well
                ldshared = newcc + ldshared[len(cc):]
            cc = newcc
        if 'CXX' in os.environ:
            cxx = os.environ['CXX']
        if 'LDSHARED' in os.environ:
            ldshared = os.environ['LDSHARED']
        if 'CPP' in os.environ:
            cpp = os.environ['CPP']
        else:
            cpp = cc + " -E"  # not always
        if 'LDFLAGS' in os.environ:
            ldshared = ldshared + ' ' + os.environ['LDFLAGS']
        if 'CFLAGS' in os.environ:
            cflags = cflags + ' ' + os.environ['CFLAGS']
            ldshared = ldshared + ' ' + os.environ['CFLAGS']
        if 'CPPFLAGS' in os.environ:
            cpp = cpp + ' ' + os.environ['CPPFLAGS']
            cflags = cflags + ' ' + os.environ['CPPFLAGS']
            ldshared = ldshared + ' ' + os.environ['CPPFLAGS']
        if 'AR' in os.environ:
            ar = os.environ['AR']
        if 'ARFLAGS' in os.environ:
            archiver = ar + ' ' + os.environ['ARFLAGS']
        else:
            archiver = ar + ' ' + ar_flags

        cc_cmd = cc + ' ' + cflags
        compiler.set_executables(
            preprocessor=cpp,
            compiler=cc_cmd,
            compiler_so=cc_cmd + ' ' + ccshared,
            compiler_cxx=cxx,
            linker_so=ldshared,
            linker_exe=cc,
            archiver=archiver)

        if 'RANLIB' in os.environ and compiler.executables.get('ranlib', None):
            compiler.set_executables(ranlib=os.environ['RANLIB'])

        compiler.shared_lib_extension = shlib_suffix
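# Sketch of the environment-variable precedence implemented above
# (illustrative values; the base commands come from Python's Makefile):
#
#   CC=clang        -> compile command becomes 'clang <CFLAGS>'; LDSHARED
#                      inherits the new driver when LDSHARED itself is unset
#                      and the link command starts with the old CC
#   CFLAGS='-g -O0' -> appended to both the compile and the link commands
#   AR=llvm-ar      -> used as the archiver, combined with ARFLAGS if given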
def get_config_h_filename():
    """Return full pathname of installed pyconfig.h file."""
    if python_build:
        if os.name == "nt":
            inc_dir = os.path.join(_sys_home or project_base, "PC")
        else:
            inc_dir = _sys_home or project_base
        return os.path.join(inc_dir, 'pyconfig.h')
    else:
        return sysconfig.get_config_h_filename()


def get_makefile_filename():
    """Return full pathname of installed Makefile from the Python build."""
    return sysconfig.get_makefile_filename()


def parse_config_h(fp, g=None):
    """Parse a config.h-style file.

    A dictionary containing name/value pairs is returned.  If an
    optional dictionary is passed in as the second argument, it is
    used instead of a new dictionary.
    """
    return sysconfig.parse_config_h(fp, vars=g)


# Regexes needed for parsing Makefile (and similar syntaxes,
# like old-style Setup files).
_variable_rx = re.compile(r"([a-zA-Z][a-zA-Z0-9_]+)\s*=\s*(.*)")
_findvar1_rx = re.compile(r"\$\(([A-Za-z][A-Za-z0-9_]*)\)")
_findvar2_rx = re.compile(r"\${([A-Za-z][A-Za-z0-9_]*)}")
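# Quick illustration of the three regexes defined above:
#
#   _variable_rx.match('CC = gcc').group(1, 2)     -> ('CC', 'gcc')
#   _findvar1_rx.search('$(CC) -c').group(1)       -> 'CC'
#   _findvar2_rx.search('${CFLAGS} -O2').group(1)  -> 'CFLAGS'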
def parse_makefile(fn, g=None):
    """Parse a Makefile-style file.

    A dictionary containing name/value pairs is returned.  If an
    optional dictionary is passed in as the second argument, it is
    used instead of a new dictionary.
    """
    from distutils.text_file import TextFile
    fp = TextFile(fn, strip_comments=1, skip_blanks=1, join_lines=1,
                  errors="surrogateescape")

    if g is None:
        g = {}
    done = {}
    notdone = {}

    while True:
        line = fp.readline()
        if line is None:  # eof
            break
        m = _variable_rx.match(line)
        if m:
            n, v = m.group(1, 2)
            v = v.strip()
            # `$$' is a literal `$' in make
            tmpv = v.replace('$$', '')

            if "$" in tmpv:
                notdone[n] = v
            else:
                try:
                    v = int(v)
                except ValueError:
                    # insert literal `$'
                    done[n] = v.replace('$$', '$')
                else:
                    done[n] = v

    # Variables with a 'PY_' prefix in the makefile. These need to
    # be made available without that prefix through sysconfig.
    # Special care is needed to ensure that variable expansion works, even
    # if the expansion uses the name without a prefix.
    renamed_variables = ('CFLAGS', 'LDFLAGS', 'CPPFLAGS')

    # do variable interpolation here
    while notdone:
        for name in list(notdone):
            value = notdone[name]
            m = _findvar1_rx.search(value) or _findvar2_rx.search(value)
            if m:
                n = m.group(1)
                found = True
                if n in done:
                    item = str(done[n])
                elif n in notdone:
                    # get it on a subsequent round
                    found = False
                elif n in os.environ:
                    # do it like make: fall back to environment
                    item = os.environ[n]

                elif n in renamed_variables:
                    if name.startswith('PY_') and name[3:] in renamed_variables:
                        item = ""

                    elif 'PY_' + n in notdone:
                        found = False

                    else:
                        item = str(done['PY_' + n])
                else:
                    done[n] = item = ""
                if found:
                    after = value[m.end():]
                    value = value[:m.start()] + item + after
                    if "$" in after:
                        notdone[name] = value
                    else:
                        try:
                            value = int(value)
                        except ValueError:
                            done[name] = value.strip()
                        else:
                            done[name] = value
                        del notdone[name]

                        if name.startswith('PY_') \
                                and name[3:] in renamed_variables:

                            name = name[3:]
                            if name not in done:
                                done[name] = value
            else:
                # bogus variable reference; just drop it since we can't deal
                del notdone[name]

    fp.close()

    # strip spurious spaces
    for k, v in done.items():
        if isinstance(v, str):
            done[k] = v.strip()

    # save the results in the global dictionary
    g.update(done)
    return g
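# Usage sketch with hypothetical file contents.  Given 'Makefile.in':
#
#   CC = gcc
#   PY_CFLAGS = -O2
#   CFLAGS = $(PY_CFLAGS) -Wall
#
# parse_makefile('Makefile.in') returns
#   {'CC': 'gcc', 'PY_CFLAGS': '-O2', 'CFLAGS': '-O2 -Wall'}
# with the $(PY_CFLAGS) reference interpolated as described above.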
def expand_makefile_vars(s, vars):
    """Expand Makefile-style variables -- "${foo}" or "$(foo)" -- in
    'string' according to 'vars' (a dictionary mapping variable names to
    values).  Variables not present in 'vars' are silently expanded to the
    empty string.  The variable values in 'vars' should not contain further
    variable expansions; if 'vars' is the output of 'parse_makefile()',
    you're fine.  Returns a variable-expanded version of 's'.
    """

    # This algorithm does multiple expansion, so if vars['foo'] contains
    # "${bar}", it will expand ${foo} to ${bar}, and then expand
    # ${bar}... and so forth.  This is fine as long as 'vars' comes from
    # 'parse_makefile()', which takes care of such expansions eagerly,
    # according to make's variable expansion semantics.

    while True:
        m = _findvar1_rx.search(s) or _findvar2_rx.search(s)
        if m:
            (beg, end) = m.span()
            s = s[0:beg] + vars.get(m.group(1)) + s[end:]
        else:
            break
    return s


_config_vars = None


_sysconfig_name_tmpl = '_sysconfigdata_{abi}_{platform}_{multiarch}'
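# Example of the expansion semantics described in the docstring:
#
#   expand_makefile_vars('$(CC) ${CFLAGS} -c', {'CC': 'gcc', 'CFLAGS': '-O2'})
#   -> 'gcc -O2 -c'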
def _init_posix():
    """Initialize the module as appropriate for POSIX systems."""
    # _sysconfigdata is generated at build time, see the sysconfig module
    name = os.environ.get(
        '_PYTHON_SYSCONFIGDATA_NAME',
        _sysconfig_name_tmpl.format(
            abi=sys.abiflags,
            platform=sys.platform,
            multiarch=getattr(sys.implementation, '_multiarch', ''),
        ),
    )
    try:
        _temp = __import__(name, globals(), locals(), ['build_time_vars'], 0)
    except ImportError:
        # Python 3.5 and pypy 7.3.1
        _temp = __import__(
            '_sysconfigdata', globals(), locals(), ['build_time_vars'], 0)
    build_time_vars = _temp.build_time_vars
    global _config_vars
    _config_vars = {}
    _config_vars.update(build_time_vars)


def _init_nt():
    """Initialize the module as appropriate for NT"""
    g = {}
    # set basic install directories
    g['LIBDEST'] = get_python_lib(plat_specific=0, standard_lib=1)
    g['BINLIBDEST'] = get_python_lib(plat_specific=1, standard_lib=1)

    # XXX hmmm.. a normal install puts include files here
    g['INCLUDEPY'] = get_python_inc(plat_specific=0)

    g['EXT_SUFFIX'] = _imp.extension_suffixes()[0]
    g['EXE'] = ".exe"
    g['VERSION'] = get_python_version().replace(".", "")
    g['BINDIR'] = os.path.dirname(os.path.abspath(sys.executable))

    global _config_vars
    _config_vars = g


def get_config_vars(*args):
    """With no arguments, return a dictionary of all configuration
    variables relevant for the current platform.  Generally this includes
    everything needed to build extensions and install both pure modules and
    extensions.  On Unix, this means every variable defined in Python's
    installed Makefile; on Windows it's a much smaller set.

    With arguments, return a list of values that result from looking up
    each argument in the configuration variable dictionary.
    """
    global _config_vars
    if _config_vars is None:
        func = globals().get("_init_" + os.name)
        if func:
            func()
        else:
            _config_vars = {}

        # Normalized versions of prefix and exec_prefix are handy to have;
        # in fact, these are the standard versions used most places in the
        # Distutils.
        _config_vars['prefix'] = PREFIX
        _config_vars['exec_prefix'] = EXEC_PREFIX

        if not IS_PYPY:
            # For backward compatibility, see issue19555
            SO = _config_vars.get('EXT_SUFFIX')
            if SO is not None:
                _config_vars['SO'] = SO

            # Always convert srcdir to an absolute path
            srcdir = _config_vars.get('srcdir', project_base)
            if os.name == 'posix':
                if python_build:
                    # If srcdir is a relative path (typically '.' or '..')
                    # then it should be interpreted relative to the directory
                    # containing Makefile.
                    base = os.path.dirname(get_makefile_filename())
                    srcdir = os.path.join(base, srcdir)
                else:
                    # srcdir is not meaningful since the installation is
                    # spread about the filesystem.  We choose the
                    # directory containing the Makefile since we know it
                    # exists.
                    srcdir = os.path.dirname(get_makefile_filename())
            _config_vars['srcdir'] = os.path.abspath(os.path.normpath(srcdir))

            # Convert srcdir into an absolute path if it appears necessary.
            # Normally it is relative to the build directory.  However, during
            # testing, for example, we might be running a non-installed python
            # from a different directory.
            if python_build and os.name == "posix":
                base = project_base
                if (not os.path.isabs(_config_vars['srcdir']) and
                        base != os.getcwd()):
                    # srcdir is relative and we are not in the same directory
                    # as the executable. Assume executable is in the build
                    # directory and make srcdir absolute.
                    srcdir = os.path.join(base, _config_vars['srcdir'])
                    _config_vars['srcdir'] = os.path.normpath(srcdir)

        # OS X platforms require special customization to handle
        # multi-architecture, multi-os-version installers
        if sys.platform == 'darwin':
            import _osx_support
            _osx_support.customize_config_vars(_config_vars)

    if args:
        vals = []
        for name in args:
            vals.append(_config_vars.get(name))
        return vals
    else:
        return _config_vars


def get_config_var(name):
    """Return the value of a single variable using the dictionary
    returned by 'get_config_vars()'.  Equivalent to
    get_config_vars().get(name)
    """
    if name == 'SO':
        import warnings
        warnings.warn('SO is deprecated, use EXT_SUFFIX', DeprecationWarning, 2)
    return get_config_vars().get(name)
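# Usage sketch (values are platform- and build-specific):
#
#   get_config_var('EXT_SUFFIX')   # e.g. '.cpython-310-x86_64-linux-gnu.so'
#   get_config_var('CC')           # e.g. 'gcc -pthread'
#   get_config_vars('prefix', 'exec_prefix')   # -> [PREFIX, EXEC_PREFIX]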
@ -0,0 +1,286 @@
"""text_file

provides the TextFile class, which gives an interface to text files
that (optionally) takes care of stripping comments, ignoring blank
lines, and joining lines with backslashes."""

import sys, io


class TextFile:
    """Provides a file-like object that takes care of all the things you
       commonly want to do when processing a text file that has some
       line-by-line syntax: strip comments (as long as "#" is your
       comment character), skip blank lines, join adjacent lines by
       escaping the newline (ie. backslash at end of line), strip
       leading and/or trailing whitespace.  All of these are optional
       and independently controllable.

       Provides a 'warn()' method so you can generate warning messages that
       report physical line number, even if the logical line in question
       spans multiple physical lines.  Also provides 'unreadline()' for
       implementing line-at-a-time lookahead.

       Constructor is called as:

           TextFile (filename=None, file=None, **options)

       It bombs (RuntimeError) if both 'filename' and 'file' are None;
       'filename' should be a string, and 'file' a file object (or
       something that provides 'readline()' and 'close()' methods).  It is
       recommended that you supply at least 'filename', so that TextFile
       can include it in warning messages.  If 'file' is not supplied,
       TextFile creates its own using 'io.open()'.

       The options are all boolean, and affect the value returned by
       'readline()':
         strip_comments [default: true]
           strip from "#" to end-of-line, as well as any whitespace
           leading up to the "#" -- unless it is escaped by a backslash
         lstrip_ws [default: false]
           strip leading whitespace from each line before returning it
         rstrip_ws [default: true]
           strip trailing whitespace (including line terminator!) from
           each line before returning it
         skip_blanks [default: true]
           skip lines that are empty *after* stripping comments and
           whitespace.  (If both lstrip_ws and rstrip_ws are false,
           then some lines may consist of solely whitespace: these will
           *not* be skipped, even if 'skip_blanks' is true.)
         join_lines [default: false]
           if a backslash is the last non-newline character on a line
           after stripping comments and whitespace, join the following line
           to it to form one "logical line"; if N consecutive lines end
           with a backslash, then N+1 physical lines will be joined to
           form one logical line.
         collapse_join [default: false]
           strip leading whitespace from lines that are joined to their
           predecessor; only matters if (join_lines and not lstrip_ws)
         errors [default: 'strict']
           error handler used to decode the file content

       Note that since 'rstrip_ws' can strip the trailing newline, the
       semantics of 'readline()' must differ from those of the builtin file
       object's 'readline()' method!  In particular, 'readline()' returns
       None for end-of-file: an empty string might just be a blank line (or
       an all-whitespace line), if 'rstrip_ws' is true but 'skip_blanks' is
       not."""

    default_options = { 'strip_comments': 1,
                        'skip_blanks':    1,
                        'lstrip_ws':      0,
                        'rstrip_ws':      1,
                        'join_lines':     0,
                        'collapse_join':  0,
                        'errors':         'strict',
                      }

    def __init__(self, filename=None, file=None, **options):
        """Construct a new TextFile object.  At least one of 'filename'
           (a string) and 'file' (a file-like object) must be supplied.
           The keyword argument options are described above and affect
           the values returned by 'readline()'."""
        if filename is None and file is None:
            raise RuntimeError(
                "you must supply either or both of 'filename' and 'file'")

        # set values for all options -- either from client option hash
        # or fallback to default_options
        for opt in self.default_options.keys():
            if opt in options:
                setattr(self, opt, options[opt])
            else:
                setattr(self, opt, self.default_options[opt])

        # sanity check client option hash
        for opt in options.keys():
            if opt not in self.default_options:
                raise KeyError("invalid TextFile option '%s'" % opt)

        if file is None:
            self.open(filename)
        else:
            self.filename = filename
            self.file = file
            self.current_line = 0       # assuming that file is at BOF!

        # 'linebuf' is a stack of lines that will be emptied before we
        # actually read from the file; it's only populated by an
        # 'unreadline()' operation
        self.linebuf = []

    def open(self, filename):
        """Open a new file named 'filename'.  This overrides both the
           'filename' and 'file' arguments to the constructor."""
        self.filename = filename
        self.file = io.open(self.filename, 'r', errors=self.errors)
        self.current_line = 0

    def close(self):
        """Close the current file and forget everything we know about it
           (filename, current line number)."""
        file = self.file
        self.file = None
        self.filename = None
        self.current_line = None
        file.close()

    def gen_error(self, msg, line=None):
        outmsg = []
        if line is None:
            line = self.current_line
        outmsg.append(self.filename + ", ")
        if isinstance(line, (list, tuple)):
            outmsg.append("lines %d-%d: " % tuple(line))
        else:
            outmsg.append("line %d: " % line)
        outmsg.append(str(msg))
        return "".join(outmsg)

    def error(self, msg, line=None):
        raise ValueError("error: " + self.gen_error(msg, line))

    def warn(self, msg, line=None):
        """Print (to stderr) a warning message tied to the current logical
           line in the current file.  If the current logical line in the
           file spans multiple physical lines, the warning refers to the
           whole range, eg. "lines 3-5".  If 'line' is supplied, it overrides
           the current line number; it may be a list or tuple to indicate a
           range of physical lines, or an integer for a single physical
           line."""
        sys.stderr.write("warning: " + self.gen_error(msg, line) + "\n")

    def readline(self):
        """Read and return a single logical line from the current file (or
           from an internal buffer if lines have previously been "unread"
           with 'unreadline()').  If the 'join_lines' option is true, this
           may involve reading multiple physical lines concatenated into a
           single string.  Updates the current line number, so calling
           'warn()' after 'readline()' emits a warning about the physical
           line(s) just read.  Returns None on end-of-file, since the empty
           string can occur if 'rstrip_ws' is true but 'skip_blanks' is
           not."""
        # If any "unread" lines are waiting in 'linebuf', return the top
        # one.  (We don't actually buffer read-ahead data -- lines only
        # get put in 'linebuf' if the client explicitly does an
        # 'unreadline()'.)
        if self.linebuf:
            line = self.linebuf[-1]
            del self.linebuf[-1]
            return line

        buildup_line = ''

        while True:
            # read the line, make it None if EOF
            line = self.file.readline()
            if line == '':
                line = None

            if self.strip_comments and line:

                # Look for the first "#" in the line.  If none, never
                # mind.  If we find one and it's the first character, or
                # is not preceded by "\", then it starts a comment --
                # strip the comment, strip whitespace before it, and
                # carry on.  Otherwise, it's just an escaped "#", so
                # unescape it (and any other escaped "#"'s that might be
                # lurking in there) and otherwise leave the line alone.

                pos = line.find("#")
                if pos == -1:           # no "#" -- no comments
                    pass

                # It's definitely a comment -- either "#" is the first
                # character, or it's elsewhere and unescaped.
                elif pos == 0 or line[pos-1] != "\\":
                    # Have to preserve the trailing newline, because it's
                    # the job of a later step (rstrip_ws) to remove it --
                    # and if rstrip_ws is false, we'd better preserve it!
                    # (NB. this means that if the final line is all comment
                    # and has no trailing newline, we will think that it's
                    # EOF; I think that's OK.)
                    eol = (line[-1] == '\n') and '\n' or ''
                    line = line[0:pos] + eol

                    # If all that's left is whitespace, then skip line
                    # *now*, before we try to join it to 'buildup_line' --
                    # that way constructs like
                    #   hello \\
                    #   # comment that should be ignored
                    #   there
                    # result in "hello there".
                    if line.strip() == "":
                        continue
                else:                   # it's an escaped "#"
                    line = line.replace("\\#", "#")

            # did previous line end with a backslash? then accumulate
            if self.join_lines and buildup_line:
                # oops: end of file
                if line is None:
                    self.warn("continuation line immediately precedes "
                              "end-of-file")
                    return buildup_line

                if self.collapse_join:
                    line = line.lstrip()
                line = buildup_line + line

                # careful: pay attention to line number when incrementing it
                if isinstance(self.current_line, list):
                    self.current_line[1] = self.current_line[1] + 1
                else:
                    self.current_line = [self.current_line,
                                         self.current_line + 1]
            # just an ordinary line, read it as usual
            else:
                if line is None:        # eof
                    return None

                # still have to be careful about incrementing the line number!
                if isinstance(self.current_line, list):
                    self.current_line = self.current_line[1] + 1
                else:
                    self.current_line = self.current_line + 1

            # strip whitespace however the client wants (leading and
            # trailing, or one or the other, or neither)
            if self.lstrip_ws and self.rstrip_ws:
                line = line.strip()
            elif self.lstrip_ws:
                line = line.lstrip()
            elif self.rstrip_ws:
                line = line.rstrip()

            # blank line (whether we rstrip'ed or not)? skip to next line
            # if appropriate
            if (line == '' or line == '\n') and self.skip_blanks:
                continue

            if self.join_lines:
                if line[-1] == '\\':
                    buildup_line = line[:-1]
                    continue

                if line[-2:] == '\\\n':
                    buildup_line = line[0:-2] + '\n'
                    continue

            # well, I guess there's some actual content there: return it
            return line

    def readlines(self):
        """Read and return the list of all logical lines remaining in the
           current file."""
        lines = []
        while True:
            line = self.readline()
            if line is None:
                return lines
            lines.append(line)

    def unreadline(self, line):
        """Push 'line' (a string) onto an internal buffer that will be
           checked by future 'readline()' calls.  Handy for implementing
           a parser with line-at-a-time lookahead."""
        self.linebuf.append(line)
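# A minimal usage sketch (hypothetical file contents).  Given 'example.txt':
#
#   first \
#   second      # a trailing comment
#
#   third
#
# with the default options plus join_lines=1:
#
#   t = TextFile('example.txt', join_lines=1)
#   t.readlines()   # -> ['first second', 'third']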
@ -0,0 +1,325 @@
"""distutils.unixccompiler

Contains the UnixCCompiler class, a subclass of CCompiler that handles
the "typical" Unix-style command-line C compiler:
  * macros defined with -Dname[=value]
  * macros undefined with -Uname
  * include search directories specified with -Idir
  * libraries specified with -llib
  * library search directories specified with -Ldir
  * compile handled by 'cc' (or similar) executable with -c option:
    compiles .c to .o
  * link static library handled by 'ar' command (possibly with 'ranlib')
  * link shared library handled by 'cc -shared'
"""

import os, sys, re, shlex

from distutils import sysconfig
from distutils.dep_util import newer
from distutils.ccompiler import \
    CCompiler, gen_preprocess_options, gen_lib_options
from distutils.errors import \
    DistutilsExecError, CompileError, LibError, LinkError
from distutils import log

if sys.platform == 'darwin':
    import _osx_support

# XXX Things not currently handled:
#   * optimization/debug/warning flags; we just use whatever's in Python's
#     Makefile and live with it.  Is this adequate?  If not, we might
#     have to have a bunch of subclasses GNUCCompiler, SGICCompiler,
#     SunCCompiler, and I suspect down that road lies madness.
#   * even if we don't know a warning flag from an optimization flag,
#     we need some way for outsiders to feed preprocessor/compiler/linker
#     flags in to us -- eg. a sysadmin might want to mandate certain flags
#     via a site config file, or a user might want to set something for
#     compiling this module distribution only via the setup.py command
#     line, whatever.  As long as these options come from something on the
#     current system, they can be as system-dependent as they like, and we
#     should just happily stuff them into the preprocessor/compiler/linker
#     options and carry on.


class UnixCCompiler(CCompiler):

    compiler_type = 'unix'

    # These are used by CCompiler in two places: the constructor sets
    # instance attributes 'preprocessor', 'compiler', etc. from them, and
    # 'set_executable()' allows any of these to be set.  The defaults here
    # are pretty generic; they will probably have to be set by an outsider
    # (eg. using information discovered by the sysconfig about building
    # Python extensions).
    executables = {'preprocessor' : None,
                   'compiler'     : ["cc"],
                   'compiler_so'  : ["cc"],
                   'compiler_cxx' : ["cc"],
                   'linker_so'    : ["cc", "-shared"],
                   'linker_exe'   : ["cc"],
                   'archiver'     : ["ar", "-cr"],
                   'ranlib'       : None,
                  }

    if sys.platform[:6] == "darwin":
        executables['ranlib'] = ["ranlib"]

    # Needed for the filename generation methods provided by the base
    # class, CCompiler.  NB. whoever instantiates/uses a particular
    # UnixCCompiler instance should set 'shared_lib_ext' -- we set a
    # reasonable common default here, but it's not necessarily used on all
    # Unices!

    src_extensions = [".c",".C",".cc",".cxx",".cpp",".m"]
    obj_extension = ".o"
    static_lib_extension = ".a"
    shared_lib_extension = ".so"
    dylib_lib_extension = ".dylib"
    xcode_stub_lib_extension = ".tbd"
    static_lib_format = shared_lib_format = dylib_lib_format = "lib%s%s"
    xcode_stub_lib_format = dylib_lib_format
    if sys.platform == "cygwin":
        exe_extension = ".exe"

    def preprocess(self, source, output_file=None, macros=None,
                   include_dirs=None, extra_preargs=None, extra_postargs=None):
        fixed_args = self._fix_compile_args(None, macros, include_dirs)
        ignore, macros, include_dirs = fixed_args
        pp_opts = gen_preprocess_options(macros, include_dirs)
        pp_args = self.preprocessor + pp_opts
        if output_file:
            pp_args.extend(['-o', output_file])
        if extra_preargs:
            pp_args[:0] = extra_preargs
        if extra_postargs:
            pp_args.extend(extra_postargs)
        pp_args.append(source)

        # We need to preprocess: either we're being forced to, or we're
        # generating output to stdout, or there's a target output file and
        # the source file is newer than the target (or the target doesn't
        # exist).
        if self.force or output_file is None or newer(source, output_file):
            if output_file:
                self.mkpath(os.path.dirname(output_file))
            try:
                self.spawn(pp_args)
            except DistutilsExecError as msg:
                raise CompileError(msg)

    def _compile(self, obj, src, ext, cc_args, extra_postargs, pp_opts):
        compiler_so = self.compiler_so
        if sys.platform == 'darwin':
            compiler_so = _osx_support.compiler_fixup(compiler_so,
                                                      cc_args + extra_postargs)
        try:
            self.spawn(compiler_so + cc_args + [src, '-o', obj] +
                       extra_postargs)
        except DistutilsExecError as msg:
            raise CompileError(msg)

    def create_static_lib(self, objects, output_libname,
                          output_dir=None, debug=0, target_lang=None):
        objects, output_dir = self._fix_object_args(objects, output_dir)

        output_filename = \
            self.library_filename(output_libname, output_dir=output_dir)

        if self._need_link(objects, output_filename):
            self.mkpath(os.path.dirname(output_filename))
            self.spawn(self.archiver +
                       [output_filename] +
                       objects + self.objects)

            # Not many Unices required ranlib anymore -- SunOS 4.x is, I
            # think the only major Unix that does.  Maybe we need some
            # platform intelligence here to skip ranlib if it's not
            # needed -- or maybe Python's configure script took care of
            # it for us, hence the check for leading colon.
            if self.ranlib:
                try:
                    self.spawn(self.ranlib + [output_filename])
                except DistutilsExecError as msg:
                    raise LibError(msg)
        else:
            log.debug("skipping %s (up-to-date)", output_filename)

    def link(self, target_desc, objects,
             output_filename, output_dir=None, libraries=None,
             library_dirs=None, runtime_library_dirs=None,
             export_symbols=None, debug=0, extra_preargs=None,
             extra_postargs=None, build_temp=None, target_lang=None):
        objects, output_dir = self._fix_object_args(objects, output_dir)
        fixed_args = self._fix_lib_args(libraries, library_dirs,
                                        runtime_library_dirs)
        libraries, library_dirs, runtime_library_dirs = fixed_args

        lib_opts = gen_lib_options(self, library_dirs, runtime_library_dirs,
                                   libraries)
        if not isinstance(output_dir, (str, type(None))):
            raise TypeError("'output_dir' must be a string or None")
        if output_dir is not None:
            output_filename = os.path.join(output_dir, output_filename)

        if self._need_link(objects, output_filename):
            ld_args = (objects + self.objects +
                       lib_opts + ['-o', output_filename])
            if debug:
                ld_args[:0] = ['-g']
            if extra_preargs:
                ld_args[:0] = extra_preargs
            if extra_postargs:
                ld_args.extend(extra_postargs)
            self.mkpath(os.path.dirname(output_filename))
            try:
                if target_desc == CCompiler.EXECUTABLE:
                    linker = self.linker_exe[:]
                else:
                    linker = self.linker_so[:]
                if target_lang == "c++" and self.compiler_cxx:
                    # skip over environment variable settings if /usr/bin/env
                    # is used to set up the linker's environment.
                    # This is needed on OSX. Note: this assumes that the
                    # normal and C++ compiler have the same environment
                    # settings.
                    i = 0
                    if os.path.basename(linker[0]) == "env":
                        i = 1
                        while '=' in linker[i]:
                            i += 1

                    if os.path.basename(linker[i]) == 'ld_so_aix':
                        # AIX platforms prefix the compiler with the ld_so_aix
                        # script, so we need to adjust our linker index
                        offset = 1
                    else:
                        offset = 0

                    linker[i+offset] = self.compiler_cxx[i]

                if sys.platform == 'darwin':
                    linker = _osx_support.compiler_fixup(linker, ld_args)

                self.spawn(linker + ld_args)
            except DistutilsExecError as msg:
                raise LinkError(msg)
        else:
            log.debug("skipping %s (up-to-date)", output_filename)

    # -- Miscellaneous methods -----------------------------------------
    # These are all used by the 'gen_lib_options()' function in
    # ccompiler.py.

    def library_dir_option(self, dir):
        return "-L" + dir

    def _is_gcc(self, compiler_name):
        return "gcc" in compiler_name or "g++" in compiler_name

    def runtime_library_dir_option(self, dir):
        # XXX Hackish, at the very least.  See Python bug #445902:
        # http://sourceforge.net/tracker/index.php
        #   ?func=detail&aid=445902&group_id=5470&atid=105470
        # Linkers on different platforms need different options to
        # specify that directories need to be added to the list of
        # directories searched for dependencies when a dynamic library
        # is sought.  GCC on GNU systems (Linux, FreeBSD, ...) has to
        # be told to pass the -R option through to the linker, whereas
        # other compilers and gcc on other systems just know this.
        # Other compilers may need something slightly different.  At
        # this time, there's no way to determine this information from
        # the configuration data stored in the Python installation, so
        # we use this hack.
        compiler = os.path.basename(shlex.split(sysconfig.get_config_var("CC"))[0])
        if sys.platform[:6] == "darwin":
            from distutils.util import get_macosx_target_ver, split_version
            macosx_target_ver = get_macosx_target_ver()
            if macosx_target_ver and split_version(macosx_target_ver) >= [10, 5]:
                return "-Wl,-rpath," + dir
            else:  # no support for -rpath on earlier macOS versions
                return "-L" + dir
        elif sys.platform[:7] == "freebsd":
            return "-Wl,-rpath=" + dir
        elif sys.platform[:5] == "hp-ux":
            if self._is_gcc(compiler):
                return ["-Wl,+s", "-L" + dir]
            return ["+s", "-L" + dir]

        # For all compilers, `-Wl` is the presumed way to
        # pass a compiler option to the linker and `-R` is
        # the way to pass an RPATH.
        if sysconfig.get_config_var("GNULD") == "yes":
            # GNU ld needs an extra option to get a RUNPATH
            # instead of just an RPATH.
            return "-Wl,--enable-new-dtags,-R" + dir
        else:
            return "-Wl,-R" + dir
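    # Illustrative return values for the cases above (DIR stands for the
    # directory argument; GNU toolchain assumed where noted):
    #
    #   macOS >= 10.5        : '-Wl,-rpath,DIR'
    #   FreeBSD              : '-Wl,-rpath=DIR'
    #   HP-UX with gcc       : ['-Wl,+s', '-LDIR']
    #   elsewhere, GNULD=yes : '-Wl,--enable-new-dtags,-RDIR'
    #   elsewhere otherwise  : '-Wl,-RDIR'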
    def library_option(self, lib):
        return "-l" + lib

    def find_library_file(self, dirs, lib, debug=0):
        shared_f = self.library_filename(lib, lib_type='shared')
        dylib_f = self.library_filename(lib, lib_type='dylib')
        xcode_stub_f = self.library_filename(lib, lib_type='xcode_stub')
        static_f = self.library_filename(lib, lib_type='static')

        if sys.platform == 'darwin':
            # On OSX users can specify an alternate SDK using
            # '-isysroot', calculate the SDK root if it is specified
            # (and use it further on)
            #
            # Note that, as of Xcode 7, Apple SDKs may contain textual stub
            # libraries with .tbd extensions rather than the normal .dylib
            # shared libraries installed in /.  The Apple compiler tool
            # chain handles this transparently but it can cause problems
            # for programs that are being built with an SDK and searching
            # for specific libraries.  Callers of find_library_file need to
            # keep in mind that the base filename of the returned SDK library
            # file might have a different extension from that of the library
            # file installed on the running system, for example:
            #   /Applications/Xcode.app/Contents/Developer/Platforms/
            #       MacOSX.platform/Developer/SDKs/MacOSX10.11.sdk/
            #       usr/lib/libedit.tbd
            # vs
            #   /usr/lib/libedit.dylib
            cflags = sysconfig.get_config_var('CFLAGS')
            m = re.search(r'-isysroot\s*(\S+)', cflags)
            if m is None:
                sysroot = '/'
            else:
                sysroot = m.group(1)

        for dir in dirs:
            shared = os.path.join(dir, shared_f)
            dylib = os.path.join(dir, dylib_f)
            static = os.path.join(dir, static_f)
            xcode_stub = os.path.join(dir, xcode_stub_f)

            if sys.platform == 'darwin' and (
                dir.startswith('/System/') or (
                dir.startswith('/usr/') and not dir.startswith('/usr/local/'))):

                shared = os.path.join(sysroot, dir[1:], shared_f)
                dylib = os.path.join(sysroot, dir[1:], dylib_f)
                static = os.path.join(sysroot, dir[1:], static_f)
                xcode_stub = os.path.join(sysroot, dir[1:], xcode_stub_f)

            # We're second-guessing the linker here, with not much hard
            # data to go on: GCC seems to prefer the shared library, so I'm
            # assuming that *all* Unix C compilers do.  And of course I'm
            # ignoring even GCC's "-static" option.  So sue me.
            if os.path.exists(dylib):
                return dylib
            elif os.path.exists(xcode_stub):
                return xcode_stub
            elif os.path.exists(shared):
                return shared
            elif os.path.exists(static):
                return static

        # Oops, didn't find it in *any* of 'dirs'
        return None
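# Search-order sketch: within each directory, the method above prefers
#   libNAME.dylib, then libNAME.tbd, then libNAME.so, then libNAME.a,
# so e.g. find_library_file(['/usr/lib', '/usr/local/lib'], 'edit')
# returns the first such file that exists, or None.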
@ -0,0 +1,548 @@
"""distutils.util

Miscellaneous utility functions -- anything that doesn't fit into
one of the other *util.py modules.
"""

import os
import re
import importlib.util
import string
import sys
from distutils.errors import DistutilsPlatformError
from distutils.dep_util import newer
from distutils.spawn import spawn
from distutils import log
from distutils.errors import DistutilsByteCompileError
from .py35compat import _optim_args_from_interpreter_flags


def get_host_platform():
    """Return a string that identifies the current platform.  This is used mainly to
    distinguish platform-specific build directories and platform-specific built
    distributions.  Typically includes the OS name and version and the
    architecture (as supplied by 'os.uname()'), although the exact information
    included depends on the OS; eg. on Linux, the kernel version isn't
    particularly important.

    Examples of returned values:
       linux-i586
       linux-alpha (?)
       solaris-2.6-sun4u

    Windows will return one of:
       win-amd64 (64bit Windows on AMD64 (aka x86_64, Intel64, EM64T, etc))
       win32 (all others - specifically, sys.platform is returned)

    For other non-POSIX platforms, currently just returns 'sys.platform'.

    """
    if os.name == 'nt':
        if 'amd64' in sys.version.lower():
            return 'win-amd64'
        if '(arm)' in sys.version.lower():
            return 'win-arm32'
        if '(arm64)' in sys.version.lower():
            return 'win-arm64'
        return sys.platform

    # Set for cross builds explicitly
    if "_PYTHON_HOST_PLATFORM" in os.environ:
        return os.environ["_PYTHON_HOST_PLATFORM"]

    if os.name != "posix" or not hasattr(os, 'uname'):
        # XXX what about the architecture? NT is Intel or Alpha,
        # Mac OS is M68k or PPC, etc.
        return sys.platform

    # Try to distinguish various flavours of Unix

    (osname, host, release, version, machine) = os.uname()

    # Convert the OS name to lowercase, remove '/' characters, and translate
    # spaces (for "Power Macintosh")
    osname = osname.lower().replace('/', '')
    machine = machine.replace(' ', '_')
    machine = machine.replace('/', '-')

    if osname[:5] == "linux":
        # At least on Linux/Intel, 'machine' is the processor --
        # i386, etc.
        # XXX what about Alpha, SPARC, etc?
        return "%s-%s" % (osname, machine)
    elif osname[:5] == "sunos":
        if release[0] >= "5":           # SunOS 5 == Solaris 2
            osname = "solaris"
            release = "%d.%s" % (int(release[0]) - 3, release[2:])
            # We can't use "platform.architecture()[0]" because of a
            # bootstrap problem.  We use a dict to get an error
            # if something suspicious happens.
            bitness = {2147483647: "32bit", 9223372036854775807: "64bit"}
            machine += ".%s" % bitness[sys.maxsize]
        # fall through to standard osname-release-machine representation
    elif osname[:3] == "aix":
        from .py38compat import aix_platform
        return aix_platform(osname, version, release)
    elif osname[:6] == "cygwin":
        osname = "cygwin"
        rel_re = re.compile(r'[\d.]+', re.ASCII)
        m = rel_re.match(release)
        if m:
            release = m.group()
    elif osname[:6] == "darwin":
        import _osx_support, distutils.sysconfig
        osname, release, machine = _osx_support.get_platform_osx(
            distutils.sysconfig.get_config_vars(),
            osname, release, machine)

    return "%s-%s-%s" % (osname, release, machine)
def get_platform():
|
||||
if os.name == 'nt':
|
||||
TARGET_TO_PLAT = {
|
||||
'x86' : 'win32',
|
||||
'x64' : 'win-amd64',
|
||||
'arm' : 'win-arm32',
|
||||
'arm64': 'win-arm64',
|
||||
}
|
||||
return TARGET_TO_PLAT.get(os.environ.get('VSCMD_ARG_TGT_ARCH')) or get_host_platform()
|
||||
else:
|
||||
return get_host_platform()
|
||||
|
||||
|
||||
if sys.platform == 'darwin':
|
||||
_syscfg_macosx_ver = None # cache the version pulled from sysconfig
|
||||
MACOSX_VERSION_VAR = 'MACOSX_DEPLOYMENT_TARGET'
|
||||
|
||||
def _clear_cached_macosx_ver():
|
||||
"""For testing only. Do not call."""
|
||||
global _syscfg_macosx_ver
|
||||
_syscfg_macosx_ver = None
|
||||
|
||||
def get_macosx_target_ver_from_syscfg():
|
||||
"""Get the version of macOS latched in the Python interpreter configuration.
|
||||
Returns the version as a string or None if can't obtain one. Cached."""
|
||||
global _syscfg_macosx_ver
|
||||
if _syscfg_macosx_ver is None:
|
||||
from distutils import sysconfig
|
||||
ver = sysconfig.get_config_var(MACOSX_VERSION_VAR) or ''
|
||||
if ver:
|
||||
_syscfg_macosx_ver = ver
|
||||
return _syscfg_macosx_ver
|
||||
|
||||
def get_macosx_target_ver():
|
||||
"""Return the version of macOS for which we are building.
|
||||
|
||||
The target version defaults to the version in sysconfig latched at time
|
||||
the Python interpreter was built, unless overridden by an environment
|
||||
variable. If neither source has a value, then None is returned"""
|
||||
|
||||
syscfg_ver = get_macosx_target_ver_from_syscfg()
|
||||
env_ver = os.environ.get(MACOSX_VERSION_VAR)
|
||||
|
||||
if env_ver:
|
||||
# Validate overridden version against sysconfig version, if have both.
|
||||
# Ensure that the deployment target of the build process is not less
|
||||
# than 10.3 if the interpreter was built for 10.3 or later. This
|
||||
# ensures extension modules are built with correct compatibility
|
||||
# values, specifically LDSHARED which can use
|
||||
# '-undefined dynamic_lookup' which only works on >= 10.3.
|
||||
if syscfg_ver and split_version(syscfg_ver) >= [10, 3] and \
|
||||
split_version(env_ver) < [10, 3]:
|
||||
my_msg = ('$' + MACOSX_VERSION_VAR + ' mismatch: '
|
||||
'now "%s" but "%s" during configure; '
|
||||
'must use 10.3 or later'
|
||||
% (env_ver, syscfg_ver))
|
||||
raise DistutilsPlatformError(my_msg)
|
||||
return env_ver
|
||||
return syscfg_ver
|
||||
|
||||
|
||||
def split_version(s):
|
||||
"""Convert a dot-separated string into a list of numbers for comparisons"""
|
||||
return [int(n) for n in s.split('.')]
|
||||
|
||||
|
||||
def convert_path (pathname):
|
||||
"""Return 'pathname' as a name that will work on the native filesystem,
|
||||
i.e. split it on '/' and put it back together again using the current
|
||||
directory separator. Needed because filenames in the setup script are
|
||||
always supplied in Unix style, and have to be converted to the local
|
||||
convention before we can actually use them in the filesystem. Raises
|
||||
ValueError on non-Unix-ish systems if 'pathname' either starts or
|
||||
ends with a slash.
|
||||
"""
|
||||
if os.sep == '/':
|
||||
return pathname
|
||||
if not pathname:
|
||||
return pathname
|
||||
if pathname[0] == '/':
|
||||
raise ValueError("path '%s' cannot be absolute" % pathname)
|
||||
if pathname[-1] == '/':
|
||||
raise ValueError("path '%s' cannot end with '/'" % pathname)
|
||||
|
||||
paths = pathname.split('/')
|
||||
while '.' in paths:
|
||||
paths.remove('.')
|
||||
if not paths:
|
||||
return os.curdir
|
||||
return os.path.join(*paths)
|
||||
|
||||
# convert_path ()
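

# --- Editor's note: an illustrative sketch, not part of the upstream file. ---
# `_demo_convert_path` is a hypothetical helper added only to show the
# behaviour documented above; nothing in distutils calls it.
def _demo_convert_path():
    # On Windows (os.sep == '\\') the Unix-style path is rebuilt with the
    # local separator and '.' components are dropped:
    #   convert_path('pkg/data/file.txt') -> 'pkg\\data\\file.txt'
    #   convert_path('./pkg/data')        -> 'pkg\\data'
    # On a POSIX system (os.sep == '/') the string is returned unchanged.
    return convert_path('pkg/data/file.txt')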


def change_root (new_root, pathname):
    """Return 'pathname' with 'new_root' prepended.  If 'pathname' is
    relative, this is equivalent to "os.path.join(new_root,pathname)".
    Otherwise, it requires making 'pathname' relative and then joining the
    two, which is tricky on DOS/Windows and Mac OS.
    """
    if os.name == 'posix':
        if not os.path.isabs(pathname):
            return os.path.join(new_root, pathname)
        else:
            return os.path.join(new_root, pathname[1:])

    elif os.name == 'nt':
        (drive, path) = os.path.splitdrive(pathname)
        if path[0] == '\\':
            path = path[1:]
        return os.path.join(new_root, path)

    else:
        raise DistutilsPlatformError("nothing known about platform '%s'" % os.name)
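

# --- Editor's note: an illustrative sketch, not part of the upstream file. ---
# `_demo_change_root` is a hypothetical helper showing how change_root()
# re-bases paths; nothing in distutils calls it.
def _demo_change_root():
    # On POSIX an absolute 'pathname' has its leading '/' stripped before
    # the join, so the result always lands under 'new_root':
    #   change_root('/tmp/stage', 'usr/lib')  -> '/tmp/stage/usr/lib'
    #   change_root('/tmp/stage', '/usr/lib') -> '/tmp/stage/usr/lib'
    return change_root('/tmp/stage', '/usr/lib')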


_environ_checked = 0
def check_environ ():
    """Ensure that 'os.environ' has all the environment variables we
    guarantee that users can use in config files, command-line options,
    etc.  Currently this includes:
      HOME - user's home directory (Unix only)
      PLAT - description of the current platform, including hardware
             and OS (see 'get_platform()')
    """
    global _environ_checked
    if _environ_checked:
        return

    if os.name == 'posix' and 'HOME' not in os.environ:
        try:
            import pwd
            os.environ['HOME'] = pwd.getpwuid(os.getuid())[5]
        except (ImportError, KeyError):
            # bpo-10496: if the current user identifier doesn't exist in the
            # password database, do nothing
            pass

    if 'PLAT' not in os.environ:
        os.environ['PLAT'] = get_platform()

    _environ_checked = 1


def subst_vars (s, local_vars):
    """
    Perform variable substitution on 's'.
    Variables are indicated by format-style braces ("{var}").
    A variable is substituted by the value found in the 'local_vars'
    dictionary or in 'os.environ' if it's not in 'local_vars'.
    'os.environ' is first checked/augmented to guarantee that it contains
    certain values: see 'check_environ()'.  Raise ValueError for any
    variables not found in either 'local_vars' or 'os.environ'.
    """
    check_environ()
    lookup = dict(os.environ)
    lookup.update((name, str(value)) for name, value in local_vars.items())
    try:
        return _subst_compat(s).format_map(lookup)
    except KeyError as var:
        raise ValueError(f"invalid variable {var}")

# subst_vars ()
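

# --- Editor's note: an illustrative sketch, not part of the upstream file. ---
# `_demo_subst_vars` is a hypothetical helper showing the substitution rules;
# nothing in distutils calls it.
def _demo_subst_vars():
    # Format-style braces are looked up in 'local_vars' first, then os.environ:
    #   subst_vars('{name}-{version}', {'name': 'pkg', 'version': 1.0})
    #   -> 'pkg-1.0'
    # The legacy '$var' spelling still works via _subst_compat() below, but
    # it triggers a DeprecationWarning.
    return subst_vars('{name}-{version}', {'name': 'pkg', 'version': 1.0})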


def _subst_compat(s):
    """
    Replace shell/Perl-style variable substitution with
    format-style. For compatibility.
    """
    def _subst(match):
        return f'{{{match.group(1)}}}'
    repl = re.sub(r'\$([a-zA-Z_][a-zA-Z_0-9]*)', _subst, s)
    if repl != s:
        import warnings
        warnings.warn(
            "shell/Perl-style substitutions are deprecated",
            DeprecationWarning,
        )
    return repl


def grok_environment_error (exc, prefix="error: "):
    # Function kept for backward compatibility.
    # Used to try clever things with EnvironmentErrors,
    # but nowadays str(exception) produces good messages.
    return prefix + str(exc)


# Needed by 'split_quoted()'
_wordchars_re = _squote_re = _dquote_re = None
def _init_regex():
    global _wordchars_re, _squote_re, _dquote_re
    _wordchars_re = re.compile(r'[^\\\'\"%s ]*' % string.whitespace)
    _squote_re = re.compile(r"'(?:[^'\\]|\\.)*'")
    _dquote_re = re.compile(r'"(?:[^"\\]|\\.)*"')

def split_quoted (s):
    """Split a string up according to Unix shell-like rules for quotes and
    backslashes.  In short: words are delimited by spaces, as long as those
    spaces are not escaped by a backslash, or inside a quoted string.
    Single and double quotes are equivalent, and the quote characters can
    be backslash-escaped.  The backslash is stripped from any two-character
    escape sequence, leaving only the escaped character.  The quote
    characters are stripped from any quoted string.  Returns a list of
    words.
    """

    # This is a nice algorithm for splitting up a single string, since it
    # doesn't require character-by-character examination.  It was a little
    # bit of a brain-bender to get it working right, though...
    if _wordchars_re is None: _init_regex()

    s = s.strip()
    words = []
    pos = 0

    while s:
        m = _wordchars_re.match(s, pos)
        end = m.end()
        if end == len(s):
            words.append(s[:end])
            break

        if s[end] in string.whitespace: # unescaped, unquoted whitespace: now
            words.append(s[:end])       # we definitely have a word delimiter
            s = s[end:].lstrip()
            pos = 0

        elif s[end] == '\\':            # preserve whatever is being escaped;
                                        # will become part of the current word
            s = s[:end] + s[end+1:]
            pos = end+1

        else:
            if s[end] == "'":           # slurp singly-quoted string
                m = _squote_re.match(s, end)
            elif s[end] == '"':         # slurp doubly-quoted string
                m = _dquote_re.match(s, end)
            else:
                raise RuntimeError("this can't happen (bad char '%c')" % s[end])

            if m is None:
                raise ValueError("bad string (mismatched %s quotes?)" % s[end])

            (beg, end) = m.span()
            s = s[:beg] + s[beg+1:end-1] + s[end:]
            pos = m.end() - 2

        if pos >= len(s):
            words.append(s)
            break

    return words

# split_quoted ()
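

# --- Editor's note: an illustrative sketch, not part of the upstream file. ---
# `_demo_split_quoted` is a hypothetical helper showing the quoting rules;
# nothing in distutils calls it.
def _demo_split_quoted():
    # Whitespace delimits words unless escaped or quoted; quotes are stripped
    # and two-character backslash escapes are collapsed:
    #   split_quoted('one "two three" four\\ five')
    #   -> ['one', 'two three', 'four five']
    return split_quoted('one "two three" four\\ five')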


def execute (func, args, msg=None, verbose=0, dry_run=0):
    """Perform some action that affects the outside world (eg.  by
    writing to the filesystem).  Such actions are special because they
    are disabled by the 'dry_run' flag.  This function takes care of all
    that bureaucracy for you; all you have to do is supply the
    function to call and an argument tuple for it (to embody the
    "external action" being performed), and an optional message to
    print.
    """
    if msg is None:
        msg = "%s%r" % (func.__name__, args)
        if msg[-2:] == ',)':        # correct for singleton tuple
            msg = msg[0:-2] + ')'

    log.info(msg)
    if not dry_run:
        func(*args)


def strtobool (val):
    """Convert a string representation of truth to true (1) or false (0).

    True values are 'y', 'yes', 't', 'true', 'on', and '1'; false values
    are 'n', 'no', 'f', 'false', 'off', and '0'.  Raises ValueError if
    'val' is anything else.
    """
    val = val.lower()
    if val in ('y', 'yes', 't', 'true', 'on', '1'):
        return 1
    elif val in ('n', 'no', 'f', 'false', 'off', '0'):
        return 0
    else:
        raise ValueError("invalid truth value %r" % (val,))
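

# --- Editor's note: an illustrative sketch, not part of the upstream file. ---
# `_demo_strtobool` is a hypothetical helper; nothing in distutils calls it.
def _demo_strtobool():
    # Accepted spellings map to plain ints (not bools); anything else raises
    # ValueError:
    #   strtobool('yes') -> 1, strtobool('off') -> 0, strtobool('maybe') -> error
    return strtobool('yes'), strtobool('off')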


def byte_compile (py_files,
                  optimize=0, force=0,
                  prefix=None, base_dir=None,
                  verbose=1, dry_run=0,
                  direct=None):
    """Byte-compile a collection of Python source files to .pyc
    files in a __pycache__ subdirectory.  'py_files' is a list
    of files to compile; any files that don't end in ".py" are silently
    skipped.  'optimize' must be one of the following:
      0 - don't optimize
      1 - normal optimization (like "python -O")
      2 - extra optimization (like "python -OO")
    If 'force' is true, all files are recompiled regardless of
    timestamps.

    The source filename encoded in each bytecode file defaults to the
    filenames listed in 'py_files'; you can modify these with 'prefix' and
    'basedir'.  'prefix' is a string that will be stripped off of each
    source filename, and 'base_dir' is a directory name that will be
    prepended (after 'prefix' is stripped).  You can supply either or both
    (or neither) of 'prefix' and 'base_dir', as you wish.

    If 'dry_run' is true, doesn't actually do anything that would
    affect the filesystem.

    Byte-compilation is either done directly in this interpreter process
    with the standard py_compile module, or indirectly by writing a
    temporary script and executing it.  Normally, you should let
    'byte_compile()' figure out whether to use direct compilation or not
    (see the source for details).  The 'direct' flag is used by the script
    generated in indirect mode; unless you know what you're doing, leave
    it set to None.
    """

    # Late import to fix a bootstrap issue: _posixsubprocess is built by
    # setup.py, but setup.py uses distutils.
    import subprocess

    # nothing is done if sys.dont_write_bytecode is True
    if sys.dont_write_bytecode:
        raise DistutilsByteCompileError('byte-compiling is disabled.')

    # First, if the caller didn't force us into direct or indirect mode,
    # figure out which mode we should be in.  We take a conservative
    # approach: choose direct mode *only* if the current interpreter is
    # in debug mode and optimize is 0.  If we're not in debug mode (-O
    # or -OO), we don't know which level of optimization this
    # interpreter is running with, so we can't do direct
    # byte-compilation and be certain that it's the right thing.  Thus,
    # always compile indirectly if the current interpreter is in either
    # optimize mode, or if either optimization level was requested by
    # the caller.
    if direct is None:
        direct = (__debug__ and optimize == 0)

    # "Indirect" byte-compilation: write a temporary script and then
    # run it with the appropriate flags.
    if not direct:
        try:
            from tempfile import mkstemp
            (script_fd, script_name) = mkstemp(".py")
        except ImportError:
            from tempfile import mktemp
            (script_fd, script_name) = None, mktemp(".py")
        log.info("writing byte-compilation script '%s'", script_name)
        if not dry_run:
            if script_fd is not None:
                script = os.fdopen(script_fd, "w")
            else:
                script = open(script_name, "w")

            with script:
                script.write("""\
from distutils.util import byte_compile
files = [
""")

                # XXX would be nice to write absolute filenames, just for
                # safety's sake (script should be more robust in the face of
                # chdir'ing before running it).  But this requires abspath'ing
                # 'prefix' as well, and that breaks the hack in build_lib's
                # 'byte_compile()' method that carefully tacks on a trailing
                # slash (os.sep really) to make sure the prefix here is "just
                # right".  This whole prefix business is rather delicate -- the
                # problem is that it's really a directory, but I'm treating it
                # as a dumb string, so trailing slashes and so forth matter.

                #py_files = map(os.path.abspath, py_files)
                #if prefix:
                #    prefix = os.path.abspath(prefix)

                script.write(",\n".join(map(repr, py_files)) + "]\n")
                script.write("""
byte_compile(files, optimize=%r, force=%r,
             prefix=%r, base_dir=%r,
             verbose=%r, dry_run=0,
             direct=1)
""" % (optimize, force, prefix, base_dir, verbose))

        cmd = [sys.executable]
        cmd.extend(_optim_args_from_interpreter_flags())
        cmd.append(script_name)
        spawn(cmd, dry_run=dry_run)
        execute(os.remove, (script_name,), "removing %s" % script_name,
                dry_run=dry_run)

    # "Direct" byte-compilation: use the py_compile module to compile
    # right here, right now.  Note that the script generated in indirect
    # mode simply calls 'byte_compile()' in direct mode, a weird sort of
    # cross-process recursion.  Hey, it works!
    else:
        from py_compile import compile

        for file in py_files:
            if file[-3:] != ".py":
                # This lets us be lazy and not filter filenames in
                # the "install_lib" command.
                continue

            # Terminology from the py_compile module:
            #   cfile - byte-compiled file
            #   dfile - purported source filename (same as 'file' by default)
            if optimize >= 0:
                opt = '' if optimize == 0 else optimize
                cfile = importlib.util.cache_from_source(
                    file, optimization=opt)
            else:
                cfile = importlib.util.cache_from_source(file)
            dfile = file
            if prefix:
                if file[:len(prefix)] != prefix:
                    raise ValueError("invalid prefix: filename %r doesn't start with %r"
                                     % (file, prefix))
                dfile = dfile[len(prefix):]
            if base_dir:
                dfile = os.path.join(base_dir, dfile)

            cfile_base = os.path.basename(cfile)
            if direct:
                if force or newer(file, cfile):
                    log.info("byte-compiling %s to %s", file, cfile_base)
                    if not dry_run:
                        compile(file, cfile, dfile)
                else:
                    log.debug("skipping byte-compilation of %s to %s",
                              file, cfile_base)

# byte_compile ()

def rfc822_escape (header):
    """Return a version of the string escaped for inclusion in an
    RFC-822 header, by ensuring there are eight spaces after each newline.
    """
    lines = header.split('\n')
    sep = '\n' + 8 * ' '
    return sep.join(lines)
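

# --- Editor's note: an illustrative sketch, not part of the upstream file. ---
# `_demo_rfc822_escape` is a hypothetical helper; nothing in distutils calls it.
def _demo_rfc822_escape():
    # Continuation lines are indented by eight spaces so a multi-line value
    # survives RFC-822 header folding:
    #   rfc822_escape('line one\nline two') -> 'line one\n        line two'
    return rfc822_escape('line one\nline two')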
@ -0,0 +1,363 @@
#
# distutils/version.py
#
# Implements multiple version numbering conventions for the
# Python Module Distribution Utilities.
#
# $Id$
#

"""Provides classes to represent module version numbers (one class for
each style of version numbering).  There are currently two such classes
implemented: StrictVersion and LooseVersion.

Every version number class implements the following interface:
  * the 'parse' method takes a string and parses it to some internal
    representation; if the string is an invalid version number,
    'parse' raises a ValueError exception
  * the class constructor takes an optional string argument which,
    if supplied, is passed to 'parse'
  * __str__ reconstructs the string that was passed to 'parse' (or
    an equivalent string -- ie. one that will generate an equivalent
    version number instance)
  * __repr__ generates Python code to recreate the version number instance
  * _cmp compares the current instance with either another instance
    of the same class or a string (which will be parsed to an instance
    of the same class, thus must follow the same rules)
"""

import re
import warnings
import contextlib


@contextlib.contextmanager
def suppress_known_deprecation():
    with warnings.catch_warnings(record=True) as ctx:
        warnings.filterwarnings(
            action='default',
            category=DeprecationWarning,
            message="distutils Version classes are deprecated.",
        )
        yield ctx


class Version:
    """Abstract base class for version numbering classes.  Just provides
    constructor (__init__) and reproducer (__repr__), because those
    seem to be the same for all version numbering classes; and route
    rich comparisons to _cmp.
    """

    def __init__ (self, vstring=None):
        warnings.warn(
            "distutils Version classes are deprecated. "
            "Use packaging.version instead.",
            DeprecationWarning,
            stacklevel=2,
        )
        if vstring:
            self.parse(vstring)

    def __repr__ (self):
        return "%s ('%s')" % (self.__class__.__name__, str(self))

    def __eq__(self, other):
        c = self._cmp(other)
        if c is NotImplemented:
            return c
        return c == 0

    def __lt__(self, other):
        c = self._cmp(other)
        if c is NotImplemented:
            return c
        return c < 0

    def __le__(self, other):
        c = self._cmp(other)
        if c is NotImplemented:
            return c
        return c <= 0

    def __gt__(self, other):
        c = self._cmp(other)
        if c is NotImplemented:
            return c
        return c > 0

    def __ge__(self, other):
        c = self._cmp(other)
        if c is NotImplemented:
            return c
        return c >= 0


# Interface for version-number classes -- must be implemented
# by the following classes (the concrete ones -- Version should
# be treated as an abstract class).
#    __init__ (string) - create and take same action as 'parse'
#                        (string parameter is optional)
#    parse (string)    - convert a string representation to whatever
#                        internal representation is appropriate for
#                        this style of version numbering
#    __str__ (self)    - convert back to a string; should be very similar
#                        (if not identical to) the string supplied to parse
#    __repr__ (self)   - generate Python code to recreate
#                        the instance
#    _cmp (self, other) - compare two version numbers ('other' may
#                        be an unparsed version string, or another
#                        instance of your version class)


class StrictVersion (Version):

    """Version numbering for anal retentives and software idealists.
    Implements the standard interface for version number classes as
    described above.  A version number consists of two or three
    dot-separated numeric components, with an optional "pre-release" tag
    on the end.  The pre-release tag consists of the letter 'a' or 'b'
    followed by a number.  If the numeric components of two version
    numbers are equal, then one with a pre-release tag will always
    be deemed earlier (lesser) than one without.

    The following are valid version numbers (shown in the order that
    would be obtained by sorting according to the supplied cmp function):

        0.4       0.4.0  (these two are equivalent)
        0.4.1
        0.5a1
        0.5b3
        0.5
        0.9.6
        1.0
        1.0.4a3
        1.0.4b1
        1.0.4

    The following are examples of invalid version numbers:

        1
        2.7.2.2
        1.3.a4
        1.3pl1
        1.3c4

    The rationale for this version numbering system will be explained
    in the distutils documentation.
    """

    version_re = re.compile(r'^(\d+) \. (\d+) (\. (\d+))? ([ab](\d+))?$',
                            re.VERBOSE | re.ASCII)


    def parse (self, vstring):
        match = self.version_re.match(vstring)
        if not match:
            raise ValueError("invalid version number '%s'" % vstring)

        (major, minor, patch, prerelease, prerelease_num) = \
            match.group(1, 2, 4, 5, 6)

        if patch:
            self.version = tuple(map(int, [major, minor, patch]))
        else:
            self.version = tuple(map(int, [major, minor])) + (0,)

        if prerelease:
            self.prerelease = (prerelease[0], int(prerelease_num))
        else:
            self.prerelease = None


    def __str__ (self):

        if self.version[2] == 0:
            vstring = '.'.join(map(str, self.version[0:2]))
        else:
            vstring = '.'.join(map(str, self.version))

        if self.prerelease:
            vstring = vstring + self.prerelease[0] + str(self.prerelease[1])

        return vstring


    def _cmp (self, other):
        if isinstance(other, str):
            with suppress_known_deprecation():
                other = StrictVersion(other)
        elif not isinstance(other, StrictVersion):
            return NotImplemented

        if self.version != other.version:
            # numeric versions don't match
            # prerelease stuff doesn't matter
            if self.version < other.version:
                return -1
            else:
                return 1

        # have to compare prerelease
        # case 1: neither has prerelease; they're equal
        # case 2: self has prerelease, other doesn't; other is greater
        # case 3: self doesn't have prerelease, other does: self is greater
        # case 4: both have prerelease: must compare them!

        if (not self.prerelease and not other.prerelease):
            return 0
        elif (self.prerelease and not other.prerelease):
            return -1
        elif (not self.prerelease and other.prerelease):
            return 1
        elif (self.prerelease and other.prerelease):
            if self.prerelease == other.prerelease:
                return 0
            elif self.prerelease < other.prerelease:
                return -1
            else:
                return 1
        else:
            assert False, "never get here"

# end class StrictVersion
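

# --- Editor's note: an illustrative sketch, not part of the upstream file. ---
# `_demo_strict_version` is a hypothetical helper exercising the pre-release
# ordering described in the docstring; nothing in distutils calls it.
def _demo_strict_version():
    with suppress_known_deprecation():
        # With equal numeric components, the pre-release tag sorts earlier.
        assert StrictVersion('1.0.4a3') < StrictVersion('1.0.4')
        # '0.4' and '0.4.0' both parse to the tuple (0, 4, 0).
        assert StrictVersion('0.4') == StrictVersion('0.4.0')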


# The rules according to Greg Stein:
# 1) a version number has 1 or more numbers separated by a period or by
#    sequences of letters. If only periods, then these are compared
#    left-to-right to determine an ordering.
# 2) sequences of letters are part of the tuple for comparison and are
#    compared lexicographically
# 3) recognize the numeric components may have leading zeroes
#
# The LooseVersion class below implements these rules: a version number
# string is split up into a tuple of integer and string components, and
# comparison is a simple tuple comparison.  This means that version
# numbers behave in a predictable and obvious way, but a way that might
# not necessarily be how people *want* version numbers to behave.  There
# wouldn't be a problem if people could stick to purely numeric version
# numbers: just split on period and compare the numbers as tuples.
# However, people insist on putting letters into their version numbers;
# the most common purpose seems to be:
#   - indicating a "pre-release" version
#     ('alpha', 'beta', 'a', 'b', 'pre', 'p')
#   - indicating a post-release patch ('p', 'pl', 'patch')
# but of course this can't cover all version number schemes, and there's
# no way to know what a programmer means without asking him.
#
# The problem is what to do with letters (and other non-numeric
# characters) in a version number.  The current implementation does the
# obvious and predictable thing: keep them as strings and compare
# lexically within a tuple comparison.  This has the desired effect if
# an appended letter sequence implies something "post-release":
# eg. "0.99" < "0.99pl14" < "1.0", and "5.001" < "5.001m" < "5.002".
#
# However, if letters in a version number imply a pre-release version,
# the "obvious" thing isn't correct.  Eg. you would expect that
# "1.5.1" < "1.5.2a2" < "1.5.2", but under the tuple/lexical comparison
# implemented here, this just isn't so.
#
# Two possible solutions come to mind.  The first is to tie the
# comparison algorithm to a particular set of semantic rules, as has
# been done in the StrictVersion class above.  This works great as long
# as everyone can go along with bondage and discipline.  Hopefully a
# (large) subset of Python module programmers will agree that the
# particular flavour of bondage and discipline provided by StrictVersion
# provides enough benefit to be worth using, and will submit their
# version numbering scheme to its domination.  The free-thinking
# anarchists in the lot will never give in, though, and something needs
# to be done to accommodate them.
#
# Perhaps a "moderately strict" version class could be implemented that
# lets almost anything slide (syntactically), and makes some heuristic
# assumptions about non-digits in version number strings.  This could
# sink into special-case-hell, though; if I was as talented and
# idiosyncratic as Larry Wall, I'd go ahead and implement a class that
# somehow knows that "1.2.1" < "1.2.2a2" < "1.2.2" < "1.2.2pl3", and is
# just as happy dealing with things like "2g6" and "1.13++".  I don't
# think I'm smart enough to do it right though.
#
# In any case, I've coded the test suite for this module (see
# ../test/test_version.py) specifically to fail on things like comparing
# "1.2a2" and "1.2".  That's not because the *code* is doing anything
# wrong, it's because the simple, obvious design doesn't match my
# complicated, hairy expectations for real-world version numbers.  It
# would be a snap to fix the test suite to say, "Yep, LooseVersion does
# the Right Thing" (ie. the code matches the conception).  But I'd rather
# have a conception that matches common notions about version numbers.

class LooseVersion (Version):

    """Version numbering for anarchists and software realists.
    Implements the standard interface for version number classes as
    described above.  A version number consists of a series of numbers,
    separated by either periods or strings of letters.  When comparing
    version numbers, the numeric components will be compared
    numerically, and the alphabetic components lexically.  The following
    are all valid version numbers, in no particular order:

        1.5.1
        1.5.2b2
        161
        3.10a
        8.02
        3.4j
        1996.07.12
        3.2.pl0
        3.1.1.6
        2g6
        11g
        0.960923
        2.2beta29
        1.13++
        5.5.kw
        2.0b1pl0

    In fact, there is no such thing as an invalid version number under
    this scheme; the rules for comparison are simple and predictable,
    but may not always give the results you want (for some definition
    of "want").
    """

    component_re = re.compile(r'(\d+ | [a-z]+ | \.)', re.VERBOSE)

    def parse (self, vstring):
        # I've given up on thinking I can reconstruct the version string
        # from the parsed tuple -- so I just store the string here for
        # use by __str__
        self.vstring = vstring
        components = [x for x in self.component_re.split(vstring)
                      if x and x != '.']
        for i, obj in enumerate(components):
            try:
                components[i] = int(obj)
            except ValueError:
                pass

        self.version = components


    def __str__ (self):
        return self.vstring


    def __repr__ (self):
        return "LooseVersion ('%s')" % str(self)


    def _cmp (self, other):
        if isinstance(other, str):
            other = LooseVersion(other)
        elif not isinstance(other, LooseVersion):
            return NotImplemented

        if self.version == other.version:
            return 0
        if self.version < other.version:
            return -1
        if self.version > other.version:
            return 1


# end class LooseVersion
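

# --- Editor's note: an illustrative sketch, not part of the upstream file. ---
# `_demo_loose_version` is a hypothetical helper showing the tuple/lexical
# comparison (including the caveats from the comments above); nothing in
# distutils calls it.
def _demo_loose_version():
    with suppress_known_deprecation():
        # '0.99pl14' splits into [0, 99, 'pl', 14], sorting between
        # '0.99' ([0, 99]) and '1.0' ([1, 0]) -- the post-release case:
        assert LooseVersion('0.99') < LooseVersion('0.99pl14') < LooseVersion('1.0')
        # The pre-release caveat: '1.5.2a2' compares *greater* than '1.5.2'.
        assert LooseVersion('1.5.2a2') > LooseVersion('1.5.2')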
@ -0,0 +1,169 @@
"""Module for parsing and testing package version predicate strings.
"""
import re
import distutils.version
import operator


re_validPackage = re.compile(r"(?i)^\s*([a-z_]\w*(?:\.[a-z_]\w*)*)(.*)",
                             re.ASCII)
# (package) (rest)

re_paren = re.compile(r"^\s*\((.*)\)\s*$")  # (list) inside of parentheses
re_splitComparison = re.compile(r"^\s*(<=|>=|<|>|!=|==)\s*([^\s,]+)\s*$")
# (comp) (version)


def splitUp(pred):
    """Parse a single version comparison.

    Return (comparison string, StrictVersion)
    """
    res = re_splitComparison.match(pred)
    if not res:
        raise ValueError("bad package restriction syntax: %r" % pred)
    comp, verStr = res.groups()
    with distutils.version.suppress_known_deprecation():
        other = distutils.version.StrictVersion(verStr)
    return (comp, other)

compmap = {"<": operator.lt, "<=": operator.le, "==": operator.eq,
           ">": operator.gt, ">=": operator.ge, "!=": operator.ne}

class VersionPredicate:
    """Parse and test package version predicates.

    >>> v = VersionPredicate('pyepat.abc (>1.0, <3333.3a1, !=1555.1b3)')

    The `name` attribute provides the full dotted name that is given::

    >>> v.name
    'pyepat.abc'

    The str() of a `VersionPredicate` provides a normalized
    human-readable version of the expression::

    >>> print(v)
    pyepat.abc (> 1.0, < 3333.3a1, != 1555.1b3)

    The `satisfied_by()` method can be used to determine whether a given
    version number is included in the set described by the version
    restrictions::

    >>> v.satisfied_by('1.1')
    True
    >>> v.satisfied_by('1.4')
    True
    >>> v.satisfied_by('1.0')
    False
    >>> v.satisfied_by('4444.4')
    False
    >>> v.satisfied_by('1555.1b3')
    False

    `VersionPredicate` is flexible in accepting extra whitespace::

    >>> v = VersionPredicate(' pat( == 0.1 ) ')
    >>> v.name
    'pat'
    >>> v.satisfied_by('0.1')
    True
    >>> v.satisfied_by('0.2')
    False

    If any version numbers passed in do not conform to the
    restrictions of `StrictVersion`, a `ValueError` is raised::

    >>> v = VersionPredicate('p1.p2.p3.p4(>=1.0, <=1.3a1, !=1.2zb3)')
    Traceback (most recent call last):
      ...
    ValueError: invalid version number '1.2zb3'

    If the module or package name given does not conform to what's
    allowed as a legal module or package name, `ValueError` is
    raised::

    >>> v = VersionPredicate('foo-bar')
    Traceback (most recent call last):
      ...
    ValueError: expected parenthesized list: '-bar'

    >>> v = VersionPredicate('foo bar (12.21)')
    Traceback (most recent call last):
      ...
    ValueError: expected parenthesized list: 'bar (12.21)'

    """

    def __init__(self, versionPredicateStr):
        """Parse a version predicate string.
        """
        # Fields:
        #    name:  package name
        #    pred:  list of (comparison string, StrictVersion)

        versionPredicateStr = versionPredicateStr.strip()
        if not versionPredicateStr:
            raise ValueError("empty package restriction")
        match = re_validPackage.match(versionPredicateStr)
        if not match:
            raise ValueError("bad package name in %r" % versionPredicateStr)
        self.name, paren = match.groups()
        paren = paren.strip()
        if paren:
            match = re_paren.match(paren)
            if not match:
                raise ValueError("expected parenthesized list: %r" % paren)
            str = match.groups()[0]
            self.pred = [splitUp(aPred) for aPred in str.split(",")]
            if not self.pred:
                raise ValueError("empty parenthesized list in %r"
                                 % versionPredicateStr)
        else:
            self.pred = []

    def __str__(self):
        if self.pred:
            seq = [cond + " " + str(ver) for cond, ver in self.pred]
            return self.name + " (" + ", ".join(seq) + ")"
        else:
            return self.name

    def satisfied_by(self, version):
        """True if version is compatible with all the predicates in self.
        The parameter version must be acceptable to the StrictVersion
        constructor.  It may be either a string or StrictVersion.
        """
        for cond, ver in self.pred:
            if not compmap[cond](version, ver):
                return False
        return True


_provision_rx = None

def split_provision(value):
    """Return the name and optional version number of a provision.

    The version number, if given, will be returned as a `StrictVersion`
    instance, otherwise it will be `None`.

    >>> split_provision('mypkg')
    ('mypkg', None)
    >>> split_provision(' mypkg( 1.2 ) ')
    ('mypkg', StrictVersion ('1.2'))
    """
    global _provision_rx
    if _provision_rx is None:
        _provision_rx = re.compile(
            r"([a-zA-Z_]\w*(?:\.[a-zA-Z_]\w*)*)(?:\s*\(\s*([^)\s]+)\s*\))?$",
            re.ASCII)
    value = value.strip()
    m = _provision_rx.match(value)
    if not m:
        raise ValueError("illegal provides specification: %r" % value)
    ver = m.group(2) or None
    if ver:
        with distutils.version.suppress_known_deprecation():
            ver = distutils.version.StrictVersion(ver)
    return m.group(1), ver
82
utils/python-venv/Lib/site-packages/setuptools/_imp.py
Normal file
@ -0,0 +1,82 @@
"""
Re-implementation of find_module and get_frozen_object
from the deprecated imp module.
"""

import os
import importlib.util
import importlib.machinery

from .py34compat import module_from_spec


PY_SOURCE = 1
PY_COMPILED = 2
C_EXTENSION = 3
C_BUILTIN = 6
PY_FROZEN = 7


def find_spec(module, paths):
    finder = (
        importlib.machinery.PathFinder().find_spec
        if isinstance(paths, list) else
        importlib.util.find_spec
    )
    return finder(module, paths)


def find_module(module, paths=None):
    """Just like 'imp.find_module()', but with package support"""
    spec = find_spec(module, paths)
    if spec is None:
        raise ImportError("Can't find %s" % module)
    if not spec.has_location and hasattr(spec, 'submodule_search_locations'):
        spec = importlib.util.spec_from_loader('__init__.py', spec.loader)

    kind = -1
    file = None
    static = isinstance(spec.loader, type)
    if spec.origin == 'frozen' or static and issubclass(
            spec.loader, importlib.machinery.FrozenImporter):
        kind = PY_FROZEN
        path = None  # imp compatibility
        suffix = mode = ''  # imp compatibility
    elif spec.origin == 'built-in' or static and issubclass(
            spec.loader, importlib.machinery.BuiltinImporter):
        kind = C_BUILTIN
        path = None  # imp compatibility
        suffix = mode = ''  # imp compatibility
    elif spec.has_location:
        path = spec.origin
        suffix = os.path.splitext(path)[1]
        mode = 'r' if suffix in importlib.machinery.SOURCE_SUFFIXES else 'rb'

        if suffix in importlib.machinery.SOURCE_SUFFIXES:
            kind = PY_SOURCE
        elif suffix in importlib.machinery.BYTECODE_SUFFIXES:
            kind = PY_COMPILED
        elif suffix in importlib.machinery.EXTENSION_SUFFIXES:
            kind = C_EXTENSION

        if kind in {PY_SOURCE, PY_COMPILED}:
            file = open(path, mode)
    else:
        path = None
        suffix = mode = ''

    return file, path, (suffix, mode, kind)
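

# --- Editor's note: an illustrative sketch, not part of the upstream file. ---
# `_demo_find_module` is a hypothetical helper showing the imp-style return
# value; nothing in setuptools calls it.
def _demo_find_module():
    # Mirrors the old imp.find_module() triple: an open file handle (or None),
    # the path, and a (suffix, mode, kind) tuple.
    file, path, (suffix, mode, kind) = find_module('os')
    if file is not None:
        file.close()  # 'os' is a source module, so a file handle was opened
    return path, suffix, mode, kind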
|
||||
|
||||
|
||||
def get_frozen_object(module, paths=None):
|
||||
spec = find_spec(module, paths)
|
||||
if not spec:
|
||||
raise ImportError("Can't find %s" % module)
|
||||
return spec.loader.get_code(module)
|
||||
|
||||
|
||||
def get_module(module, paths, info):
|
||||
spec = find_spec(module, paths)
|
||||
if not spec:
|
||||
raise ImportError("Can't find %s" % module)
|
||||
return module_from_spec(spec)
|
@ -0,0 +1,4 @@
|
||||
from .more import * # noqa
|
||||
from .recipes import * # noqa
|
||||
|
||||
__version__ = '8.8.0'
|
File diff suppressed because it is too large
Load Diff
@ -0,0 +1,620 @@
|
||||
"""Imported from the recipes section of the itertools documentation.
|
||||
|
||||
All functions taken from the recipes section of the itertools library docs
|
||||
[1]_.
|
||||
Some backward-compatible usability improvements have been made.
|
||||
|
||||
.. [1] http://docs.python.org/library/itertools.html#recipes
|
||||
|
||||
"""
|
||||
import warnings
|
||||
from collections import deque
|
||||
from itertools import (
|
||||
chain,
|
||||
combinations,
|
||||
count,
|
||||
cycle,
|
||||
groupby,
|
||||
islice,
|
||||
repeat,
|
||||
starmap,
|
||||
tee,
|
||||
zip_longest,
|
||||
)
|
||||
import operator
|
||||
from random import randrange, sample, choice
|
||||
|
||||
__all__ = [
|
||||
'all_equal',
|
||||
'consume',
|
||||
'convolve',
|
||||
'dotproduct',
|
||||
'first_true',
|
||||
'flatten',
|
||||
'grouper',
|
||||
'iter_except',
|
||||
'ncycles',
|
||||
'nth',
|
||||
'nth_combination',
|
||||
'padnone',
|
||||
'pad_none',
|
||||
'pairwise',
|
||||
'partition',
|
||||
'powerset',
|
||||
'prepend',
|
||||
'quantify',
|
||||
'random_combination_with_replacement',
|
||||
'random_combination',
|
||||
'random_permutation',
|
||||
'random_product',
|
||||
'repeatfunc',
|
||||
'roundrobin',
|
||||
'tabulate',
|
||||
'tail',
|
||||
'take',
|
||||
'unique_everseen',
|
||||
'unique_justseen',
|
||||
]
|
||||
|
||||
|
||||
def take(n, iterable):
|
||||
"""Return first *n* items of the iterable as a list.
|
||||
|
||||
>>> take(3, range(10))
|
||||
[0, 1, 2]
|
||||
|
||||
If there are fewer than *n* items in the iterable, all of them are
|
||||
returned.
|
||||
|
||||
>>> take(10, range(3))
|
||||
[0, 1, 2]
|
||||
|
||||
"""
|
||||
return list(islice(iterable, n))
|
||||
|
||||
|
||||
def tabulate(function, start=0):
|
||||
"""Return an iterator over the results of ``func(start)``,
|
||||
``func(start + 1)``, ``func(start + 2)``...
|
||||
|
||||
*func* should be a function that accepts one integer argument.
|
||||
|
||||
If *start* is not specified it defaults to 0. It will be incremented each
|
||||
time the iterator is advanced.
|
||||
|
||||
>>> square = lambda x: x ** 2
|
||||
>>> iterator = tabulate(square, -3)
|
||||
>>> take(4, iterator)
|
||||
[9, 4, 1, 0]
|
||||
|
||||
"""
|
||||
return map(function, count(start))
|
||||
|
||||
|
||||
def tail(n, iterable):
|
||||
"""Return an iterator over the last *n* items of *iterable*.
|
||||
|
||||
>>> t = tail(3, 'ABCDEFG')
|
||||
>>> list(t)
|
||||
['E', 'F', 'G']
|
||||
|
||||
"""
|
||||
return iter(deque(iterable, maxlen=n))
|
||||
|
||||
|
||||
def consume(iterator, n=None):
|
||||
"""Advance *iterable* by *n* steps. If *n* is ``None``, consume it
|
||||
entirely.
|
||||
|
||||
Efficiently exhausts an iterator without returning values. Defaults to
|
||||
consuming the whole iterator, but an optional second argument may be
|
||||
provided to limit consumption.
|
||||
|
||||
>>> i = (x for x in range(10))
|
||||
>>> next(i)
|
||||
0
|
||||
>>> consume(i, 3)
|
||||
>>> next(i)
|
||||
4
|
||||
>>> consume(i)
|
||||
>>> next(i)
|
||||
Traceback (most recent call last):
|
||||
File "<stdin>", line 1, in <module>
|
||||
StopIteration
|
||||
|
||||
If the iterator has fewer items remaining than the provided limit, the
|
||||
whole iterator will be consumed.
|
||||
|
||||
>>> i = (x for x in range(3))
|
||||
>>> consume(i, 5)
|
||||
>>> next(i)
|
||||
Traceback (most recent call last):
|
||||
File "<stdin>", line 1, in <module>
|
||||
StopIteration
|
||||
|
||||
"""
|
||||
# Use functions that consume iterators at C speed.
|
||||
if n is None:
|
||||
# feed the entire iterator into a zero-length deque
|
||||
deque(iterator, maxlen=0)
|
||||
else:
|
||||
# advance to the empty slice starting at position n
|
||||
next(islice(iterator, n, n), None)
|
||||
|
||||
|
||||
def nth(iterable, n, default=None):
|
||||
"""Returns the nth item or a default value.
|
||||
|
||||
>>> l = range(10)
|
||||
>>> nth(l, 3)
|
||||
3
|
||||
>>> nth(l, 20, "zebra")
|
||||
'zebra'
|
||||
|
||||
"""
|
||||
return next(islice(iterable, n, None), default)
|
||||
|
||||
|
||||
def all_equal(iterable):
|
||||
"""
|
||||
Returns ``True`` if all the elements are equal to each other.
|
||||
|
||||
>>> all_equal('aaaa')
|
||||
True
|
||||
>>> all_equal('aaab')
|
||||
False
|
||||
|
||||
"""
|
||||
g = groupby(iterable)
|
||||
return next(g, True) and not next(g, False)
|
||||
|
||||
|
||||
def quantify(iterable, pred=bool):
|
||||
"""Return the how many times the predicate is true.
|
||||
|
||||
>>> quantify([True, False, True])
|
||||
2
|
||||
|
||||
"""
|
||||
return sum(map(pred, iterable))
|
||||
|
||||
|
||||
def pad_none(iterable):
|
||||
"""Returns the sequence of elements and then returns ``None`` indefinitely.
|
||||
|
||||
>>> take(5, pad_none(range(3)))
|
||||
[0, 1, 2, None, None]
|
||||
|
||||
Useful for emulating the behavior of the built-in :func:`map` function.
|
||||
|
||||
See also :func:`padded`.
|
||||
|
||||
"""
|
||||
return chain(iterable, repeat(None))
|
||||
|
||||
|
||||
padnone = pad_none
|
||||
|
||||
|
||||
def ncycles(iterable, n):
|
||||
"""Returns the sequence elements *n* times
|
||||
|
||||
>>> list(ncycles(["a", "b"], 3))
|
||||
['a', 'b', 'a', 'b', 'a', 'b']
|
||||
|
||||
"""
|
||||
return chain.from_iterable(repeat(tuple(iterable), n))
|
||||
|
||||
|
||||
def dotproduct(vec1, vec2):
|
||||
"""Returns the dot product of the two iterables.
|
||||
|
||||
>>> dotproduct([10, 10], [20, 20])
|
||||
400
|
||||
|
||||
"""
|
||||
return sum(map(operator.mul, vec1, vec2))
|
||||
|
||||
|
||||
def flatten(listOfLists):
|
||||
"""Return an iterator flattening one level of nesting in a list of lists.
|
||||
|
||||
>>> list(flatten([[0, 1], [2, 3]]))
|
||||
[0, 1, 2, 3]
|
||||
|
||||
See also :func:`collapse`, which can flatten multiple levels of nesting.
|
||||
|
||||
"""
|
||||
return chain.from_iterable(listOfLists)
|
||||
|
||||
|
||||
def repeatfunc(func, times=None, *args):
|
||||
"""Call *func* with *args* repeatedly, returning an iterable over the
|
||||
results.
|
||||
|
||||
If *times* is specified, the iterable will terminate after that many
|
||||
repetitions:
|
||||
|
||||
>>> from operator import add
|
||||
>>> times = 4
|
||||
>>> args = 3, 5
|
||||
>>> list(repeatfunc(add, times, *args))
|
||||
[8, 8, 8, 8]
|
||||
|
||||
If *times* is ``None`` the iterable will not terminate:
|
||||
|
||||
>>> from random import randrange
|
||||
>>> times = None
|
||||
>>> args = 1, 11
|
||||
>>> take(6, repeatfunc(randrange, times, *args)) # doctest:+SKIP
|
||||
[2, 4, 8, 1, 8, 4]
|
||||
|
||||
"""
|
||||
if times is None:
|
||||
return starmap(func, repeat(args))
|
||||
return starmap(func, repeat(args, times))
|
||||
|
||||
|
||||
def _pairwise(iterable):
|
||||
"""Returns an iterator of paired items, overlapping, from the original
|
||||
|
||||
>>> take(4, pairwise(count()))
|
||||
[(0, 1), (1, 2), (2, 3), (3, 4)]
|
||||
|
||||
On Python 3.10 and above, this is an alias for :func:`itertools.pairwise`.
|
||||
|
||||
"""
|
||||
a, b = tee(iterable)
|
||||
next(b, None)
|
||||
yield from zip(a, b)
|
||||
|
||||
|
||||
try:
|
||||
from itertools import pairwise as itertools_pairwise
|
||||
except ImportError:
|
||||
pairwise = _pairwise
|
||||
else:
|
||||
|
||||
def pairwise(iterable):
|
||||
yield from itertools_pairwise(iterable)
|
||||
|
||||
pairwise.__doc__ = _pairwise.__doc__
|
||||
|
||||
|
||||
def grouper(iterable, n, fillvalue=None):
|
||||
"""Collect data into fixed-length chunks or blocks.
|
||||
|
||||
>>> list(grouper('ABCDEFG', 3, 'x'))
|
||||
[('A', 'B', 'C'), ('D', 'E', 'F'), ('G', 'x', 'x')]
|
||||
|
||||
"""
|
||||
if isinstance(iterable, int):
|
||||
warnings.warn(
|
||||
"grouper expects iterable as first parameter", DeprecationWarning
|
||||
)
|
||||
n, iterable = iterable, n
|
||||
args = [iter(iterable)] * n
|
||||
return zip_longest(fillvalue=fillvalue, *args)
|
||||
|
||||
|
||||
def roundrobin(*iterables):
|
||||
"""Yields an item from each iterable, alternating between them.
|
||||
|
||||
>>> list(roundrobin('ABC', 'D', 'EF'))
|
||||
['A', 'D', 'E', 'B', 'F', 'C']
|
||||
|
||||
This function produces the same output as :func:`interleave_longest`, but
|
||||
may perform better for some inputs (in particular when the number of
|
||||
iterables is small).
|
||||
|
||||
"""
|
||||
# Recipe credited to George Sakkis
|
||||
pending = len(iterables)
|
||||
nexts = cycle(iter(it).__next__ for it in iterables)
|
||||
while pending:
|
||||
try:
|
||||
for next in nexts:
|
||||
yield next()
|
||||
except StopIteration:
|
||||
pending -= 1
|
||||
nexts = cycle(islice(nexts, pending))
|
||||
|
||||
|
||||
def partition(pred, iterable):
|
||||
"""
|
||||
Returns a 2-tuple of iterables derived from the input iterable.
|
||||
The first yields the items that have ``pred(item) == False``.
|
||||
The second yields the items that have ``pred(item) == True``.
|
||||
|
||||
>>> is_odd = lambda x: x % 2 != 0
|
||||
>>> iterable = range(10)
|
||||
>>> even_items, odd_items = partition(is_odd, iterable)
|
||||
>>> list(even_items), list(odd_items)
|
||||
([0, 2, 4, 6, 8], [1, 3, 5, 7, 9])
|
||||
|
||||
If *pred* is None, :func:`bool` is used.
|
||||
|
||||
>>> iterable = [0, 1, False, True, '', ' ']
|
||||
>>> false_items, true_items = partition(None, iterable)
|
||||
>>> list(false_items), list(true_items)
|
||||
([0, False, ''], [1, True, ' '])
|
||||
|
||||
"""
|
||||
if pred is None:
|
||||
pred = bool
|
||||
|
||||
evaluations = ((pred(x), x) for x in iterable)
|
||||
t1, t2 = tee(evaluations)
|
||||
return (
|
||||
(x for (cond, x) in t1 if not cond),
|
||||
(x for (cond, x) in t2 if cond),
|
||||
)
|
||||
|
||||
|
||||
def powerset(iterable):
|
||||
"""Yields all possible subsets of the iterable.
|
||||
|
||||
>>> list(powerset([1, 2, 3]))
|
||||
[(), (1,), (2,), (3,), (1, 2), (1, 3), (2, 3), (1, 2, 3)]
|
||||
|
||||
:func:`powerset` will operate on iterables that aren't :class:`set`
|
||||
instances, so repeated elements in the input will produce repeated elements
|
||||
in the output. Use :func:`unique_everseen` on the input to avoid generating
|
||||
duplicates:
|
||||
|
||||
>>> seq = [1, 1, 0]
|
||||
>>> list(powerset(seq))
|
||||
[(), (1,), (1,), (0,), (1, 1), (1, 0), (1, 0), (1, 1, 0)]
|
||||
>>> from more_itertools import unique_everseen
|
||||
>>> list(powerset(unique_everseen(seq)))
|
||||
[(), (1,), (0,), (1, 0)]
|
||||
|
||||
"""
|
||||
s = list(iterable)
|
||||
return chain.from_iterable(combinations(s, r) for r in range(len(s) + 1))
|
||||
|
||||
|
||||
def unique_everseen(iterable, key=None):
|
||||
"""
|
||||
Yield unique elements, preserving order.
|
||||
|
||||
>>> list(unique_everseen('AAAABBBCCDAABBB'))
|
||||
['A', 'B', 'C', 'D']
|
||||
>>> list(unique_everseen('ABBCcAD', str.lower))
|
||||
['A', 'B', 'C', 'D']
|
||||
|
||||
Sequences with a mix of hashable and unhashable items can be used.
|
||||
The function will be slower (i.e., `O(n^2)`) for unhashable items.
|
||||
|
||||
Remember that ``list`` objects are unhashable - you can use the *key*
|
||||
parameter to transform the list to a tuple (which is hashable) to
|
||||
avoid a slowdown.
|
||||
|
||||
>>> iterable = ([1, 2], [2, 3], [1, 2])
|
||||
>>> list(unique_everseen(iterable)) # Slow
|
||||
[[1, 2], [2, 3]]
|
||||
>>> list(unique_everseen(iterable, key=tuple)) # Faster
|
||||
[[1, 2], [2, 3]]
|
||||
|
||||
Similary, you may want to convert unhashable ``set`` objects with
|
||||
``key=frozenset``. For ``dict`` objects,
|
||||
``key=lambda x: frozenset(x.items())`` can be used.
|
||||
|
||||
"""
|
||||
seenset = set()
|
||||
seenset_add = seenset.add
|
||||
seenlist = []
|
||||
seenlist_add = seenlist.append
|
||||
use_key = key is not None
|
||||
|
||||
for element in iterable:
|
||||
k = key(element) if use_key else element
|
||||
try:
|
||||
if k not in seenset:
|
||||
seenset_add(k)
|
||||
yield element
|
||||
except TypeError:
|
||||
if k not in seenlist:
|
||||
seenlist_add(k)
|
||||
yield element
|
||||
|
||||
|
||||
def unique_justseen(iterable, key=None):
|
||||
"""Yields elements in order, ignoring serial duplicates
|
||||
|
||||
>>> list(unique_justseen('AAAABBBCCDAABBB'))
|
||||
['A', 'B', 'C', 'D', 'A', 'B']
|
||||
>>> list(unique_justseen('ABBCcAD', str.lower))
|
||||
['A', 'B', 'C', 'A', 'D']
|
||||
|
||||
"""
|
||||
return map(next, map(operator.itemgetter(1), groupby(iterable, key)))
|
||||
|
||||
|
||||
def iter_except(func, exception, first=None):
|
||||
"""Yields results from a function repeatedly until an exception is raised.
|
||||
|
||||
Converts a call-until-exception interface to an iterator interface.
|
||||
Like ``iter(func, sentinel)``, but uses an exception instead of a sentinel
|
||||
to end the loop.
|
||||
|
||||
>>> l = [0, 1, 2]
|
||||
>>> list(iter_except(l.pop, IndexError))
|
||||
[2, 1, 0]
|
||||
|
||||
"""
|
||||
try:
|
||||
if first is not None:
|
||||
yield first()
|
||||
while 1:
|
||||
yield func()
|
||||
except exception:
|
||||
pass
|
||||
|
||||
|
||||
def first_true(iterable, default=None, pred=None):
|
||||
"""
|
||||
Returns the first true value in the iterable.
|
||||
|
||||
If no true value is found, returns *default*
|
||||
|
||||
If *pred* is not None, returns the first item for which
|
||||
``pred(item) == True`` .
|
||||
|
||||
>>> first_true(range(10))
|
||||
1
|
||||
>>> first_true(range(10), pred=lambda x: x > 5)
|
||||
6
|
||||
>>> first_true(range(10), default='missing', pred=lambda x: x > 9)
|
||||
'missing'
|
||||
|
||||
"""
|
||||
return next(filter(pred, iterable), default)
|
||||
|
||||
|
||||
def random_product(*args, repeat=1):
|
||||
"""Draw an item at random from each of the input iterables.
|
||||
|
||||
>>> random_product('abc', range(4), 'XYZ') # doctest:+SKIP
|
||||
('c', 3, 'Z')
|
||||
|
||||
If *repeat* is provided as a keyword argument, that many items will be
|
||||
drawn from each iterable.
|
||||
|
||||
>>> random_product('abcd', range(4), repeat=2) # doctest:+SKIP
|
||||
('a', 2, 'd', 3)
|
||||
|
||||
This equivalent to taking a random selection from
|
||||
``itertools.product(*args, **kwarg)``.
|
||||
|
||||
"""
|
||||
pools = [tuple(pool) for pool in args] * repeat
|
||||
return tuple(choice(pool) for pool in pools)
|
||||
|
||||
|
||||
def random_permutation(iterable, r=None):
|
||||
"""Return a random *r* length permutation of the elements in *iterable*.
|
||||
|
||||
If *r* is not specified or is ``None``, then *r* defaults to the length of
|
||||
*iterable*.
|
||||
|
||||
>>> random_permutation(range(5)) # doctest:+SKIP
|
||||
(3, 4, 0, 1, 2)
|
||||
|
||||
This equivalent to taking a random selection from
|
||||
``itertools.permutations(iterable, r)``.
|
||||
|
||||
"""
|
||||
pool = tuple(iterable)
|
||||
r = len(pool) if r is None else r
|
||||
return tuple(sample(pool, r))
|
||||
|
||||
|
||||
def random_combination(iterable, r):
|
||||
"""Return a random *r* length subsequence of the elements in *iterable*.
|
||||
|
||||
>>> random_combination(range(5), 3) # doctest:+SKIP
|
||||
(2, 3, 4)
|
||||
|
||||
This equivalent to taking a random selection from
|
||||
``itertools.combinations(iterable, r)``.
|
||||
|
||||
"""
|
||||
pool = tuple(iterable)
|
||||
n = len(pool)
|
||||
indices = sorted(sample(range(n), r))
|
||||
return tuple(pool[i] for i in indices)
|
||||
|
||||
|
||||
def random_combination_with_replacement(iterable, r):
|
||||
"""Return a random *r* length subsequence of elements in *iterable*,
|
||||
allowing individual elements to be repeated.
|
||||
|
||||
>>> random_combination_with_replacement(range(3), 5) # doctest:+SKIP
|
||||
(0, 0, 1, 2, 2)
|
||||
|
||||
This equivalent to taking a random selection from
|
||||
``itertools.combinations_with_replacement(iterable, r)``.
|
||||
|
||||
"""
|
||||
pool = tuple(iterable)
|
||||
n = len(pool)
|
||||
indices = sorted(randrange(n) for i in range(r))
|
||||
return tuple(pool[i] for i in indices)
|
||||
|
||||
|
||||
def nth_combination(iterable, r, index):
|
||||
"""Equivalent to ``list(combinations(iterable, r))[index]``.
|
||||
|
||||
The subsequences of *iterable* that are of length *r* can be ordered
|
||||
lexicographically. :func:`nth_combination` computes the subsequence at
|
||||
sort position *index* directly, without computing the previous
|
||||
subsequences.
|
||||
|
||||
>>> nth_combination(range(5), 3, 5)
|
||||
(0, 3, 4)
|
||||
|
||||
``ValueError`` will be raised if *r* is negative or greater than the length
|
||||
of *iterable*.
|
||||
``IndexError`` will be raised if the given *index* is invalid.
|
||||
"""
|
||||
pool = tuple(iterable)
|
||||
n = len(pool)
|
||||
if (r < 0) or (r > n):
|
||||
raise ValueError
|
||||
|
||||
c = 1
|
||||
k = min(r, n - r)
|
||||
for i in range(1, k + 1):
|
||||
c = c * (n - k + i) // i
|
||||
|
||||
if index < 0:
|
||||
index += c
|
||||
|
||||
if (index < 0) or (index >= c):
|
||||
raise IndexError
|
||||
|
||||
result = []
|
||||
while r:
|
||||
c, n, r = c * r // n, n - 1, r - 1
|
||||
while index >= c:
|
||||
index -= c
|
||||
c, n = c * (n - r) // n, n - 1
|
||||
result.append(pool[-1 - n])
|
||||
|
||||
return tuple(result)
|
||||
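# Sanity-check sketch (illustrative): for small inputs, nth_combination
# agrees with materializing the full lexicographic combination list.
#
#     >>> from itertools import combinations
#     >>> all(
#     ...     nth_combination(range(6), 3, i) == c
#     ...     for i, c in enumerate(combinations(range(6), 3))
#     ... )
#     True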
|
||||
|
||||
def prepend(value, iterator):
|
||||
"""Yield *value*, followed by the elements in *iterator*.
|
||||
|
||||
>>> value = '0'
|
||||
>>> iterator = ['1', '2', '3']
|
||||
>>> list(prepend(value, iterator))
|
||||
['0', '1', '2', '3']
|
||||
|
||||
To prepend multiple values, see :func:`itertools.chain`
|
||||
or :func:`value_chain`.
|
||||
|
||||
"""
|
||||
return chain([value], iterator)
|
||||
|
||||
|
||||
def convolve(signal, kernel):
|
||||
"""Convolve the iterable *signal* with the iterable *kernel*.
|
||||
|
||||
>>> signal = (1, 2, 3, 4, 5)
|
||||
>>> kernel = [3, 2, 1]
|
||||
>>> list(convolve(signal, kernel))
|
||||
[3, 8, 14, 20, 26, 14, 5]
|
||||
|
||||
Note: the input arguments are not interchangeable, as the *kernel*
|
||||
is immediately consumed and stored.
|
||||
|
||||
"""
|
||||
kernel = tuple(kernel)[::-1]
|
||||
n = len(kernel)
|
||||
window = deque([0], maxlen=n) * n
|
||||
for x in chain(signal, repeat(0, n - 1)):
|
||||
window.append(x)
|
||||
yield sum(map(operator.mul, kernel, window))
|
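# Usage sketch (illustrative): the output has len(signal) + len(kernel) - 1
# terms, matching a "full" convolution; an averaging kernel such as
# [0.5, 0.5] gives a padded moving average.
#
#     >>> list(convolve([1, 1, 1, 1], [0.5, 0.5]))
#     [0.5, 1.0, 1.0, 1.0, 0.5]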
@@ -0,0 +1,488 @@
|
||||
"""
|
||||
An OrderedSet is a custom MutableSet that remembers its order, so that every
|
||||
entry has an index that can be looked up.
|
||||
|
||||
Based on a recipe originally posted to ActiveState Recipes by Raymond Hettiger,
|
||||
and released under the MIT license.
|
||||
"""
|
||||
import itertools as it
|
||||
from collections import deque
|
||||
|
||||
try:
|
||||
# Python 3
|
||||
from collections.abc import MutableSet, Sequence
|
||||
except ImportError:
|
||||
# Python 2.7
|
||||
from collections import MutableSet, Sequence
|
||||
|
||||
SLICE_ALL = slice(None)
|
||||
__version__ = "3.1"
|
||||
|
||||
|
||||
def is_iterable(obj):
|
||||
"""
|
||||
Are we being asked to look up a list of things, instead of a single thing?
|
||||
We check for the `__iter__` attribute so that this can cover types that
|
||||
don't have to be known by this module, such as NumPy arrays.
|
||||
|
||||
Strings, however, should be considered as atomic values to look up, not
|
||||
iterables. The same goes for tuples, since they are immutable and therefore
|
||||
valid entries.
|
||||
|
||||
We don't need to check for the Python 2 `unicode` type, because it doesn't
|
||||
have an `__iter__` attribute anyway.
|
||||
"""
|
||||
return (
|
||||
hasattr(obj, "__iter__")
|
||||
and not isinstance(obj, str)
|
||||
and not isinstance(obj, tuple)
|
||||
)
|
||||
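# Usage sketch (illustrative): lists (and list-likes such as NumPy arrays)
# are treated as collections of keys to look up, while strings and tuples
# stay atomic.
#
#     >>> is_iterable([1, 2]), is_iterable("ab"), is_iterable((1, 2))
#     (True, False, False)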
|
||||
|
||||
class OrderedSet(MutableSet, Sequence):
|
||||
"""
|
||||
An OrderedSet is a custom MutableSet that remembers its order, so that
|
||||
every entry has an index that can be looked up.
|
||||
|
||||
Example:
|
||||
>>> OrderedSet([1, 1, 2, 3, 2])
|
||||
OrderedSet([1, 2, 3])
|
||||
"""
|
||||
|
||||
def __init__(self, iterable=None):
|
||||
self.items = []
|
||||
self.map = {}
|
||||
if iterable is not None:
|
||||
self |= iterable
|
||||
|
||||
def __len__(self):
|
||||
"""
|
||||
Returns the number of unique elements in the ordered set
|
||||
|
||||
Example:
|
||||
>>> len(OrderedSet([]))
|
||||
0
|
||||
>>> len(OrderedSet([1, 2]))
|
||||
2
|
||||
"""
|
||||
return len(self.items)
|
||||
|
||||
def __getitem__(self, index):
|
||||
"""
|
||||
Get the item at a given index.
|
||||
|
||||
If `index` is a slice, you will get back that slice of items, as a
|
||||
new OrderedSet.
|
||||
|
||||
If `index` is a list or a similar iterable, you'll get a list of
|
||||
items corresponding to those indices. This is similar to NumPy's
|
||||
"fancy indexing". The result is not an OrderedSet because you may ask
|
||||
for duplicate indices, and the number of elements returned should be
|
||||
the number of elements asked for.
|
||||
|
||||
Example:
|
||||
>>> oset = OrderedSet([1, 2, 3])
|
||||
>>> oset[1]
|
||||
2
|
||||
"""
|
||||
if isinstance(index, slice) and index == SLICE_ALL:
|
||||
return self.copy()
|
||||
elif is_iterable(index):
|
||||
return [self.items[i] for i in index]
|
||||
elif hasattr(index, "__index__") or isinstance(index, slice):
|
||||
result = self.items[index]
|
||||
if isinstance(result, list):
|
||||
return self.__class__(result)
|
||||
else:
|
||||
return result
|
||||
else:
|
||||
raise TypeError("Don't know how to index an OrderedSet by %r" % index)
|
||||
|
||||
def copy(self):
|
||||
"""
|
||||
Return a shallow copy of this object.
|
||||
|
||||
Example:
|
||||
>>> this = OrderedSet([1, 2, 3])
|
||||
>>> other = this.copy()
|
||||
>>> this == other
|
||||
True
|
||||
>>> this is other
|
||||
False
|
||||
"""
|
||||
return self.__class__(self)
|
||||
|
||||
def __getstate__(self):
|
||||
if len(self) == 0:
|
||||
# The state can't be an empty list.
|
||||
# We need to return a truthy value, or else __setstate__ won't be run.
|
||||
#
|
||||
# This could have been done more gracefully by always putting the state
|
||||
# in a tuple, but this way is backwards- and forwards- compatible with
|
||||
# previous versions of OrderedSet.
|
||||
return (None,)
|
||||
else:
|
||||
return list(self)
|
||||
|
||||
def __setstate__(self, state):
|
||||
if state == (None,):
|
||||
self.__init__([])
|
||||
else:
|
||||
self.__init__(state)
|
||||
|
||||
def __contains__(self, key):
|
||||
"""
|
||||
Test if the item is in this ordered set
|
||||
|
||||
Example:
|
||||
>>> 1 in OrderedSet([1, 3, 2])
|
||||
True
|
||||
>>> 5 in OrderedSet([1, 3, 2])
|
||||
False
|
||||
"""
|
||||
return key in self.map
|
||||
|
||||
def add(self, key):
|
||||
"""
|
||||
Add `key` as an item to this OrderedSet, then return its index.
|
||||
|
||||
If `key` is already in the OrderedSet, return the index it already
|
||||
had.
|
||||
|
||||
Example:
|
||||
>>> oset = OrderedSet()
|
||||
>>> oset.append(3)
|
||||
0
|
||||
>>> print(oset)
|
||||
OrderedSet([3])
|
||||
"""
|
||||
if key not in self.map:
|
||||
self.map[key] = len(self.items)
|
||||
self.items.append(key)
|
||||
return self.map[key]
|
||||
|
||||
append = add
|
||||
|
||||
def update(self, sequence):
|
||||
"""
|
||||
Update the set with the given iterable sequence, then return the index
|
||||
of the last element inserted.
|
||||
|
||||
Example:
|
||||
>>> oset = OrderedSet([1, 2, 3])
|
||||
>>> oset.update([3, 1, 5, 1, 4])
|
||||
4
|
||||
>>> print(oset)
|
||||
OrderedSet([1, 2, 3, 5, 4])
|
||||
"""
|
||||
item_index = None
|
||||
try:
|
||||
for item in sequence:
|
||||
item_index = self.add(item)
|
||||
except TypeError:
|
||||
raise ValueError(
|
||||
"Argument needs to be an iterable, got %s" % type(sequence)
|
||||
)
|
||||
return item_index
|
||||
|
||||
def index(self, key):
|
||||
"""
|
||||
Get the index of a given entry, raising a KeyError if it's not
|
||||
present.
|
||||
|
||||
`key` can be an iterable of entries that is not a string, in which case
|
||||
this returns a list of indices.
|
||||
|
||||
Example:
|
||||
>>> oset = OrderedSet([1, 2, 3])
|
||||
>>> oset.index(2)
|
||||
1
|
||||
"""
|
||||
if is_iterable(key):
|
||||
return [self.index(subkey) for subkey in key]
|
||||
return self.map[key]
|
||||
|
||||
# Provide some compatibility with pd.Index
|
||||
get_loc = index
|
||||
get_indexer = index
|
||||
|
||||
def pop(self):
|
||||
"""
|
||||
Remove and return the last element from the set.
|
||||
|
||||
Raises KeyError if the set is empty.
|
||||
|
||||
Example:
|
||||
>>> oset = OrderedSet([1, 2, 3])
|
||||
>>> oset.pop()
|
||||
3
|
||||
"""
|
||||
if not self.items:
|
||||
raise KeyError("Set is empty")
|
||||
|
||||
elem = self.items[-1]
|
||||
del self.items[-1]
|
||||
del self.map[elem]
|
||||
return elem
|
||||
|
||||
def discard(self, key):
|
||||
"""
|
||||
Remove an element. Do not raise an exception if absent.
|
||||
|
||||
The MutableSet mixin uses this to implement the .remove() method, which
|
||||
*does* raise an error when asked to remove a non-existent item.
|
||||
|
||||
Example:
|
||||
>>> oset = OrderedSet([1, 2, 3])
|
||||
>>> oset.discard(2)
|
||||
>>> print(oset)
|
||||
OrderedSet([1, 3])
|
||||
>>> oset.discard(2)
|
||||
>>> print(oset)
|
||||
OrderedSet([1, 3])
|
||||
"""
|
||||
if key in self:
|
||||
i = self.map[key]
|
||||
del self.items[i]
|
||||
del self.map[key]
|
||||
for k, v in self.map.items():
|
||||
if v >= i:
|
||||
self.map[k] = v - 1
|
||||
|
||||
def clear(self):
|
||||
"""
|
||||
Remove all items from this OrderedSet.
|
||||
"""
|
||||
del self.items[:]
|
||||
self.map.clear()
|
||||
|
||||
def __iter__(self):
|
||||
"""
|
||||
Example:
|
||||
>>> list(iter(OrderedSet([1, 2, 3])))
|
||||
[1, 2, 3]
|
||||
"""
|
||||
return iter(self.items)
|
||||
|
||||
def __reversed__(self):
|
||||
"""
|
||||
Example:
|
||||
>>> list(reversed(OrderedSet([1, 2, 3])))
|
||||
[3, 2, 1]
|
||||
"""
|
||||
return reversed(self.items)
|
||||
|
||||
def __repr__(self):
|
||||
if not self:
|
||||
return "%s()" % (self.__class__.__name__,)
|
||||
return "%s(%r)" % (self.__class__.__name__, list(self))
|
||||
|
||||
def __eq__(self, other):
|
||||
"""
|
||||
Returns true if the containers have the same items. If `other` is a
|
||||
Sequence, then order is checked, otherwise it is ignored.
|
||||
|
||||
Example:
|
||||
>>> oset = OrderedSet([1, 3, 2])
|
||||
>>> oset == [1, 3, 2]
|
||||
True
|
||||
>>> oset == [1, 2, 3]
|
||||
False
|
||||
>>> oset == [2, 3]
|
||||
False
|
||||
>>> oset == OrderedSet([3, 2, 1])
|
||||
False
|
||||
"""
|
||||
# In Python 2 deque is not a Sequence, so treat it as one for
|
||||
# consistent behavior with Python 3.
|
||||
if isinstance(other, (Sequence, deque)):
|
||||
# Check that this OrderedSet contains the same elements, in the
|
||||
# same order, as the other object.
|
||||
return list(self) == list(other)
|
||||
try:
|
||||
other_as_set = set(other)
|
||||
except TypeError:
|
||||
# If `other` can't be converted into a set, it's not equal.
|
||||
return False
|
||||
else:
|
||||
return set(self) == other_as_set
|
||||
|
||||
def union(self, *sets):
|
||||
"""
|
||||
Combines all unique items.
|
||||
Each item's order is defined by its first appearance.
|
||||
|
||||
Example:
|
||||
>>> oset = OrderedSet.union(OrderedSet([3, 1, 4, 1, 5]), [1, 3], [2, 0])
|
||||
>>> print(oset)
|
||||
OrderedSet([3, 1, 4, 5, 2, 0])
|
||||
>>> oset.union([8, 9])
|
||||
OrderedSet([3, 1, 4, 5, 2, 0, 8, 9])
|
||||
>>> oset | {10}
|
||||
OrderedSet([3, 1, 4, 5, 2, 0, 10])
|
||||
"""
|
||||
cls = self.__class__ if isinstance(self, OrderedSet) else OrderedSet
|
||||
containers = map(list, it.chain([self], sets))
|
||||
items = it.chain.from_iterable(containers)
|
||||
return cls(items)
|
||||
|
||||
def __and__(self, other):
|
||||
# the parent implementation of this is backwards
|
||||
return self.intersection(other)
|
||||
|
||||
def intersection(self, *sets):
|
||||
"""
|
||||
Returns elements in common between all sets. Order is defined only
|
||||
by the first set.
|
||||
|
||||
Example:
|
||||
>>> oset = OrderedSet.intersection(OrderedSet([0, 1, 2, 3]), [1, 2, 3])
|
||||
>>> print(oset)
|
||||
OrderedSet([1, 2, 3])
|
||||
>>> oset.intersection([2, 4, 5], [1, 2, 3, 4])
|
||||
OrderedSet([2])
|
||||
>>> oset.intersection()
|
||||
OrderedSet([1, 2, 3])
|
||||
"""
|
||||
cls = self.__class__ if isinstance(self, OrderedSet) else OrderedSet
|
||||
if sets:
|
||||
common = set.intersection(*map(set, sets))
|
||||
items = (item for item in self if item in common)
|
||||
else:
|
||||
items = self
|
||||
return cls(items)
|
||||
|
||||
def difference(self, *sets):
|
||||
"""
|
||||
Returns all elements that are in this set but not the others.
|
||||
|
||||
Example:
|
||||
>>> OrderedSet([1, 2, 3]).difference(OrderedSet([2]))
|
||||
OrderedSet([1, 3])
|
||||
>>> OrderedSet([1, 2, 3]).difference(OrderedSet([2]), OrderedSet([3]))
|
||||
OrderedSet([1])
|
||||
>>> OrderedSet([1, 2, 3]) - OrderedSet([2])
|
||||
OrderedSet([1, 3])
|
||||
>>> OrderedSet([1, 2, 3]).difference()
|
||||
OrderedSet([1, 2, 3])
|
||||
"""
|
||||
cls = self.__class__
|
||||
if sets:
|
||||
other = set.union(*map(set, sets))
|
||||
items = (item for item in self if item not in other)
|
||||
else:
|
||||
items = self
|
||||
return cls(items)
|
||||
|
||||
def issubset(self, other):
|
||||
"""
|
||||
Report whether another set contains this set.
|
||||
|
||||
Example:
|
||||
>>> OrderedSet([1, 2, 3]).issubset({1, 2})
|
||||
False
|
||||
>>> OrderedSet([1, 2, 3]).issubset({1, 2, 3, 4})
|
||||
True
|
||||
>>> OrderedSet([1, 2, 3]).issubset({1, 4, 3, 5})
|
||||
False
|
||||
"""
|
||||
if len(self) > len(other): # Fast check for obvious cases
|
||||
return False
|
||||
return all(item in other for item in self)
|
||||
|
||||
def issuperset(self, other):
|
||||
"""
|
||||
Report whether this set contains another set.
|
||||
|
||||
Example:
|
||||
>>> OrderedSet([1, 2]).issuperset([1, 2, 3])
|
||||
False
|
||||
>>> OrderedSet([1, 2, 3, 4]).issuperset({1, 2, 3})
|
||||
True
|
||||
>>> OrderedSet([1, 4, 3, 5]).issuperset({1, 2, 3})
|
||||
False
|
||||
"""
|
||||
if len(self) < len(other): # Fast check for obvious cases
|
||||
return False
|
||||
return all(item in self for item in other)
|
||||
|
||||
def symmetric_difference(self, other):
|
||||
"""
|
||||
Return the symmetric difference of two OrderedSets as a new set.
|
||||
That is, the new set will contain all elements that are in exactly
|
||||
one of the sets.
|
||||
|
||||
Their order will be preserved, with elements from `self` preceding
|
||||
elements from `other`.
|
||||
|
||||
Example:
|
||||
>>> this = OrderedSet([1, 4, 3, 5, 7])
|
||||
>>> other = OrderedSet([9, 7, 1, 3, 2])
|
||||
>>> this.symmetric_difference(other)
|
||||
OrderedSet([4, 5, 9, 2])
|
||||
"""
|
||||
cls = self.__class__ if isinstance(self, OrderedSet) else OrderedSet
|
||||
diff1 = cls(self).difference(other)
|
||||
diff2 = cls(other).difference(self)
|
||||
return diff1.union(diff2)
|
||||
|
||||
def _update_items(self, items):
|
||||
"""
|
||||
Replace the 'items' list of this OrderedSet with a new one, updating
|
||||
self.map accordingly.
|
||||
"""
|
||||
self.items = items
|
||||
self.map = {item: idx for (idx, item) in enumerate(items)}
|
||||
|
||||
def difference_update(self, *sets):
|
||||
"""
|
||||
Update this OrderedSet to remove items from one or more other sets.
|
||||
|
||||
Example:
|
||||
>>> this = OrderedSet([1, 2, 3])
|
||||
>>> this.difference_update(OrderedSet([2, 4]))
|
||||
>>> print(this)
|
||||
OrderedSet([1, 3])
|
||||
|
||||
>>> this = OrderedSet([1, 2, 3, 4, 5])
|
||||
>>> this.difference_update(OrderedSet([2, 4]), OrderedSet([1, 4, 6]))
|
||||
>>> print(this)
|
||||
OrderedSet([3, 5])
|
||||
"""
|
||||
items_to_remove = set()
|
||||
for other in sets:
|
||||
items_to_remove |= set(other)
|
||||
self._update_items([item for item in self.items if item not in items_to_remove])
|
||||
|
||||
def intersection_update(self, other):
|
||||
"""
|
||||
Update this OrderedSet to keep only items in another set, preserving
|
||||
their order in this set.
|
||||
|
||||
Example:
|
||||
>>> this = OrderedSet([1, 4, 3, 5, 7])
|
||||
>>> other = OrderedSet([9, 7, 1, 3, 2])
|
||||
>>> this.intersection_update(other)
|
||||
>>> print(this)
|
||||
OrderedSet([1, 3, 7])
|
||||
"""
|
||||
other = set(other)
|
||||
self._update_items([item for item in self.items if item in other])
|
||||
|
||||
def symmetric_difference_update(self, other):
|
||||
"""
|
||||
Update this OrderedSet to remove items from another set, then
|
||||
add items from the other set that were not present in this set.
|
||||
|
||||
Example:
|
||||
>>> this = OrderedSet([1, 4, 3, 5, 7])
|
||||
>>> other = OrderedSet([9, 7, 1, 3, 2])
|
||||
>>> this.symmetric_difference_update(other)
|
||||
>>> print(this)
|
||||
OrderedSet([4, 5, 9, 2])
|
||||
"""
|
||||
items_to_add = [item for item in other if item not in self]
|
||||
items_to_remove = set(other)
|
||||
self._update_items(
|
||||
[item for item in self.items if item not in items_to_remove] + items_to_add
|
||||
)
|
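# Usage sketch (illustrative): OrderedSet keeps first-seen order and supports
# index lookups in both directions, which a plain set cannot do.
#
#     >>> words = OrderedSet("the quick brown fox the lazy dog the".split())
#     >>> words
#     OrderedSet(['the', 'quick', 'brown', 'fox', 'lazy', 'dog'])
#     >>> words.index('fox'), words[3]
#     (3, 'fox')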
@@ -0,0 +1,26 @@
|
||||
# This file is dual licensed under the terms of the Apache License, Version
|
||||
# 2.0, and the BSD License. See the LICENSE file in the root of this repository
|
||||
# for complete details.
|
||||
|
||||
__all__ = [
|
||||
"__title__",
|
||||
"__summary__",
|
||||
"__uri__",
|
||||
"__version__",
|
||||
"__author__",
|
||||
"__email__",
|
||||
"__license__",
|
||||
"__copyright__",
|
||||
]
|
||||
|
||||
__title__ = "packaging"
|
||||
__summary__ = "Core utilities for Python packages"
|
||||
__uri__ = "https://github.com/pypa/packaging"
|
||||
|
||||
__version__ = "21.2"
|
||||
|
||||
__author__ = "Donald Stufft and individual contributors"
|
||||
__email__ = "donald@stufft.io"
|
||||
|
||||
__license__ = "BSD-2-Clause or Apache-2.0"
|
||||
__copyright__ = "2014-2019 %s" % __author__
|
@@ -0,0 +1,25 @@
|
||||
# This file is dual licensed under the terms of the Apache License, Version
|
||||
# 2.0, and the BSD License. See the LICENSE file in the root of this repository
|
||||
# for complete details.
|
||||
|
||||
from .__about__ import (
|
||||
__author__,
|
||||
__copyright__,
|
||||
__email__,
|
||||
__license__,
|
||||
__summary__,
|
||||
__title__,
|
||||
__uri__,
|
||||
__version__,
|
||||
)
|
||||
|
||||
__all__ = [
|
||||
"__title__",
|
||||
"__summary__",
|
||||
"__uri__",
|
||||
"__version__",
|
||||
"__author__",
|
||||
"__email__",
|
||||
"__license__",
|
||||
"__copyright__",
|
||||
]
|
@@ -0,0 +1,301 @@
|
||||
import collections
|
||||
import functools
|
||||
import os
|
||||
import re
|
||||
import struct
|
||||
import sys
|
||||
import warnings
|
||||
from typing import IO, Dict, Iterator, NamedTuple, Optional, Tuple
|
||||
|
||||
|
||||
# Python does not provide platform information at sufficient granularity to
|
||||
# identify the architecture of the running executable in some cases, so we
|
||||
# determine it dynamically by reading the information from the running
|
||||
# process. This only applies on Linux, which uses the ELF format.
|
||||
class _ELFFileHeader:
|
||||
# https://en.wikipedia.org/wiki/Executable_and_Linkable_Format#File_header
|
||||
class _InvalidELFFileHeader(ValueError):
|
||||
"""
|
||||
An invalid ELF file header was found.
|
||||
"""
|
||||
|
||||
ELF_MAGIC_NUMBER = 0x7F454C46
|
||||
ELFCLASS32 = 1
|
||||
ELFCLASS64 = 2
|
||||
ELFDATA2LSB = 1
|
||||
ELFDATA2MSB = 2
|
||||
EM_386 = 3
|
||||
EM_S390 = 22
|
||||
EM_ARM = 40
|
||||
EM_X86_64 = 62
|
||||
EF_ARM_ABIMASK = 0xFF000000
|
||||
EF_ARM_ABI_VER5 = 0x05000000
|
||||
EF_ARM_ABI_FLOAT_HARD = 0x00000400
|
||||
|
||||
def __init__(self, file: IO[bytes]) -> None:
|
||||
def unpack(fmt: str) -> int:
|
||||
try:
|
||||
data = file.read(struct.calcsize(fmt))
|
||||
result: Tuple[int, ...] = struct.unpack(fmt, data)
|
||||
except struct.error:
|
||||
raise _ELFFileHeader._InvalidELFFileHeader()
|
||||
return result[0]
|
||||
|
||||
self.e_ident_magic = unpack(">I")
|
||||
if self.e_ident_magic != self.ELF_MAGIC_NUMBER:
|
||||
raise _ELFFileHeader._InvalidELFFileHeader()
|
||||
self.e_ident_class = unpack("B")
|
||||
if self.e_ident_class not in {self.ELFCLASS32, self.ELFCLASS64}:
|
||||
raise _ELFFileHeader._InvalidELFFileHeader()
|
||||
self.e_ident_data = unpack("B")
|
||||
if self.e_ident_data not in {self.ELFDATA2LSB, self.ELFDATA2MSB}:
|
||||
raise _ELFFileHeader._InvalidELFFileHeader()
|
||||
self.e_ident_version = unpack("B")
|
||||
self.e_ident_osabi = unpack("B")
|
||||
self.e_ident_abiversion = unpack("B")
|
||||
self.e_ident_pad = file.read(7)
|
||||
format_h = "<H" if self.e_ident_data == self.ELFDATA2LSB else ">H"
|
||||
format_i = "<I" if self.e_ident_data == self.ELFDATA2LSB else ">I"
|
||||
format_q = "<Q" if self.e_ident_data == self.ELFDATA2LSB else ">Q"
|
||||
format_p = format_i if self.e_ident_class == self.ELFCLASS32 else format_q
|
||||
self.e_type = unpack(format_h)
|
||||
self.e_machine = unpack(format_h)
|
||||
self.e_version = unpack(format_i)
|
||||
self.e_entry = unpack(format_p)
|
||||
self.e_phoff = unpack(format_p)
|
||||
self.e_shoff = unpack(format_p)
|
||||
self.e_flags = unpack(format_i)
|
||||
self.e_ehsize = unpack(format_h)
|
||||
self.e_phentsize = unpack(format_h)
|
||||
self.e_phnum = unpack(format_h)
|
||||
self.e_shentsize = unpack(format_h)
|
||||
self.e_shnum = unpack(format_h)
|
||||
self.e_shstrndx = unpack(format_h)
|
||||
|
||||
|
||||
def _get_elf_header() -> Optional[_ELFFileHeader]:
|
||||
try:
|
||||
with open(sys.executable, "rb") as f:
|
||||
elf_header = _ELFFileHeader(f)
|
||||
except (OSError, TypeError, _ELFFileHeader._InvalidELFFileHeader):
|
||||
return None
|
||||
return elf_header
|
||||
|
||||
|
||||
def _is_linux_armhf() -> bool:
|
||||
# hard-float ABI can be detected from the ELF header of the running
|
||||
# process
|
||||
# https://static.docs.arm.com/ihi0044/g/aaelf32.pdf
|
||||
elf_header = _get_elf_header()
|
||||
if elf_header is None:
|
||||
return False
|
||||
result = elf_header.e_ident_class == elf_header.ELFCLASS32
|
||||
result &= elf_header.e_ident_data == elf_header.ELFDATA2LSB
|
||||
result &= elf_header.e_machine == elf_header.EM_ARM
|
||||
result &= (
|
||||
elf_header.e_flags & elf_header.EF_ARM_ABIMASK
|
||||
) == elf_header.EF_ARM_ABI_VER5
|
||||
result &= (
|
||||
elf_header.e_flags & elf_header.EF_ARM_ABI_FLOAT_HARD
|
||||
) == elf_header.EF_ARM_ABI_FLOAT_HARD
|
||||
return result
|
||||
|
||||
|
||||
def _is_linux_i686() -> bool:
|
||||
elf_header = _get_elf_header()
|
||||
if elf_header is None:
|
||||
return False
|
||||
result = elf_header.e_ident_class == elf_header.ELFCLASS32
|
||||
result &= elf_header.e_ident_data == elf_header.ELFDATA2LSB
|
||||
result &= elf_header.e_machine == elf_header.EM_386
|
||||
return result
|
||||
|
||||
|
||||
def _have_compatible_abi(arch: str) -> bool:
|
||||
if arch == "armv7l":
|
||||
return _is_linux_armhf()
|
||||
if arch == "i686":
|
||||
return _is_linux_i686()
|
||||
return arch in {"x86_64", "aarch64", "ppc64", "ppc64le", "s390x"}
|
||||
|
||||
|
||||
# If glibc ever changes its major version, we need to know what the last
|
||||
# minor version was, so we can build the complete list of all versions.
|
||||
# For now, guess what the highest minor version might be, assume it will
|
||||
# be 50 for testing. Once this actually happens, update the dictionary
|
||||
# with the actual value.
|
||||
_LAST_GLIBC_MINOR: Dict[int, int] = collections.defaultdict(lambda: 50)
|
||||
|
||||
|
||||
class _GLibCVersion(NamedTuple):
|
||||
major: int
|
||||
minor: int
|
||||
|
||||
|
||||
def _glibc_version_string_confstr() -> Optional[str]:
|
||||
"""
|
||||
Primary implementation of glibc_version_string using os.confstr.
|
||||
"""
|
||||
# os.confstr is quite a bit faster than ctypes.DLL. It's also less likely
|
||||
# to be broken or missing. This strategy is used in the standard library
|
||||
# platform module.
|
||||
# https://github.com/python/cpython/blob/fcf1d003bf4f0100c/Lib/platform.py#L175-L183
|
||||
try:
|
||||
# os.confstr("CS_GNU_LIBC_VERSION") returns a string like "glibc 2.17".
|
||||
version_string = os.confstr("CS_GNU_LIBC_VERSION")
|
||||
assert version_string is not None
|
||||
_, version = version_string.split()
|
||||
except (AssertionError, AttributeError, OSError, ValueError):
|
||||
# os.confstr() or CS_GNU_LIBC_VERSION not available (or a bad value)...
|
||||
return None
|
||||
return version
|
||||
|
||||
|
||||
def _glibc_version_string_ctypes() -> Optional[str]:
|
||||
"""
|
||||
Fallback implementation of glibc_version_string using ctypes.
|
||||
"""
|
||||
try:
|
||||
import ctypes
|
||||
except ImportError:
|
||||
return None
|
||||
|
||||
# ctypes.CDLL(None) internally calls dlopen(NULL), and as the dlopen
|
||||
# manpage says, "If filename is NULL, then the returned handle is for the
|
||||
# main program". This way we can let the linker do the work to figure out
|
||||
# which libc our process is actually using.
|
||||
#
|
||||
# We must also handle the special case where the executable is not a
|
||||
# dynamically linked executable. This can occur when using musl libc,
|
||||
# for example. In this situation, dlopen() will error, leading to an
|
||||
# OSError. Interestingly, at least in the case of musl, there is no
|
||||
# errno set on the OSError. The single string argument used to construct
|
||||
# OSError comes from libc itself and is therefore not portable to
|
||||
# hard code here. In any case, failure to call dlopen() means we
|
||||
# can't proceed, so we bail on our attempt.
|
||||
try:
|
||||
process_namespace = ctypes.CDLL(None)
|
||||
except OSError:
|
||||
return None
|
||||
|
||||
try:
|
||||
gnu_get_libc_version = process_namespace.gnu_get_libc_version
|
||||
except AttributeError:
|
||||
# Symbol doesn't exist -> therefore, we are not linked to
|
||||
# glibc.
|
||||
return None
|
||||
|
||||
# Call gnu_get_libc_version, which returns a string like "2.5"
|
||||
gnu_get_libc_version.restype = ctypes.c_char_p
|
||||
version_str: str = gnu_get_libc_version()
|
||||
# py2 / py3 compatibility:
|
||||
if not isinstance(version_str, str):
|
||||
version_str = version_str.decode("ascii")
|
||||
|
||||
return version_str
|
||||
|
||||
|
||||
def _glibc_version_string() -> Optional[str]:
|
||||
"""Returns glibc version string, or None if not using glibc."""
|
||||
return _glibc_version_string_confstr() or _glibc_version_string_ctypes()
|
||||
|
||||
|
||||
def _parse_glibc_version(version_str: str) -> Tuple[int, int]:
|
||||
"""Parse glibc version.
|
||||
|
||||
We use a regexp instead of str.split because we want to discard any
|
||||
random junk that might come after the minor version -- this might happen
|
||||
in patched/forked versions of glibc (e.g. Linaro's version of glibc
|
||||
uses version strings like "2.20-2014.11"). See gh-3588.
|
||||
"""
|
||||
m = re.match(r"(?P<major>[0-9]+)\.(?P<minor>[0-9]+)", version_str)
|
||||
if not m:
|
||||
warnings.warn(
|
||||
"Expected glibc version with 2 components major.minor,"
|
||||
" got: %s" % version_str,
|
||||
RuntimeWarning,
|
||||
)
|
||||
return -1, -1
|
||||
return int(m.group("major")), int(m.group("minor"))
|
||||
|
||||
|
||||
@functools.lru_cache()
|
||||
def _get_glibc_version() -> Tuple[int, int]:
|
||||
version_str = _glibc_version_string()
|
||||
if version_str is None:
|
||||
return (-1, -1)
|
||||
return _parse_glibc_version(version_str)
|
||||
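# Usage sketch (illustrative): _parse_glibc_version tolerates vendor suffixes
# after the minor version but requires a leading major.minor pair.
#
#     >>> _parse_glibc_version("2.20-2014.11")
#     (2, 20)
#     >>> _parse_glibc_version("2.31")
#     (2, 31)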
|
||||
|
||||
# From PEP 513, PEP 600
|
||||
def _is_compatible(name: str, arch: str, version: _GLibCVersion) -> bool:
|
||||
sys_glibc = _get_glibc_version()
|
||||
if sys_glibc < version:
|
||||
return False
|
||||
# Check for presence of _manylinux module.
|
||||
try:
|
||||
import _manylinux # noqa
|
||||
except ImportError:
|
||||
return True
|
||||
if hasattr(_manylinux, "manylinux_compatible"):
|
||||
result = _manylinux.manylinux_compatible(version[0], version[1], arch)
|
||||
if result is not None:
|
||||
return bool(result)
|
||||
return True
|
||||
if version == _GLibCVersion(2, 5):
|
||||
if hasattr(_manylinux, "manylinux1_compatible"):
|
||||
return bool(_manylinux.manylinux1_compatible)
|
||||
if version == _GLibCVersion(2, 12):
|
||||
if hasattr(_manylinux, "manylinux2010_compatible"):
|
||||
return bool(_manylinux.manylinux2010_compatible)
|
||||
if version == _GLibCVersion(2, 17):
|
||||
if hasattr(_manylinux, "manylinux2014_compatible"):
|
||||
return bool(_manylinux.manylinux2014_compatible)
|
||||
return True
|
||||
|
||||
|
||||
_LEGACY_MANYLINUX_MAP = {
|
||||
# CentOS 7 w/ glibc 2.17 (PEP 599)
|
||||
(2, 17): "manylinux2014",
|
||||
# CentOS 6 w/ glibc 2.12 (PEP 571)
|
||||
(2, 12): "manylinux2010",
|
||||
# CentOS 5 w/ glibc 2.5 (PEP 513)
|
||||
(2, 5): "manylinux1",
|
||||
}
|
||||
|
||||
|
||||
def platform_tags(linux: str, arch: str) -> Iterator[str]:
|
||||
if not _have_compatible_abi(arch):
|
||||
return
|
||||
# Oldest glibc to be supported regardless of architecture is (2, 17).
|
||||
too_old_glibc2 = _GLibCVersion(2, 16)
|
||||
if arch in {"x86_64", "i686"}:
|
||||
# On x86_64 and i686, the oldest glibc to be supported is (2, 5).
|
||||
too_old_glibc2 = _GLibCVersion(2, 4)
|
||||
current_glibc = _GLibCVersion(*_get_glibc_version())
|
||||
glibc_max_list = [current_glibc]
|
||||
# We can assume compatibility across glibc major versions.
|
||||
# https://sourceware.org/bugzilla/show_bug.cgi?id=24636
|
||||
#
|
||||
# Build a list of maximum glibc versions so that we can
|
||||
# output the canonical list of all glibc from current_glibc
|
||||
# down to too_old_glibc2, including all intermediary versions.
|
||||
for glibc_major in range(current_glibc.major - 1, 1, -1):
|
||||
glibc_minor = _LAST_GLIBC_MINOR[glibc_major]
|
||||
glibc_max_list.append(_GLibCVersion(glibc_major, glibc_minor))
|
||||
for glibc_max in glibc_max_list:
|
||||
if glibc_max.major == too_old_glibc2.major:
|
||||
min_minor = too_old_glibc2.minor
|
||||
else:
|
||||
# For other glibc major versions oldest supported is (x, 0).
|
||||
min_minor = -1
|
||||
for glibc_minor in range(glibc_max.minor, min_minor, -1):
|
||||
glibc_version = _GLibCVersion(glibc_max.major, glibc_minor)
|
||||
tag = "manylinux_{}_{}".format(*glibc_version)
|
||||
if _is_compatible(tag, arch, glibc_version):
|
||||
yield linux.replace("linux", tag)
|
||||
# Handle the legacy manylinux1, manylinux2010, manylinux2014 tags.
|
||||
if glibc_version in _LEGACY_MANYLINUX_MAP:
|
||||
legacy_tag = _LEGACY_MANYLINUX_MAP[glibc_version]
|
||||
if _is_compatible(legacy_tag, arch, glibc_version):
|
||||
yield linux.replace("linux", legacy_tag)
|
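# Usage sketch (illustrative; the output depends on the glibc of the running
# interpreter): on an x86_64 host with glibc 2.31 this yields
# manylinux_2_31_x86_64 down through manylinux_2_5_x86_64, with the legacy
# aliases (manylinux2014, manylinux2010, manylinux1) interleaved.
#
#     >>> list(platform_tags("linux_x86_64", "x86_64")) # doctest:+SKIP
#     ['manylinux_2_31_x86_64', ..., 'manylinux1_x86_64']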
@@ -0,0 +1,136 @@
|
||||
"""PEP 656 support.
|
||||
|
||||
This module implements logic to detect if the currently running Python is
|
||||
linked against musl, and what musl version is used.
|
||||
"""
|
||||
|
||||
import contextlib
|
||||
import functools
|
||||
import operator
|
||||
import os
|
||||
import re
|
||||
import struct
|
||||
import subprocess
|
||||
import sys
|
||||
from typing import IO, Iterator, NamedTuple, Optional, Tuple
|
||||
|
||||
|
||||
def _read_unpacked(f: IO[bytes], fmt: str) -> Tuple[int, ...]:
|
||||
return struct.unpack(fmt, f.read(struct.calcsize(fmt)))
|
||||
|
||||
|
||||
def _parse_ld_musl_from_elf(f: IO[bytes]) -> Optional[str]:
|
||||
"""Detect musl libc location by parsing the Python executable.
|
||||
|
||||
Based on: https://gist.github.com/lyssdod/f51579ae8d93c8657a5564aefc2ffbca
|
||||
ELF header: https://refspecs.linuxfoundation.org/elf/gabi4+/ch4.eheader.html
|
||||
"""
|
||||
f.seek(0)
|
||||
try:
|
||||
ident = _read_unpacked(f, "16B")
|
||||
except struct.error:
|
||||
return None
|
||||
if ident[:4] != tuple(b"\x7fELF"): # Invalid magic, not ELF.
|
||||
return None
|
||||
f.seek(struct.calcsize("HHI"), 1) # Skip file type, machine, and version.
|
||||
|
||||
try:
|
||||
# e_fmt: Format for program header.
|
||||
# p_fmt: Format for section header.
|
||||
# p_idx: Indexes to find p_type, p_offset, and p_filesz.
|
||||
e_fmt, p_fmt, p_idx = {
|
||||
1: ("IIIIHHH", "IIIIIIII", (0, 1, 4)), # 32-bit.
|
||||
2: ("QQQIHHH", "IIQQQQQQ", (0, 2, 5)), # 64-bit.
|
||||
}[ident[4]]
|
||||
except KeyError:
|
||||
return None
|
||||
else:
|
||||
p_get = operator.itemgetter(*p_idx)
|
||||
|
||||
# Find the interpreter section and return its content.
|
||||
try:
|
||||
_, e_phoff, _, _, _, e_phentsize, e_phnum = _read_unpacked(f, e_fmt)
|
||||
except struct.error:
|
||||
return None
|
||||
for i in range(e_phnum + 1):
|
||||
f.seek(e_phoff + e_phentsize * i)
|
||||
try:
|
||||
p_type, p_offset, p_filesz = p_get(_read_unpacked(f, p_fmt))
|
||||
except struct.error:
|
||||
return None
|
||||
if p_type != 3: # Not PT_INTERP.
|
||||
continue
|
||||
f.seek(p_offset)
|
||||
interpreter = os.fsdecode(f.read(p_filesz)).strip("\0")
|
||||
if "musl" not in interpreter:
|
||||
return None
|
||||
return interpreter
|
||||
return None
|
||||
|
||||
|
||||
class _MuslVersion(NamedTuple):
|
||||
major: int
|
||||
minor: int
|
||||
|
||||
|
||||
def _parse_musl_version(output: str) -> Optional[_MuslVersion]:
|
||||
lines = [n for n in (n.strip() for n in output.splitlines()) if n]
|
||||
if len(lines) < 2 or lines[0][:4] != "musl":
|
||||
return None
|
||||
m = re.match(r"Version (\d+)\.(\d+)", lines[1])
|
||||
if not m:
|
||||
return None
|
||||
return _MuslVersion(major=int(m.group(1)), minor=int(m.group(2)))
|
||||
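# Usage sketch (illustrative): only the two-line musl banner documented in
# _get_musl_version below is accepted; anything else returns None.
#
#     >>> _parse_musl_version("musl libc (x86_64)\nVersion 1.2.2")
#     _MuslVersion(major=1, minor=2)
#     >>> _parse_musl_version("glibc 2.31") is None
#     True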
|
||||
|
||||
@functools.lru_cache()
|
||||
def _get_musl_version(executable: str) -> Optional[_MuslVersion]:
|
||||
"""Detect currently-running musl runtime version.
|
||||
|
||||
This is done by checking the specified executable's dynamic linking
|
||||
information, and invoking the loader to parse its output for a version
|
||||
string. If the loader is musl, the output would be something like::
|
||||
|
||||
musl libc (x86_64)
|
||||
Version 1.2.2
|
||||
Dynamic Program Loader
|
||||
"""
|
||||
with contextlib.ExitStack() as stack:
|
||||
try:
|
||||
f = stack.enter_context(open(executable, "rb"))
|
||||
except IOError:
|
||||
return None
|
||||
ld = _parse_ld_musl_from_elf(f)
|
||||
if not ld:
|
||||
return None
|
||||
proc = subprocess.run([ld], stderr=subprocess.PIPE, universal_newlines=True)
|
||||
return _parse_musl_version(proc.stderr)
|
||||
|
||||
|
||||
def platform_tags(arch: str) -> Iterator[str]:
|
||||
"""Generate musllinux tags compatible to the current platform.
|
||||
|
||||
:param arch: Should be the part of platform tag after the ``linux_``
|
||||
prefix, e.g. ``x86_64``. The ``linux_`` prefix is assumed as a
|
||||
prerequisite for the current platform to be musllinux-compatible.
|
||||
|
||||
:returns: An iterator of compatible musllinux tags.
|
||||
"""
|
||||
sys_musl = _get_musl_version(sys.executable)
|
||||
if sys_musl is None: # Python not dynamically linked against musl.
|
||||
return
|
||||
for minor in range(sys_musl.minor, -1, -1):
|
||||
yield f"musllinux_{sys_musl.major}_{minor}_{arch}"
|
||||
|
||||
|
||||
if __name__ == "__main__": # pragma: no cover
|
||||
import sysconfig
|
||||
|
||||
plat = sysconfig.get_platform()
|
||||
assert plat.startswith("linux-"), "not linux"
|
||||
|
||||
print("plat:", plat)
|
||||
print("musl:", _get_musl_version(sys.executable))
|
||||
print("tags:", end=" ")
|
||||
for t in platform_tags(re.sub(r"[.-]", "_", plat.split("-", 1)[-1])):
|
||||
print(t, end="\n ")
|
@@ -0,0 +1,67 @@
|
||||
# This file is dual licensed under the terms of the Apache License, Version
|
||||
# 2.0, and the BSD License. See the LICENSE file in the root of this repository
|
||||
# for complete details.
|
||||
|
||||
|
||||
class InfinityType:
|
||||
def __repr__(self) -> str:
|
||||
return "Infinity"
|
||||
|
||||
def __hash__(self) -> int:
|
||||
return hash(repr(self))
|
||||
|
||||
def __lt__(self, other: object) -> bool:
|
||||
return False
|
||||
|
||||
def __le__(self, other: object) -> bool:
|
||||
return False
|
||||
|
||||
def __eq__(self, other: object) -> bool:
|
||||
return isinstance(other, self.__class__)
|
||||
|
||||
def __ne__(self, other: object) -> bool:
|
||||
return not isinstance(other, self.__class__)
|
||||
|
||||
def __gt__(self, other: object) -> bool:
|
||||
return True
|
||||
|
||||
def __ge__(self, other: object) -> bool:
|
||||
return True
|
||||
|
||||
def __neg__(self: object) -> "NegativeInfinityType":
|
||||
return NegativeInfinity
|
||||
|
||||
|
||||
Infinity = InfinityType()
|
||||
|
||||
|
||||
class NegativeInfinityType:
|
||||
def __repr__(self) -> str:
|
||||
return "-Infinity"
|
||||
|
||||
def __hash__(self) -> int:
|
||||
return hash(repr(self))
|
||||
|
||||
def __lt__(self, other: object) -> bool:
|
||||
return True
|
||||
|
||||
def __le__(self, other: object) -> bool:
|
||||
return True
|
||||
|
||||
def __eq__(self, other: object) -> bool:
|
||||
return isinstance(other, self.__class__)
|
||||
|
||||
def __ne__(self, other: object) -> bool:
|
||||
return not isinstance(other, self.__class__)
|
||||
|
||||
def __gt__(self, other: object) -> bool:
|
||||
return False
|
||||
|
||||
def __ge__(self, other: object) -> bool:
|
||||
return False
|
||||
|
||||
def __neg__(self: object) -> InfinityType:
|
||||
return Infinity
|
||||
|
||||
|
||||
NegativeInfinity = NegativeInfinityType()
|
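# Usage sketch (illustrative): the two singletons act as total-order
# sentinels, comparing above/below every other object regardless of type,
# which makes them useful for padding sort keys of mixed length.
#
#     >>> sorted([5, Infinity, -1, NegativeInfinity])
#     [-Infinity, -1, 5, Infinity]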
@@ -0,0 +1,304 @@
|
||||
# This file is dual licensed under the terms of the Apache License, Version
|
||||
# 2.0, and the BSD License. See the LICENSE file in the root of this repository
|
||||
# for complete details.
|
||||
|
||||
import operator
|
||||
import os
|
||||
import platform
|
||||
import sys
|
||||
from typing import Any, Callable, Dict, List, Optional, Tuple, Union
|
||||
|
||||
from setuptools.extern.pyparsing import ( # noqa: N817
|
||||
Forward,
|
||||
Group,
|
||||
Literal as L,
|
||||
ParseException,
|
||||
ParseResults,
|
||||
QuotedString,
|
||||
ZeroOrMore,
|
||||
stringEnd,
|
||||
stringStart,
|
||||
)
|
||||
|
||||
from .specifiers import InvalidSpecifier, Specifier
|
||||
|
||||
__all__ = [
|
||||
"InvalidMarker",
|
||||
"UndefinedComparison",
|
||||
"UndefinedEnvironmentName",
|
||||
"Marker",
|
||||
"default_environment",
|
||||
]
|
||||
|
||||
Operator = Callable[[str, str], bool]
|
||||
|
||||
|
||||
class InvalidMarker(ValueError):
|
||||
"""
|
||||
An invalid marker was found, users should refer to PEP 508.
|
||||
"""
|
||||
|
||||
|
||||
class UndefinedComparison(ValueError):
|
||||
"""
|
||||
An invalid operation was attempted on a value that doesn't support it.
|
||||
"""
|
||||
|
||||
|
||||
class UndefinedEnvironmentName(ValueError):
|
||||
"""
|
||||
A name was used that does not exist inside of the
|
||||
environment.
|
||||
"""
|
||||
|
||||
|
||||
class Node:
|
||||
def __init__(self, value: Any) -> None:
|
||||
self.value = value
|
||||
|
||||
def __str__(self) -> str:
|
||||
return str(self.value)
|
||||
|
||||
def __repr__(self) -> str:
|
||||
return f"<{self.__class__.__name__}('{self}')>"
|
||||
|
||||
def serialize(self) -> str:
|
||||
raise NotImplementedError
|
||||
|
||||
|
||||
class Variable(Node):
|
||||
def serialize(self) -> str:
|
||||
return str(self)
|
||||
|
||||
|
||||
class Value(Node):
|
||||
def serialize(self) -> str:
|
||||
return f'"{self}"'
|
||||
|
||||
|
||||
class Op(Node):
|
||||
def serialize(self) -> str:
|
||||
return str(self)
|
||||
|
||||
|
||||
VARIABLE = (
|
||||
L("implementation_version")
|
||||
| L("platform_python_implementation")
|
||||
| L("implementation_name")
|
||||
| L("python_full_version")
|
||||
| L("platform_release")
|
||||
| L("platform_version")
|
||||
| L("platform_machine")
|
||||
| L("platform_system")
|
||||
| L("python_version")
|
||||
| L("sys_platform")
|
||||
| L("os_name")
|
||||
| L("os.name") # PEP-345
|
||||
| L("sys.platform") # PEP-345
|
||||
| L("platform.version") # PEP-345
|
||||
| L("platform.machine") # PEP-345
|
||||
| L("platform.python_implementation") # PEP-345
|
||||
| L("python_implementation") # undocumented setuptools legacy
|
||||
| L("extra") # PEP-508
|
||||
)
|
||||
ALIASES = {
|
||||
"os.name": "os_name",
|
||||
"sys.platform": "sys_platform",
|
||||
"platform.version": "platform_version",
|
||||
"platform.machine": "platform_machine",
|
||||
"platform.python_implementation": "platform_python_implementation",
|
||||
"python_implementation": "platform_python_implementation",
|
||||
}
|
||||
VARIABLE.setParseAction(lambda s, l, t: Variable(ALIASES.get(t[0], t[0])))
|
||||
|
||||
VERSION_CMP = (
|
||||
L("===") | L("==") | L(">=") | L("<=") | L("!=") | L("~=") | L(">") | L("<")
|
||||
)
|
||||
|
||||
MARKER_OP = VERSION_CMP | L("not in") | L("in")
|
||||
MARKER_OP.setParseAction(lambda s, l, t: Op(t[0]))
|
||||
|
||||
MARKER_VALUE = QuotedString("'") | QuotedString('"')
|
||||
MARKER_VALUE.setParseAction(lambda s, l, t: Value(t[0]))
|
||||
|
||||
BOOLOP = L("and") | L("or")
|
||||
|
||||
MARKER_VAR = VARIABLE | MARKER_VALUE
|
||||
|
||||
MARKER_ITEM = Group(MARKER_VAR + MARKER_OP + MARKER_VAR)
|
||||
MARKER_ITEM.setParseAction(lambda s, l, t: tuple(t[0]))
|
||||
|
||||
LPAREN = L("(").suppress()
|
||||
RPAREN = L(")").suppress()
|
||||
|
||||
MARKER_EXPR = Forward()
|
||||
MARKER_ATOM = MARKER_ITEM | Group(LPAREN + MARKER_EXPR + RPAREN)
|
||||
MARKER_EXPR << MARKER_ATOM + ZeroOrMore(BOOLOP + MARKER_EXPR)
|
||||
|
||||
MARKER = stringStart + MARKER_EXPR + stringEnd
|
||||
|
||||
|
||||
def _coerce_parse_result(results: Union[ParseResults, List[Any]]) -> List[Any]:
|
||||
if isinstance(results, ParseResults):
|
||||
return [_coerce_parse_result(i) for i in results]
|
||||
else:
|
||||
return results
|
||||
|
||||
|
||||
def _format_marker(
|
||||
marker: Union[List[str], Tuple[Node, ...], str], first: Optional[bool] = True
|
||||
) -> str:
|
||||
|
||||
assert isinstance(marker, (list, tuple, str))
|
||||
|
||||
# Sometimes we have a structure like [[...]] which is a single item list
|
||||
# where the single item is itself its own list. In that case we want to skip
|
||||
# the rest of this function so that we don't get extraneous () on the
|
||||
# outside.
|
||||
if (
|
||||
isinstance(marker, list)
|
||||
and len(marker) == 1
|
||||
and isinstance(marker[0], (list, tuple))
|
||||
):
|
||||
return _format_marker(marker[0])
|
||||
|
||||
if isinstance(marker, list):
|
||||
inner = (_format_marker(m, first=False) for m in marker)
|
||||
if first:
|
||||
return " ".join(inner)
|
||||
else:
|
||||
return "(" + " ".join(inner) + ")"
|
||||
elif isinstance(marker, tuple):
|
||||
return " ".join([m.serialize() for m in marker])
|
||||
else:
|
||||
return marker
|
||||
|
||||
|
||||
_operators: Dict[str, Operator] = {
|
||||
"in": lambda lhs, rhs: lhs in rhs,
|
||||
"not in": lambda lhs, rhs: lhs not in rhs,
|
||||
"<": operator.lt,
|
||||
"<=": operator.le,
|
||||
"==": operator.eq,
|
||||
"!=": operator.ne,
|
||||
">=": operator.ge,
|
||||
">": operator.gt,
|
||||
}
|
||||
|
||||
|
||||
def _eval_op(lhs: str, op: Op, rhs: str) -> bool:
|
||||
try:
|
||||
spec = Specifier("".join([op.serialize(), rhs]))
|
||||
except InvalidSpecifier:
|
||||
pass
|
||||
else:
|
||||
return spec.contains(lhs)
|
||||
|
||||
oper: Optional[Operator] = _operators.get(op.serialize())
|
||||
if oper is None:
|
||||
raise UndefinedComparison(f"Undefined {op!r} on {lhs!r} and {rhs!r}.")
|
||||
|
||||
return oper(lhs, rhs)
|
||||
|
||||
|
||||
class Undefined:
|
||||
pass
|
||||
|
||||
|
||||
_undefined = Undefined()
|
||||
|
||||
|
||||
def _get_env(environment: Dict[str, str], name: str) -> str:
|
||||
value: Union[str, Undefined] = environment.get(name, _undefined)
|
||||
|
||||
if isinstance(value, Undefined):
|
||||
raise UndefinedEnvironmentName(
|
||||
f"{name!r} does not exist in evaluation environment."
|
||||
)
|
||||
|
||||
return value
|
||||
|
||||
|
||||
def _evaluate_markers(markers: List[Any], environment: Dict[str, str]) -> bool:
|
||||
groups: List[List[bool]] = [[]]
|
||||
|
||||
for marker in markers:
|
||||
assert isinstance(marker, (list, tuple, str))
|
||||
|
||||
if isinstance(marker, list):
|
||||
groups[-1].append(_evaluate_markers(marker, environment))
|
||||
elif isinstance(marker, tuple):
|
||||
lhs, op, rhs = marker
|
||||
|
||||
if isinstance(lhs, Variable):
|
||||
lhs_value = _get_env(environment, lhs.value)
|
||||
rhs_value = rhs.value
|
||||
else:
|
||||
lhs_value = lhs.value
|
||||
rhs_value = _get_env(environment, rhs.value)
|
||||
|
||||
groups[-1].append(_eval_op(lhs_value, op, rhs_value))
|
||||
else:
|
||||
assert marker in ["and", "or"]
|
||||
if marker == "or":
|
||||
groups.append([])
|
||||
|
||||
return any(all(item) for item in groups)
|
||||
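# Note (illustrative sketch, using the Marker class defined below): the
# `groups` list encodes PEP 508 precedence -- "and" binds more tightly than
# "or", each inner list being one "and" clause combined by any(all(...)).
#
#     >>> m = Marker('os_name == "a" or os_name == "b" and os_name == "c"')
#     >>> m.evaluate({"os_name": "a"})
#     True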
|
||||
|
||||
def format_full_version(info: "sys._version_info") -> str:
|
||||
version = "{0.major}.{0.minor}.{0.micro}".format(info)
|
||||
kind = info.releaselevel
|
||||
if kind != "final":
|
||||
version += kind[0] + str(info.serial)
|
||||
return version
|
||||
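# Usage sketch (illustrative, using a stand-in namedtuple for
# sys.implementation.version): non-final release levels are abbreviated to
# their first letter plus the serial.
#
#     >>> from collections import namedtuple
#     >>> VI = namedtuple("VI", "major minor micro releaselevel serial")
#     >>> format_full_version(VI(3, 11, 0, "candidate", 1))
#     '3.11.0c1'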
|
||||
|
||||
def default_environment() -> Dict[str, str]:
|
||||
iver = format_full_version(sys.implementation.version)
|
||||
implementation_name = sys.implementation.name
|
||||
return {
|
||||
"implementation_name": implementation_name,
|
||||
"implementation_version": iver,
|
||||
"os_name": os.name,
|
||||
"platform_machine": platform.machine(),
|
||||
"platform_release": platform.release(),
|
||||
"platform_system": platform.system(),
|
||||
"platform_version": platform.version(),
|
||||
"python_full_version": platform.python_version(),
|
||||
"platform_python_implementation": platform.python_implementation(),
|
||||
"python_version": ".".join(platform.python_version_tuple()[:2]),
|
||||
"sys_platform": sys.platform,
|
||||
}
|
||||
|
||||
|
||||
class Marker:
|
||||
def __init__(self, marker: str) -> None:
|
||||
try:
|
||||
self._markers = _coerce_parse_result(MARKER.parseString(marker))
|
||||
except ParseException as e:
|
||||
raise InvalidMarker(
|
||||
f"Invalid marker: {marker!r}, parse error at "
|
||||
f"{marker[e.loc : e.loc + 8]!r}"
|
||||
)
|
||||
|
||||
def __str__(self) -> str:
|
||||
return _format_marker(self._markers)
|
||||
|
||||
def __repr__(self) -> str:
|
||||
return f"<Marker('{self}')>"
|
||||
|
||||
def evaluate(self, environment: Optional[Dict[str, str]] = None) -> bool:
|
||||
"""Evaluate a marker.
|
||||
|
||||
Return the boolean from evaluating the given marker against the
|
||||
environment. environment is an optional argument to override all or
|
||||
part of the determined environment.
|
||||
|
||||
The environment is determined from the current Python process.
|
||||
"""
|
||||
current_environment = default_environment()
|
||||
if environment is not None:
|
||||
current_environment.update(environment)
|
||||
|
||||
return _evaluate_markers(self._markers, current_environment)
|
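# Usage sketch (illustrative): evaluate() against an overridden environment
# answers questions like "would this dependency apply on Windows?".
#
#     >>> m = Marker('sys_platform == "win32" and python_version >= "3.6"')
#     >>> m.evaluate({"sys_platform": "win32", "python_version": "3.9"})
#     True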
@@ -0,0 +1,146 @@
|
||||
# This file is dual licensed under the terms of the Apache License, Version
|
||||
# 2.0, and the BSD License. See the LICENSE file in the root of this repository
|
||||
# for complete details.
|
||||
|
||||
import re
|
||||
import string
|
||||
import urllib.parse
|
||||
from typing import List, Optional as TOptional, Set
|
||||
|
||||
from setuptools.extern.pyparsing import ( # noqa
|
||||
Combine,
|
||||
Literal as L,
|
||||
Optional,
|
||||
ParseException,
|
||||
Regex,
|
||||
Word,
|
||||
ZeroOrMore,
|
||||
originalTextFor,
|
||||
stringEnd,
|
||||
stringStart,
|
||||
)
|
||||
|
||||
from .markers import MARKER_EXPR, Marker
|
||||
from .specifiers import LegacySpecifier, Specifier, SpecifierSet
|
||||
|
||||
|
||||
class InvalidRequirement(ValueError):
|
||||
"""
|
||||
An invalid requirement was found, users should refer to PEP 508.
|
||||
"""
|
||||
|
||||
|
||||
ALPHANUM = Word(string.ascii_letters + string.digits)
|
||||
|
||||
LBRACKET = L("[").suppress()
|
||||
RBRACKET = L("]").suppress()
|
||||
LPAREN = L("(").suppress()
|
||||
RPAREN = L(")").suppress()
|
||||
COMMA = L(",").suppress()
|
||||
SEMICOLON = L(";").suppress()
|
||||
AT = L("@").suppress()
|
||||
|
||||
PUNCTUATION = Word("-_.")
|
||||
IDENTIFIER_END = ALPHANUM | (ZeroOrMore(PUNCTUATION) + ALPHANUM)
|
||||
IDENTIFIER = Combine(ALPHANUM + ZeroOrMore(IDENTIFIER_END))
|
||||
|
||||
NAME = IDENTIFIER("name")
|
||||
EXTRA = IDENTIFIER
|
||||
|
||||
URI = Regex(r"[^ ]+")("url")
|
||||
URL = AT + URI
|
||||
|
||||
EXTRAS_LIST = EXTRA + ZeroOrMore(COMMA + EXTRA)
|
||||
EXTRAS = (LBRACKET + Optional(EXTRAS_LIST) + RBRACKET)("extras")
|
||||
|
||||
VERSION_PEP440 = Regex(Specifier._regex_str, re.VERBOSE | re.IGNORECASE)
|
||||
VERSION_LEGACY = Regex(LegacySpecifier._regex_str, re.VERBOSE | re.IGNORECASE)
|
||||
|
||||
VERSION_ONE = VERSION_PEP440 ^ VERSION_LEGACY
|
||||
VERSION_MANY = Combine(
|
||||
VERSION_ONE + ZeroOrMore(COMMA + VERSION_ONE), joinString=",", adjacent=False
|
||||
)("_raw_spec")
|
||||
_VERSION_SPEC = Optional((LPAREN + VERSION_MANY + RPAREN) | VERSION_MANY)
|
||||
_VERSION_SPEC.setParseAction(lambda s, l, t: t._raw_spec or "")
|
||||
|
||||
VERSION_SPEC = originalTextFor(_VERSION_SPEC)("specifier")
|
||||
VERSION_SPEC.setParseAction(lambda s, l, t: t[1])
|
||||
|
||||
MARKER_EXPR = originalTextFor(MARKER_EXPR())("marker")
|
||||
MARKER_EXPR.setParseAction(
|
||||
lambda s, l, t: Marker(s[t._original_start : t._original_end])
|
||||
)
|
||||
MARKER_SEPARATOR = SEMICOLON
|
||||
MARKER = MARKER_SEPARATOR + MARKER_EXPR
|
||||
|
||||
VERSION_AND_MARKER = VERSION_SPEC + Optional(MARKER)
|
||||
URL_AND_MARKER = URL + Optional(MARKER)
|
||||
|
||||
NAMED_REQUIREMENT = NAME + Optional(EXTRAS) + (URL_AND_MARKER | VERSION_AND_MARKER)
|
||||
|
||||
REQUIREMENT = stringStart + NAMED_REQUIREMENT + stringEnd
|
||||
# setuptools.extern.pyparsing isn't thread safe during initialization, so we do it eagerly, see
|
||||
# issue #104
|
||||
REQUIREMENT.parseString("x[]")
|
||||
|
||||
|
||||
class Requirement:
|
||||
"""Parse a requirement.
|
||||
|
||||
Parse a given requirement string into its parts, such as name, specifier,
|
||||
URL, and extras. Raises InvalidRequirement on a badly-formed requirement
|
||||
string.
|
||||
"""
|
||||
|
||||
# TODO: Can we test whether something is contained within a requirement?
|
||||
# If so how do we do that? Do we need to test against the _name_ of
|
||||
# the thing as well as the version? What about the markers?
|
||||
# TODO: Can we normalize the name and extra name?
|
||||
|
||||
def __init__(self, requirement_string: str) -> None:
|
||||
try:
|
||||
req = REQUIREMENT.parseString(requirement_string)
|
||||
except ParseException as e:
|
||||
raise InvalidRequirement(
|
||||
f'Parse error at "{requirement_string[e.loc : e.loc + 8]!r}": {e.msg}'
|
||||
)
|
||||
|
||||
self.name: str = req.name
|
||||
if req.url:
|
||||
parsed_url = urllib.parse.urlparse(req.url)
|
||||
if parsed_url.scheme == "file":
|
||||
if urllib.parse.urlunparse(parsed_url) != req.url:
|
||||
raise InvalidRequirement("Invalid URL given")
|
||||
elif not (parsed_url.scheme and parsed_url.netloc) or (
|
||||
not parsed_url.scheme and not parsed_url.netloc
|
||||
):
|
||||
raise InvalidRequirement(f"Invalid URL: {req.url}")
|
||||
self.url: TOptional[str] = req.url
|
||||
else:
|
||||
self.url = None
|
||||
self.extras: Set[str] = set(req.extras.asList() if req.extras else [])
|
||||
self.specifier: SpecifierSet = SpecifierSet(req.specifier)
|
||||
self.marker: TOptional[Marker] = req.marker if req.marker else None
|
||||
|
||||
def __str__(self) -> str:
|
||||
parts: List[str] = [self.name]
|
||||
|
||||
if self.extras:
|
||||
formatted_extras = ",".join(sorted(self.extras))
|
||||
parts.append(f"[{formatted_extras}]")
|
||||
|
||||
if self.specifier:
|
||||
parts.append(str(self.specifier))
|
||||
|
||||
if self.url:
|
||||
parts.append(f"@ {self.url}")
|
||||
if self.marker:
|
||||
parts.append(" ")
|
||||
|
||||
if self.marker:
|
||||
parts.append(f"; {self.marker}")
|
||||
|
||||
return "".join(parts)
|
||||
|
||||
def __repr__(self) -> str:
|
||||
return f"<Requirement('{self}')>"
|
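# Usage sketch (illustrative): parsing splits a PEP 508 string into its
# name, extras, specifier, and marker parts.
#
#     >>> req = Requirement('requests[security]>=2.8.1; python_version < "2.7"')
#     >>> req.name, sorted(req.extras), str(req.specifier)
#     ('requests', ['security'], '>=2.8.1')
#     >>> str(req.marker)
#     'python_version < "2.7"'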
@@ -0,0 +1,828 @@
|
||||
# This file is dual licensed under the terms of the Apache License, Version
|
||||
# 2.0, and the BSD License. See the LICENSE file in the root of this repository
|
||||
# for complete details.
|
||||
|
||||
import abc
|
||||
import functools
|
||||
import itertools
|
||||
import re
|
||||
import warnings
|
||||
from typing import (
|
||||
Callable,
|
||||
Dict,
|
||||
Iterable,
|
||||
Iterator,
|
||||
List,
|
||||
Optional,
|
||||
Pattern,
|
||||
Set,
|
||||
Tuple,
|
||||
TypeVar,
|
||||
Union,
|
||||
)
|
||||
|
||||
from .utils import canonicalize_version
|
||||
from .version import LegacyVersion, Version, parse
|
||||
|
||||
ParsedVersion = Union[Version, LegacyVersion]
|
||||
UnparsedVersion = Union[Version, LegacyVersion, str]
|
||||
VersionTypeVar = TypeVar("VersionTypeVar", bound=UnparsedVersion)
|
||||
CallableOperator = Callable[[ParsedVersion, str], bool]
|
||||
|
||||
|
||||
class InvalidSpecifier(ValueError):
|
||||
"""
|
||||
An invalid specifier was found, users should refer to PEP 440.
|
||||
"""
|
||||
|
||||
|
||||
class BaseSpecifier(metaclass=abc.ABCMeta):
|
||||
@abc.abstractmethod
|
||||
def __str__(self) -> str:
|
||||
"""
|
||||
Returns the str representation of this Specifier like object. This
|
||||
should be representative of the Specifier itself.
|
||||
"""
|
||||
|
||||
@abc.abstractmethod
|
||||
def __hash__(self) -> int:
|
||||
"""
|
||||
Returns a hash value for this Specifier like object.
|
||||
"""
|
||||
|
||||
@abc.abstractmethod
|
||||
def __eq__(self, other: object) -> bool:
|
||||
"""
|
||||
Returns a boolean representing whether or not the two Specifier like
|
||||
objects are equal.
|
||||
"""
|
||||
|
||||
@abc.abstractmethod
|
||||
def __ne__(self, other: object) -> bool:
|
||||
"""
|
||||
Returns a boolean representing whether or not the two Specifier like
|
||||
objects are not equal.
|
||||
"""
|
||||
|
||||
@abc.abstractproperty
|
||||
def prereleases(self) -> Optional[bool]:
|
||||
"""
|
||||
Returns whether or not pre-releases as a whole are allowed by this
|
||||
specifier.
|
||||
"""
|
||||
|
||||
@prereleases.setter
|
||||
def prereleases(self, value: bool) -> None:
|
||||
"""
|
||||
Sets whether or not pre-releases as a whole are allowed by this
|
||||
specifier.
|
||||
"""
|
||||
|
||||
@abc.abstractmethod
|
||||
def contains(self, item: str, prereleases: Optional[bool] = None) -> bool:
|
||||
"""
|
||||
Determines if the given item is contained within this specifier.
|
||||
"""
|
||||
|
||||
@abc.abstractmethod
|
||||
def filter(
|
||||
self, iterable: Iterable[VersionTypeVar], prereleases: Optional[bool] = None
|
||||
) -> Iterable[VersionTypeVar]:
|
||||
"""
|
||||
Takes an iterable of items and filters them so that only items which
|
||||
are contained within this specifier are allowed in it.
|
||||
"""
|
||||
|
||||
|
||||
class _IndividualSpecifier(BaseSpecifier):

    _operators: Dict[str, str] = {}
    _regex: Pattern[str]

    def __init__(self, spec: str = "", prereleases: Optional[bool] = None) -> None:
        match = self._regex.search(spec)
        if not match:
            raise InvalidSpecifier(f"Invalid specifier: '{spec}'")

        self._spec: Tuple[str, str] = (
            match.group("operator").strip(),
            match.group("version").strip(),
        )

        # Store whether or not this Specifier should accept prereleases
        self._prereleases = prereleases

    def __repr__(self) -> str:
        pre = (
            f", prereleases={self.prereleases!r}"
            if self._prereleases is not None
            else ""
        )

        return "<{}({!r}{})>".format(self.__class__.__name__, str(self), pre)

    def __str__(self) -> str:
        return "{}{}".format(*self._spec)

    @property
    def _canonical_spec(self) -> Tuple[str, str]:
        return self._spec[0], canonicalize_version(self._spec[1])

    def __hash__(self) -> int:
        return hash(self._canonical_spec)

    def __eq__(self, other: object) -> bool:
        if isinstance(other, str):
            try:
                other = self.__class__(str(other))
            except InvalidSpecifier:
                return NotImplemented
        elif not isinstance(other, self.__class__):
            return NotImplemented

        return self._canonical_spec == other._canonical_spec

    def __ne__(self, other: object) -> bool:
        if isinstance(other, str):
            try:
                other = self.__class__(str(other))
            except InvalidSpecifier:
                return NotImplemented
        elif not isinstance(other, self.__class__):
            return NotImplemented

        return self._spec != other._spec

    def _get_operator(self, op: str) -> CallableOperator:
        operator_callable: CallableOperator = getattr(
            self, f"_compare_{self._operators[op]}"
        )
        return operator_callable

    def _coerce_version(self, version: UnparsedVersion) -> ParsedVersion:
        if not isinstance(version, (LegacyVersion, Version)):
            version = parse(version)
        return version

    @property
    def operator(self) -> str:
        return self._spec[0]

    @property
    def version(self) -> str:
        return self._spec[1]

    @property
    def prereleases(self) -> Optional[bool]:
        return self._prereleases

    @prereleases.setter
    def prereleases(self, value: bool) -> None:
        self._prereleases = value

    def __contains__(self, item: str) -> bool:
        return self.contains(item)

    def contains(
        self, item: UnparsedVersion, prereleases: Optional[bool] = None
    ) -> bool:

        # Determine if prereleases are to be allowed or not.
        if prereleases is None:
            prereleases = self.prereleases

        # Normalize item to a Version or LegacyVersion; this allows us to have
        # a shortcut for ``"2.0" in Specifier(">=2")``.
        normalized_item = self._coerce_version(item)

        # Determine if we should be supporting prereleases in this specifier
        # or not. If we do not support prereleases, then we can short-circuit
        # the logic if this version is a prerelease.
        if normalized_item.is_prerelease and not prereleases:
            return False

        # Actually do the comparison to determine if this item is contained
        # within this Specifier or not.
        operator_callable: CallableOperator = self._get_operator(self.operator)
        return operator_callable(normalized_item, self.version)

    def filter(
        self, iterable: Iterable[VersionTypeVar], prereleases: Optional[bool] = None
    ) -> Iterable[VersionTypeVar]:

        yielded = False
        found_prereleases = []

        kw = {"prereleases": prereleases if prereleases is not None else True}

        # Attempt to iterate over all the values in the iterable and if any of
        # them match, yield them.
        for version in iterable:
            parsed_version = self._coerce_version(version)

            if self.contains(parsed_version, **kw):
                # If our version is a prerelease, and we were not set to allow
                # prereleases, then we'll store it for later in case nothing
                # else matches this specifier.
                if parsed_version.is_prerelease and not (
                    prereleases or self.prereleases
                ):
                    found_prereleases.append(version)
                # Either this is not a prerelease, or we should have been
                # accepting prereleases from the beginning.
                else:
                    yielded = True
                    yield version

        # Now that we've iterated over everything, determine if we've yielded
        # any values, and if we have not and we have any prereleases stored up
        # then we will go ahead and yield the prereleases.
        if not yielded and found_prereleases:
            for version in found_prereleases:
                yield version


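# --- Editor's usage sketch (not part of the vendored upstream file). It
# assumes this module is importable as `packaging.specifiers`:
#
#     >>> from packaging.specifiers import Specifier
#     >>> s = Specifier(">=1.0")
#     >>> s.contains("1.5")
#     True
#     >>> s.contains("2.0.dev1")  # prereleases are excluded by default
#     False
#     >>> list(s.filter(["0.9", "1.5", "2.0.dev1"]))
#     ['1.5']
#     >>> list(s.filter(["2.0.dev1"]))  # only prereleases matched, so yield them
#     ['2.0.dev1']
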
class LegacySpecifier(_IndividualSpecifier):

    _regex_str = r"""
        (?P<operator>(==|!=|<=|>=|<|>))
        \s*
        (?P<version>
            [^,;\s)]* # Since this is a "legacy" specifier, and the version
                      # string can be just about anything, we match everything
                      # except for whitespace, a semi-colon for marker support,
                      # a closing paren since versions can be enclosed in
                      # them, and a comma since it's a version separator.
        )
        """

    _regex = re.compile(r"^\s*" + _regex_str + r"\s*$", re.VERBOSE | re.IGNORECASE)

    _operators = {
        "==": "equal",
        "!=": "not_equal",
        "<=": "less_than_equal",
        ">=": "greater_than_equal",
        "<": "less_than",
        ">": "greater_than",
    }

    def __init__(self, spec: str = "", prereleases: Optional[bool] = None) -> None:
        super().__init__(spec, prereleases)

        warnings.warn(
            "Creating a LegacyVersion has been deprecated and will be "
            "removed in the next major release",
            DeprecationWarning,
        )

    def _coerce_version(self, version: UnparsedVersion) -> LegacyVersion:
        if not isinstance(version, LegacyVersion):
            version = LegacyVersion(str(version))
        return version

    def _compare_equal(self, prospective: LegacyVersion, spec: str) -> bool:
        return prospective == self._coerce_version(spec)

    def _compare_not_equal(self, prospective: LegacyVersion, spec: str) -> bool:
        return prospective != self._coerce_version(spec)

    def _compare_less_than_equal(self, prospective: LegacyVersion, spec: str) -> bool:
        return prospective <= self._coerce_version(spec)

    def _compare_greater_than_equal(
        self, prospective: LegacyVersion, spec: str
    ) -> bool:
        return prospective >= self._coerce_version(spec)

    def _compare_less_than(self, prospective: LegacyVersion, spec: str) -> bool:
        return prospective < self._coerce_version(spec)

    def _compare_greater_than(self, prospective: LegacyVersion, spec: str) -> bool:
        return prospective > self._coerce_version(spec)


def _require_version_compare(
    fn: Callable[["Specifier", ParsedVersion, str], bool]
) -> Callable[["Specifier", ParsedVersion, str], bool]:
    @functools.wraps(fn)
    def wrapped(self: "Specifier", prospective: ParsedVersion, spec: str) -> bool:
        if not isinstance(prospective, Version):
            return False
        return fn(self, prospective, spec)

    return wrapped


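# Editor's note (illustrative, not upstream code): the decorator above makes
# every PEP 440 comparison reject non-PEP 440 versions outright, e.g.
#
#     >>> from packaging.specifiers import Specifier
#     >>> import warnings; warnings.simplefilter("ignore")  # LegacyVersion warning
#     >>> Specifier(">=1.0").contains("not-a-version")  # parses as LegacyVersion
#     False
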
class Specifier(_IndividualSpecifier):

    _regex_str = r"""
        (?P<operator>(~=|==|!=|<=|>=|<|>|===))
        (?P<version>
            (?:
                # The identity operators allow for an escape hatch that will
                # do an exact string match of the version you wish to install.
                # This will not be parsed by PEP 440 and we cannot determine
                # any semantic meaning from it. This operator is discouraged
                # but included entirely as an escape hatch.
                (?<====)  # Only match for the identity operator
                \s*
                [^\s]*    # We just match everything, except for whitespace
                          # since we are only testing for strict identity.
            )
            |
            (?:
                # The (non)equality operators allow for wild card and local
                # versions to be specified so we have to define these two
                # operators separately to enable that.
                (?<===|!=)            # Only match for equals and not equals

                \s*
                v?
                (?:[0-9]+!)?          # epoch
                [0-9]+(?:\.[0-9]+)*   # release
                (?:                   # pre release
                    [-_\.]?
                    (a|b|c|rc|alpha|beta|pre|preview)
                    [-_\.]?
                    [0-9]*
                )?
                (?:                   # post release
                    (?:-[0-9]+)|(?:[-_\.]?(post|rev|r)[-_\.]?[0-9]*)
                )?

                # You cannot use a wild card and a dev or local version
                # together so group them with a | and make them optional.
                (?:
                    (?:[-_\.]?dev[-_\.]?[0-9]*)?         # dev release
                    (?:\+[a-z0-9]+(?:[-_\.][a-z0-9]+)*)? # local
                    |
                    \.\*  # Wild card syntax of .*
                )?
            )
            |
            (?:
                # The compatible operator requires at least two digits in the
                # release segment.
                (?<=~=)               # Only match for the compatible operator

                \s*
                v?
                (?:[0-9]+!)?          # epoch
                [0-9]+(?:\.[0-9]+)+   # release  (We have a + instead of a *)
                (?:                   # pre release
                    [-_\.]?
                    (a|b|c|rc|alpha|beta|pre|preview)
                    [-_\.]?
                    [0-9]*
                )?
                (?:                   # post release
                    (?:-[0-9]+)|(?:[-_\.]?(post|rev|r)[-_\.]?[0-9]*)
                )?
                (?:[-_\.]?dev[-_\.]?[0-9]*)?  # dev release
            )
            |
            (?:
                # All other operators only allow a sub set of what the
                # (non)equality operators do. Specifically they do not allow
                # local versions to be specified nor do they allow the prefix
                # matching wild cards.
                (?<!==|!=|~=)         # We have special cases for these
                                      # operators so we want to make sure they
                                      # don't match here.

                \s*
                v?
                (?:[0-9]+!)?          # epoch
                [0-9]+(?:\.[0-9]+)*   # release
                (?:                   # pre release
                    [-_\.]?
                    (a|b|c|rc|alpha|beta|pre|preview)
                    [-_\.]?
                    [0-9]*
                )?
                (?:                   # post release
                    (?:-[0-9]+)|(?:[-_\.]?(post|rev|r)[-_\.]?[0-9]*)
                )?
                (?:[-_\.]?dev[-_\.]?[0-9]*)?  # dev release
            )
        )
        """

    _regex = re.compile(r"^\s*" + _regex_str + r"\s*$", re.VERBOSE | re.IGNORECASE)

    _operators = {
        "~=": "compatible",
        "==": "equal",
        "!=": "not_equal",
        "<=": "less_than_equal",
        ">=": "greater_than_equal",
        "<": "less_than",
        ">": "greater_than",
        "===": "arbitrary",
    }

    @_require_version_compare
    def _compare_compatible(self, prospective: ParsedVersion, spec: str) -> bool:

        # Compatible releases have an equivalent combination of >= and ==. That
        # is that ~=2.2 is equivalent to >=2.2,==2.*. This allows us to
        # implement this in terms of the other specifiers instead of
        # implementing it ourselves. The only thing we need to do is construct
        # the other specifiers.

        # We want everything but the last item in the version, but we want to
        # ignore suffix segments.
        prefix = ".".join(
            list(itertools.takewhile(_is_not_suffix, _version_split(spec)))[:-1]
        )

        # Add the prefix notation to the end of our string
        prefix += ".*"

        return self._get_operator(">=")(prospective, spec) and self._get_operator("==")(
            prospective, prefix
        )

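# Worked example (editor's note, continuing the doctest sketch above): for
# "~=2.2" the prefix computed above is "2.*", so the compatible check expands
# to `>= 2.2` AND `== 2.*`:
#
#     >>> Specifier("~=2.2").contains("2.7")
#     True
#     >>> Specifier("~=2.2").contains("3.0")  # fails the == 2.* half
#     False
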
    @_require_version_compare
    def _compare_equal(self, prospective: ParsedVersion, spec: str) -> bool:

        # We need special logic to handle prefix matching
        if spec.endswith(".*"):
            # In the case of prefix matching we want to ignore the local segment.
            prospective = Version(prospective.public)
            # Split the spec out by dots, and pretend that there is an implicit
            # dot in between a release segment and a pre-release segment.
            split_spec = _version_split(spec[:-2])  # Remove the trailing .*

            # Split the prospective version out by dots, and pretend that there
            # is an implicit dot in between a release segment and a pre-release
            # segment.
            split_prospective = _version_split(str(prospective))

            # Shorten the prospective version to be the same length as the spec
            # so that we can determine if the specifier is a prefix of the
            # prospective version or not.
            shortened_prospective = split_prospective[: len(split_spec)]

            # Pad out our two sides with zeros so that they both equal the same
            # length.
            padded_spec, padded_prospective = _pad_version(
                split_spec, shortened_prospective
            )

            return padded_prospective == padded_spec
        else:
            # Convert our spec string into a Version
            spec_version = Version(spec)

            # If the specifier does not have a local segment, then we want to
            # act as if the prospective version also does not have a local
            # segment.
            if not spec_version.local:
                prospective = Version(prospective.public)

            return prospective == spec_version

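# Prefix matching in action (editor's sketch, continuing the doctests above):
#
#     >>> Specifier("==3.1.*").contains("3.1.2")
#     True
#     >>> Specifier("==3.1.*").contains("3.2.0")
#     False
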
    @_require_version_compare
    def _compare_not_equal(self, prospective: ParsedVersion, spec: str) -> bool:
        return not self._compare_equal(prospective, spec)

    @_require_version_compare
    def _compare_less_than_equal(self, prospective: ParsedVersion, spec: str) -> bool:

        # NB: Local version identifiers are NOT permitted in the version
        # specifier, so local version labels can be universally removed from
        # the prospective version.
        return Version(prospective.public) <= Version(spec)

    @_require_version_compare
    def _compare_greater_than_equal(
        self, prospective: ParsedVersion, spec: str
    ) -> bool:

        # NB: Local version identifiers are NOT permitted in the version
        # specifier, so local version labels can be universally removed from
        # the prospective version.
        return Version(prospective.public) >= Version(spec)

    @_require_version_compare
    def _compare_less_than(self, prospective: ParsedVersion, spec_str: str) -> bool:

        # Convert our spec to a Version instance, since we'll want to work with
        # it as a version.
        spec = Version(spec_str)

        # Check to see if the prospective version is less than the spec
        # version. If it's not, we can short-circuit and just return False now
        # instead of doing extra unneeded work.
        if not prospective < spec:
            return False

        # This special case is here so that, unless the specifier itself
        # includes a pre-release version, we do not accept pre-release
        # versions of the version mentioned in the specifier (e.g. <3.1 should
        # not match 3.1.dev0, but should match 3.0.dev0).
        if not spec.is_prerelease and prospective.is_prerelease:
            if Version(prospective.base_version) == Version(spec.base_version):
                return False

        # If we've gotten to here, it means that the prospective version is
        # both less than the spec version *and* it's not a pre-release of the
        # same version in the spec.
        return True

    @_require_version_compare
    def _compare_greater_than(self, prospective: ParsedVersion, spec_str: str) -> bool:

        # Convert our spec to a Version instance, since we'll want to work with
        # it as a version.
        spec = Version(spec_str)

        # Check to see if the prospective version is greater than the spec
        # version. If it's not, we can short-circuit and just return False now
        # instead of doing extra unneeded work.
        if not prospective > spec:
            return False

        # This special case is here so that, unless the specifier itself
        # includes a post-release version, we do not accept post-release
        # versions of the version mentioned in the specifier (e.g. >3.1 should
        # not match 3.0.post0, but should match 3.2.post0).
        if not spec.is_postrelease and prospective.is_postrelease:
            if Version(prospective.base_version) == Version(spec.base_version):
                return False

        # Ensure that we do not allow a local version of the version mentioned
        # in the specifier, which is technically greater than, to match.
        if prospective.local is not None:
            if Version(prospective.base_version) == Version(spec.base_version):
                return False

        # If we've gotten to here, it means that the prospective version is
        # both greater than the spec version *and* it's not a post-release of
        # the same version in the spec.
        return True

    def _compare_arbitrary(self, prospective: Version, spec: str) -> bool:
        return str(prospective).lower() == str(spec).lower()

    @property
    def prereleases(self) -> bool:

        # If there is an explicit prereleases set for this, then we'll just
        # blindly use that.
        if self._prereleases is not None:
            return self._prereleases

        # Look at all of our specifiers and determine if they are inclusive
        # operators, and if they are, whether they include an explicit
        # prerelease.
        operator, version = self._spec
        if operator in ["==", ">=", "<=", "~=", "==="]:
            # The == specifier can include a trailing .*; if it does, we
            # want to remove it before parsing.
            if operator == "==" and version.endswith(".*"):
                version = version[:-2]

            # Parse the version, and if it is a pre-release, then this
            # specifier allows pre-releases.
            if parse(version).is_prerelease:
                return True

        return False

    @prereleases.setter
    def prereleases(self, value: bool) -> None:
        self._prereleases = value


_prefix_regex = re.compile(r"^([0-9]+)((?:a|b|c|rc)[0-9]+)$")


def _version_split(version: str) -> List[str]:
    result: List[str] = []
    for item in version.split("."):
        match = _prefix_regex.search(item)
        if match:
            result.extend(match.groups())
        else:
            result.append(item)
    return result


def _is_not_suffix(segment: str) -> bool:
    return not any(
        segment.startswith(prefix) for prefix in ("dev", "a", "b", "rc", "post")
    )


def _pad_version(left: List[str], right: List[str]) -> Tuple[List[str], List[str]]:
    left_split, right_split = [], []

    # Get the release segment of our versions
    left_split.append(list(itertools.takewhile(lambda x: x.isdigit(), left)))
    right_split.append(list(itertools.takewhile(lambda x: x.isdigit(), right)))

    # Get the rest of our versions
    left_split.append(left[len(left_split[0]) :])
    right_split.append(right[len(right_split[0]) :])

    # Insert our padding
    left_split.insert(1, ["0"] * max(0, len(right_split[0]) - len(left_split[0])))
    right_split.insert(1, ["0"] * max(0, len(left_split[0]) - len(right_split[0])))

    return (list(itertools.chain(*left_split)), list(itertools.chain(*right_split)))


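# Editor's sketch of the two helpers above:
#
#     >>> _version_split("1.0rc1")  # the pre-release is split off the last digit
#     ['1', '0', 'rc1']
#     >>> _pad_version(["3", "1"], ["3"])  # release segments are zero-padded
#     (['3', '1'], ['3', '0'])
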
class SpecifierSet(BaseSpecifier):
    def __init__(
        self, specifiers: str = "", prereleases: Optional[bool] = None
    ) -> None:

        # Split on , to break each individual specifier into its own item, and
        # strip each item to remove leading/trailing whitespace.
        split_specifiers = [s.strip() for s in specifiers.split(",") if s.strip()]

        # Parse each individual specifier, attempting first to make it a
        # Specifier and falling back to a LegacySpecifier.
        parsed: Set[_IndividualSpecifier] = set()
        for specifier in split_specifiers:
            try:
                parsed.add(Specifier(specifier))
            except InvalidSpecifier:
                parsed.add(LegacySpecifier(specifier))

        # Turn our parsed specifiers into a frozen set and save them for later.
        self._specs = frozenset(parsed)

        # Store our prereleases value so we can use it later to determine if
        # we accept prereleases or not.
        self._prereleases = prereleases

    def __repr__(self) -> str:
        pre = (
            f", prereleases={self.prereleases!r}"
            if self._prereleases is not None
            else ""
        )

        return "<SpecifierSet({!r}{})>".format(str(self), pre)

    def __str__(self) -> str:
        return ",".join(sorted(str(s) for s in self._specs))

    def __hash__(self) -> int:
        return hash(self._specs)

    def __and__(self, other: Union["SpecifierSet", str]) -> "SpecifierSet":
        if isinstance(other, str):
            other = SpecifierSet(other)
        elif not isinstance(other, SpecifierSet):
            return NotImplemented

        specifier = SpecifierSet()
        specifier._specs = frozenset(self._specs | other._specs)

        if self._prereleases is None and other._prereleases is not None:
            specifier._prereleases = other._prereleases
        elif self._prereleases is not None and other._prereleases is None:
            specifier._prereleases = self._prereleases
        elif self._prereleases == other._prereleases:
            specifier._prereleases = self._prereleases
        else:
            raise ValueError(
                "Cannot combine SpecifierSets with True and False prerelease "
                "overrides."
            )

        return specifier

    def __eq__(self, other: object) -> bool:
        if isinstance(other, (str, _IndividualSpecifier)):
            other = SpecifierSet(str(other))
        elif not isinstance(other, SpecifierSet):
            return NotImplemented

        return self._specs == other._specs

    def __ne__(self, other: object) -> bool:
        if isinstance(other, (str, _IndividualSpecifier)):
            other = SpecifierSet(str(other))
        elif not isinstance(other, SpecifierSet):
            return NotImplemented

        return self._specs != other._specs

    def __len__(self) -> int:
        return len(self._specs)

    def __iter__(self) -> Iterator[_IndividualSpecifier]:
        return iter(self._specs)

    @property
    def prereleases(self) -> Optional[bool]:

        # If we have been given an explicit prerelease modifier, then we'll
        # pass that through here.
        if self._prereleases is not None:
            return self._prereleases

        # If we don't have any specifiers, and we don't have a forced value,
        # then we'll just return None since we don't know if this should have
        # pre-releases or not.
        if not self._specs:
            return None

        # Otherwise we'll see if any of the given specifiers accept
        # prereleases. If any of them do, we'll return True, otherwise False.
        return any(s.prereleases for s in self._specs)

    @prereleases.setter
    def prereleases(self, value: bool) -> None:
        self._prereleases = value

    def __contains__(self, item: UnparsedVersion) -> bool:
        return self.contains(item)

    def contains(
        self, item: UnparsedVersion, prereleases: Optional[bool] = None
    ) -> bool:

        # Ensure that our item is a Version or LegacyVersion instance.
        if not isinstance(item, (LegacyVersion, Version)):
            item = parse(item)

        # Determine if we're forcing a prerelease or not. If we're not forcing
        # one for this particular filter call, then we'll use whatever the
        # SpecifierSet thinks for whether or not we should support prereleases.
        if prereleases is None:
            prereleases = self.prereleases

        # We can determine if we're going to allow pre-releases by looking to
        # see if any of the underlying items supports them. If none of them do
        # and this item is a pre-release then we do not allow it and we can
        # short circuit that here.
        # Note: This means that 1.0.dev1 would not be contained in something
        #       like >=1.0.devabc however it would be in >=1.0.devabc,>0.0.dev0
        if not prereleases and item.is_prerelease:
            return False

        # We simply dispatch to the underlying specs here to make sure that the
        # given version is contained within all of them.
        # Note: This use of all() here means that an empty set of specifiers
        # will always return True, this is an explicit design decision.
        return all(s.contains(item, prereleases=prereleases) for s in self._specs)

    def filter(
        self, iterable: Iterable[VersionTypeVar], prereleases: Optional[bool] = None
    ) -> Iterable[VersionTypeVar]:

        # Determine if we're forcing a prerelease or not. If we're not forcing
        # one for this particular filter call, then we'll use whatever the
        # SpecifierSet thinks for whether or not we should support prereleases.
        if prereleases is None:
            prereleases = self.prereleases

        # If we have any specifiers, then we want to wrap our iterable in the
        # filter method for each one; this will act as a logical AND amongst
        # each specifier.
        if self._specs:
            for spec in self._specs:
                iterable = spec.filter(iterable, prereleases=bool(prereleases))
            return iterable
        # If we do not have any specifiers, then we need to have a rough filter
        # which will filter out any pre-releases, unless there are no final
        # releases, and which will filter out LegacyVersion in general.
        else:
            filtered: List[VersionTypeVar] = []
            found_prereleases: List[VersionTypeVar] = []

            item: UnparsedVersion
            parsed_version: Union[Version, LegacyVersion]

            for item in iterable:
                # Ensure that we have some kind of Version class for this item.
                if not isinstance(item, (LegacyVersion, Version)):
                    parsed_version = parse(item)
                else:
                    parsed_version = item

                # Filter out any item which is parsed as a LegacyVersion
                if isinstance(parsed_version, LegacyVersion):
                    continue

                # Store any item which is a pre-release for later unless we've
                # already found a final version or we are accepting prereleases
                if parsed_version.is_prerelease and not prereleases:
                    if not filtered:
                        found_prereleases.append(item)
                else:
                    filtered.append(item)

            # If we've found no items except for pre-releases, then we'll go
            # ahead and use the pre-releases
            if not filtered and found_prereleases and prereleases is None:
                return found_prereleases

            return filtered
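
# End-to-end sketch of SpecifierSet (editor's note; assumes the module is
# importable as `packaging.specifiers`):
#
#     >>> from packaging.specifiers import SpecifierSet
#     >>> spec = SpecifierSet(">=1.0") & SpecifierSet("<2.0")
#     >>> str(spec)
#     '<2.0,>=1.0'
#     >>> "1.4" in spec
#     True
#     >>> list(spec.filter(["0.5", "1.4", "2.1"]))
#     ['1.4']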
@ -0,0 +1,484 @@
# This file is dual licensed under the terms of the Apache License, Version
# 2.0, and the BSD License. See the LICENSE file in the root of this repository
# for complete details.

import logging
import platform
import sys
import sysconfig
from importlib.machinery import EXTENSION_SUFFIXES
from typing import (
    Dict,
    FrozenSet,
    Iterable,
    Iterator,
    List,
    Optional,
    Sequence,
    Tuple,
    Union,
    cast,
)

from . import _manylinux, _musllinux

logger = logging.getLogger(__name__)

PythonVersion = Sequence[int]
MacVersion = Tuple[int, int]

INTERPRETER_SHORT_NAMES: Dict[str, str] = {
    "python": "py",  # Generic.
    "cpython": "cp",
    "pypy": "pp",
    "ironpython": "ip",
    "jython": "jy",
}


_32_BIT_INTERPRETER = sys.maxsize <= 2 ** 32


class Tag:
    """
    A representation of the tag triple for a wheel.

    Instances are considered immutable and thus are hashable. Equality checking
    is also supported.
    """

    __slots__ = ["_interpreter", "_abi", "_platform", "_hash"]

    def __init__(self, interpreter: str, abi: str, platform: str) -> None:
        self._interpreter = interpreter.lower()
        self._abi = abi.lower()
        self._platform = platform.lower()
        # The __hash__ of every single element in a Set[Tag] will be evaluated
        # each time that a set calls its `.isdisjoint()` method, which may be
        # called hundreds of times when scanning a page of links for packages
        # with tags matching that Set[Tag]. Pre-computing the value here
        # produces significant speedups for downstream consumers.
        self._hash = hash((self._interpreter, self._abi, self._platform))

    @property
    def interpreter(self) -> str:
        return self._interpreter

    @property
    def abi(self) -> str:
        return self._abi

    @property
    def platform(self) -> str:
        return self._platform

    def __eq__(self, other: object) -> bool:
        if not isinstance(other, Tag):
            return NotImplemented

        return (
            (self._hash == other._hash)  # Short-circuit ASAP for perf reasons.
            and (self._platform == other._platform)
            and (self._abi == other._abi)
            and (self._interpreter == other._interpreter)
        )

    def __hash__(self) -> int:
        return self._hash

    def __str__(self) -> str:
        return f"{self._interpreter}-{self._abi}-{self._platform}"

    def __repr__(self) -> str:
        return "<{self} @ {self_id}>".format(self=self, self_id=id(self))


def parse_tag(tag: str) -> FrozenSet[Tag]:
    """
    Parses the provided tag (e.g. `py3-none-any`) into a frozenset of Tag instances.

    Returning a set is required due to the possibility that the tag is a
    compressed tag set.
    """
    tags = set()
    interpreters, abis, platforms = tag.split("-")
    for interpreter in interpreters.split("."):
        for abi in abis.split("."):
            for platform_ in platforms.split("."):
                tags.add(Tag(interpreter, abi, platform_))
    return frozenset(tags)


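# A compressed tag set expands into the cross product of its dotted components
# (editor's sketch):
#
#     >>> sorted(str(t) for t in parse_tag("cp39.cp310-abi3-linux_x86_64"))
#     ['cp310-abi3-linux_x86_64', 'cp39-abi3-linux_x86_64']
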
def _get_config_var(name: str, warn: bool = False) -> Union[int, str, None]:
    value = sysconfig.get_config_var(name)
    if value is None and warn:
        logger.debug(
            "Config variable '%s' is unset, Python ABI tag may be incorrect", name
        )
    return value


def _normalize_string(string: str) -> str:
    return string.replace(".", "_").replace("-", "_")


def _abi3_applies(python_version: PythonVersion) -> bool:
    """
    Determine if the Python version supports abi3.

    PEP 384 was first implemented in Python 3.2.
    """
    return len(python_version) > 1 and tuple(python_version) >= (3, 2)


def _cpython_abis(py_version: PythonVersion, warn: bool = False) -> List[str]:
    py_version = tuple(py_version)  # To allow for version comparison.
    abis = []
    version = _version_nodot(py_version[:2])
    debug = pymalloc = ucs4 = ""
    with_debug = _get_config_var("Py_DEBUG", warn)
    has_refcount = hasattr(sys, "gettotalrefcount")
    # Windows doesn't set Py_DEBUG, so checking for support of debug-compiled
    # extension modules is the best option.
    # https://github.com/pypa/pip/issues/3383#issuecomment-173267692
    has_ext = "_d.pyd" in EXTENSION_SUFFIXES
    if with_debug or (with_debug is None and (has_refcount or has_ext)):
        debug = "d"
    if py_version < (3, 8):
        with_pymalloc = _get_config_var("WITH_PYMALLOC", warn)
        if with_pymalloc or with_pymalloc is None:
            pymalloc = "m"
        if py_version < (3, 3):
            unicode_size = _get_config_var("Py_UNICODE_SIZE", warn)
            if unicode_size == 4 or (
                unicode_size is None and sys.maxunicode == 0x10FFFF
            ):
                ucs4 = "u"
    elif debug:
        # Debug builds can also load "normal" extension modules.
        # We can also assume no UCS-4 or pymalloc requirement.
        abis.append(f"cp{version}")
    abis.insert(
        0,
        "cp{version}{debug}{pymalloc}{ucs4}".format(
            version=version, debug=debug, pymalloc=pymalloc, ucs4=ucs4
        ),
    )
    return abis


def cpython_tags(
    python_version: Optional[PythonVersion] = None,
    abis: Optional[Iterable[str]] = None,
    platforms: Optional[Iterable[str]] = None,
    *,
    warn: bool = False,
) -> Iterator[Tag]:
    """
    Yields the tags for a CPython interpreter.

    The tags consist of:
    - cp<python_version>-<abi>-<platform>
    - cp<python_version>-abi3-<platform>
    - cp<python_version>-none-<platform>
    - cp<less than python_version>-abi3-<platform>  # Older Python versions down to 3.2.

    If python_version only specifies a major version then user-provided ABIs and
    the 'none' ABI tag will be used.

    If 'abi3' or 'none' are specified in 'abis' then they will be yielded at
    their normal position and not at the beginning.
    """
    if not python_version:
        python_version = sys.version_info[:2]

    interpreter = "cp{}".format(_version_nodot(python_version[:2]))

    if abis is None:
        if len(python_version) > 1:
            abis = _cpython_abis(python_version, warn)
        else:
            abis = []
    abis = list(abis)
    # 'abi3' and 'none' are explicitly handled later.
    for explicit_abi in ("abi3", "none"):
        try:
            abis.remove(explicit_abi)
        except ValueError:
            pass

    platforms = list(platforms or platform_tags())
    for abi in abis:
        for platform_ in platforms:
            yield Tag(interpreter, abi, platform_)
    if _abi3_applies(python_version):
        yield from (Tag(interpreter, "abi3", platform_) for platform_ in platforms)
    yield from (Tag(interpreter, "none", platform_) for platform_ in platforms)

    if _abi3_applies(python_version):
        for minor_version in range(python_version[1] - 1, 1, -1):
            for platform_ in platforms:
                interpreter = "cp{version}".format(
                    version=_version_nodot((python_version[0], minor_version))
                )
                yield Tag(interpreter, "abi3", platform_)


def _generic_abi() -> Iterator[str]:
    abi = sysconfig.get_config_var("SOABI")
    if abi:
        yield _normalize_string(abi)


def generic_tags(
    interpreter: Optional[str] = None,
    abis: Optional[Iterable[str]] = None,
    platforms: Optional[Iterable[str]] = None,
    *,
    warn: bool = False,
) -> Iterator[Tag]:
    """
    Yields the tags for a generic interpreter.

    The tags consist of:
    - <interpreter>-<abi>-<platform>

    The "none" ABI will be added if it was not explicitly provided.
    """
    if not interpreter:
        interp_name = interpreter_name()
        interp_version = interpreter_version(warn=warn)
        interpreter = "".join([interp_name, interp_version])
    if abis is None:
        abis = _generic_abi()
    platforms = list(platforms or platform_tags())
    abis = list(abis)
    if "none" not in abis:
        abis.append("none")
    for abi in abis:
        for platform_ in platforms:
            yield Tag(interpreter, abi, platform_)


def _py_interpreter_range(py_version: PythonVersion) -> Iterator[str]:
    """
    Yields Python versions in descending order.

    After the latest version, the major-only version will be yielded, and then
    all previous versions of that major version.
    """
    if len(py_version) > 1:
        yield "py{version}".format(version=_version_nodot(py_version[:2]))
    yield "py{major}".format(major=py_version[0])
    if len(py_version) > 1:
        for minor in range(py_version[1] - 1, -1, -1):
            yield "py{version}".format(version=_version_nodot((py_version[0], minor)))


def compatible_tags(
    python_version: Optional[PythonVersion] = None,
    interpreter: Optional[str] = None,
    platforms: Optional[Iterable[str]] = None,
) -> Iterator[Tag]:
    """
    Yields the sequence of tags that are compatible with a specific version of Python.

    The tags consist of:
    - py*-none-<platform>
    - <interpreter>-none-any  # ... if `interpreter` is provided.
    - py*-none-any
    """
    if not python_version:
        python_version = sys.version_info[:2]
    platforms = list(platforms or platform_tags())
    for version in _py_interpreter_range(python_version):
        for platform_ in platforms:
            yield Tag(version, "none", platform_)
    if interpreter:
        yield Tag(interpreter, "none", "any")
    for version in _py_interpreter_range(python_version):
        yield Tag(version, "none", "any")


def _mac_arch(arch: str, is_32bit: bool = _32_BIT_INTERPRETER) -> str:
    if not is_32bit:
        return arch

    if arch.startswith("ppc"):
        return "ppc"

    return "i386"


def _mac_binary_formats(version: MacVersion, cpu_arch: str) -> List[str]:
    formats = [cpu_arch]
    if cpu_arch == "x86_64":
        if version < (10, 4):
            return []
        formats.extend(["intel", "fat64", "fat32"])

    elif cpu_arch == "i386":
        if version < (10, 4):
            return []
        formats.extend(["intel", "fat32", "fat"])

    elif cpu_arch == "ppc64":
        # TODO: Need to care about 32-bit PPC for ppc64 through 10.2?
        if version > (10, 5) or version < (10, 4):
            return []
        formats.append("fat64")

    elif cpu_arch == "ppc":
        if version > (10, 6):
            return []
        formats.extend(["fat32", "fat"])

    if cpu_arch in {"arm64", "x86_64"}:
        formats.append("universal2")

    if cpu_arch in {"x86_64", "i386", "ppc64", "ppc", "intel"}:
        formats.append("universal")

    return formats


def mac_platforms(
    version: Optional[MacVersion] = None, arch: Optional[str] = None
) -> Iterator[str]:
    """
    Yields the platform tags for a macOS system.

    The `version` parameter is a two-item tuple specifying the macOS version to
    generate platform tags for. The `arch` parameter is the CPU architecture to
    generate platform tags for. Both parameters default to the appropriate value
    for the current system.
    """
    version_str, _, cpu_arch = platform.mac_ver()
    if version is None:
        version = cast("MacVersion", tuple(map(int, version_str.split(".")[:2])))
    else:
        version = version
    if arch is None:
        arch = _mac_arch(cpu_arch)
    else:
        arch = arch

    if (10, 0) <= version and version < (11, 0):
        # Prior to Mac OS 11, each yearly release of Mac OS bumped the
        # "minor" version number. The major version was always 10.
        for minor_version in range(version[1], -1, -1):
            compat_version = 10, minor_version
            binary_formats = _mac_binary_formats(compat_version, arch)
            for binary_format in binary_formats:
                yield "macosx_{major}_{minor}_{binary_format}".format(
                    major=10, minor=minor_version, binary_format=binary_format
                )

    if version >= (11, 0):
        # Starting with Mac OS 11, each yearly release bumps the major version
        # number. The minor versions are now the midyear updates.
        for major_version in range(version[0], 10, -1):
            compat_version = major_version, 0
            binary_formats = _mac_binary_formats(compat_version, arch)
            for binary_format in binary_formats:
                yield "macosx_{major}_{minor}_{binary_format}".format(
                    major=major_version, minor=0, binary_format=binary_format
                )

    if version >= (11, 0):
        # Mac OS 11 on x86_64 is compatible with binaries from previous releases.
        # Arm64 support was introduced in 11.0, so no Arm binaries from previous
        # releases exist.
        #
        # However, the "universal2" binary format can have a
        # macOS version earlier than 11.0 when the x86_64 part of the binary supports
        # that version of macOS.
        if arch == "x86_64":
            for minor_version in range(16, 3, -1):
                compat_version = 10, minor_version
                binary_formats = _mac_binary_formats(compat_version, arch)
                for binary_format in binary_formats:
                    yield "macosx_{major}_{minor}_{binary_format}".format(
                        major=compat_version[0],
                        minor=compat_version[1],
                        binary_format=binary_format,
                    )
        else:
            for minor_version in range(16, 3, -1):
                compat_version = 10, minor_version
                binary_format = "universal2"
                yield "macosx_{major}_{minor}_{binary_format}".format(
                    major=compat_version[0],
                    minor=compat_version[1],
                    binary_format=binary_format,
                )


def _linux_platforms(is_32bit: bool = _32_BIT_INTERPRETER) -> Iterator[str]:
    linux = _normalize_string(sysconfig.get_platform())
    if is_32bit:
        if linux == "linux_x86_64":
            linux = "linux_i686"
        elif linux == "linux_aarch64":
            linux = "linux_armv7l"
    _, arch = linux.split("_", 1)
    yield from _manylinux.platform_tags(linux, arch)
    yield from _musllinux.platform_tags(arch)
    yield linux


def _generic_platforms() -> Iterator[str]:
    yield _normalize_string(sysconfig.get_platform())


def platform_tags() -> Iterator[str]:
    """
    Provides the platform tags for this installation.
    """
    if platform.system() == "Darwin":
        return mac_platforms()
    elif platform.system() == "Linux":
        return _linux_platforms()
    else:
        return _generic_platforms()


def interpreter_name() -> str:
    """
    Returns the name of the running interpreter.
    """
    name = sys.implementation.name
    return INTERPRETER_SHORT_NAMES.get(name) or name


def interpreter_version(*, warn: bool = False) -> str:
    """
    Returns the version of the running interpreter.
    """
    version = _get_config_var("py_version_nodot", warn=warn)
    if version:
        version = str(version)
    else:
        version = _version_nodot(sys.version_info[:2])
    return version


def _version_nodot(version: PythonVersion) -> str:
    return "".join(map(str, version))


def sys_tags(*, warn: bool = False) -> Iterator[Tag]:
    """
    Returns the sequence of tag triples for the running interpreter.

    The order of the sequence corresponds to priority order for the
    interpreter, from most to least important.
    """

    interp_name = interpreter_name()
    if interp_name == "cp":
        yield from cpython_tags(warn=warn)
    else:
        yield from generic_tags()

    yield from compatible_tags()
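
# Editor's sketch: the first tag yielded by sys_tags() is the most specific
# one for the running interpreter. The output below is illustrative for a
# CPython 3.9 manylinux build, not a fixed result; it varies by interpreter
# and platform:
#
#     >>> from packaging.tags import sys_tags
#     >>> str(next(iter(sys_tags())))
#     'cp39-cp39-manylinux_2_17_x86_64'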
@ -0,0 +1,136 @@
# This file is dual licensed under the terms of the Apache License, Version
# 2.0, and the BSD License. See the LICENSE file in the root of this repository
# for complete details.

import re
from typing import FrozenSet, NewType, Tuple, Union, cast

from .tags import Tag, parse_tag
from .version import InvalidVersion, Version

BuildTag = Union[Tuple[()], Tuple[int, str]]
NormalizedName = NewType("NormalizedName", str)


class InvalidWheelFilename(ValueError):
    """
    An invalid wheel filename was found, users should refer to PEP 427.
    """


class InvalidSdistFilename(ValueError):
    """
    An invalid sdist filename was found, users should refer to the packaging user guide.
    """


_canonicalize_regex = re.compile(r"[-_.]+")
# PEP 427: The build number must start with a digit.
_build_tag_regex = re.compile(r"(\d+)(.*)")


def canonicalize_name(name: str) -> NormalizedName:
    # This is taken from PEP 503.
    value = _canonicalize_regex.sub("-", name).lower()
    return cast(NormalizedName, value)


def canonicalize_version(version: Union[Version, str]) -> str:
    """
    This is very similar to Version.__str__, but has one subtle difference
    with the way it handles the release segment.
    """
    if isinstance(version, str):
        try:
            parsed = Version(version)
        except InvalidVersion:
            # Legacy versions cannot be normalized
            return version
    else:
        parsed = version

    parts = []

    # Epoch
    if parsed.epoch != 0:
        parts.append(f"{parsed.epoch}!")

    # Release segment
    # NB: This strips trailing '.0's to normalize
    parts.append(re.sub(r"(\.0)+$", "", ".".join(str(x) for x in parsed.release)))

    # Pre-release
    if parsed.pre is not None:
        parts.append("".join(str(x) for x in parsed.pre))

    # Post-release
    if parsed.post is not None:
        parts.append(f".post{parsed.post}")

    # Development release
    if parsed.dev is not None:
        parts.append(f".dev{parsed.dev}")

    # Local version segment
    if parsed.local is not None:
        parts.append(f"+{parsed.local}")

    return "".join(parts)


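# Editor's sketch of the two canonicalizers:
#
#     >>> canonicalize_name("Setup_Tools")
#     'setup-tools'
#     >>> canonicalize_version("1.4.0")  # trailing zeros stripped from the release
#     '1.4'
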
def parse_wheel_filename(
    filename: str,
) -> Tuple[NormalizedName, Version, BuildTag, FrozenSet[Tag]]:
    if not filename.endswith(".whl"):
        raise InvalidWheelFilename(
            f"Invalid wheel filename (extension must be '.whl'): {filename}"
        )

    filename = filename[:-4]
    dashes = filename.count("-")
    if dashes not in (4, 5):
        raise InvalidWheelFilename(
            f"Invalid wheel filename (wrong number of parts): {filename}"
        )

    parts = filename.split("-", dashes - 2)
    name_part = parts[0]
    # See PEP 427 for the rules on escaping the project name
    if "__" in name_part or re.match(r"^[\w\d._]*$", name_part, re.UNICODE) is None:
        raise InvalidWheelFilename(f"Invalid project name: {filename}")
    name = canonicalize_name(name_part)
    version = Version(parts[1])
    if dashes == 5:
        build_part = parts[2]
        build_match = _build_tag_regex.match(build_part)
        if build_match is None:
            raise InvalidWheelFilename(
                f"Invalid build number: {build_part} in '{filename}'"
            )
        build = cast(BuildTag, (int(build_match.group(1)), build_match.group(2)))
    else:
        build = ()
    tags = parse_tag(parts[-1])
    return (name, version, build, tags)


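# Editor's sketch (the filename is hypothetical, chosen for illustration):
#
#     >>> name, ver, build, tags = parse_wheel_filename("Pip-21.3-py3-none-any.whl")
#     >>> name, str(ver), build
#     ('pip', '21.3', ())
#     >>> sorted(str(t) for t in tags)
#     ['py3-none-any']
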
def parse_sdist_filename(filename: str) -> Tuple[NormalizedName, Version]:
    if filename.endswith(".tar.gz"):
        file_stem = filename[: -len(".tar.gz")]
    elif filename.endswith(".zip"):
        file_stem = filename[: -len(".zip")]
    else:
        raise InvalidSdistFilename(
            f"Invalid sdist filename (extension must be '.tar.gz' or '.zip'):"
            f" {filename}"
        )

    # We are requiring a PEP 440 version, which cannot contain dashes,
    # so we split on the last dash.
    name_part, sep, version_part = file_stem.rpartition("-")
    if not sep:
        raise InvalidSdistFilename(f"Invalid sdist filename: {filename}")

    name = canonicalize_name(name_part)
    version = Version(version_part)
    return (name, version)
@ -0,0 +1,504 @@
# This file is dual licensed under the terms of the Apache License, Version
# 2.0, and the BSD License. See the LICENSE file in the root of this repository
# for complete details.

import collections
import itertools
import re
import warnings
from typing import Callable, Iterator, List, Optional, SupportsInt, Tuple, Union

from ._structures import Infinity, InfinityType, NegativeInfinity, NegativeInfinityType

__all__ = ["parse", "Version", "LegacyVersion", "InvalidVersion", "VERSION_PATTERN"]

InfiniteTypes = Union[InfinityType, NegativeInfinityType]
PrePostDevType = Union[InfiniteTypes, Tuple[str, int]]
SubLocalType = Union[InfiniteTypes, int, str]
LocalType = Union[
    NegativeInfinityType,
    Tuple[
        Union[
            SubLocalType,
            Tuple[SubLocalType, str],
            Tuple[NegativeInfinityType, SubLocalType],
        ],
        ...,
    ],
]
CmpKey = Tuple[
    int, Tuple[int, ...], PrePostDevType, PrePostDevType, PrePostDevType, LocalType
]
LegacyCmpKey = Tuple[int, Tuple[str, ...]]
VersionComparisonMethod = Callable[
    [Union[CmpKey, LegacyCmpKey], Union[CmpKey, LegacyCmpKey]], bool
]

_Version = collections.namedtuple(
    "_Version", ["epoch", "release", "dev", "pre", "post", "local"]
)


def parse(version: str) -> Union["LegacyVersion", "Version"]:
    """
    Parse the given version string and return either a :class:`Version` object
    or a :class:`LegacyVersion` object depending on if the given version is
    a valid PEP 440 version or a legacy version.
    """
    try:
        return Version(version)
    except InvalidVersion:
        return LegacyVersion(version)


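# Editor's sketch of the fallback (DeprecationWarning from LegacyVersion elided):
#
#     >>> type(parse("1.0.post1")).__name__
#     'Version'
#     >>> type(parse("not-a-version")).__name__  # not PEP 440, legacy path
#     'LegacyVersion'
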
class InvalidVersion(ValueError):
|
||||
"""
|
||||
An invalid version was found, users should refer to PEP 440.
|
||||
"""
|
||||
|
||||
|
||||
class _BaseVersion:
|
||||
_key: Union[CmpKey, LegacyCmpKey]
|
||||
|
||||
def __hash__(self) -> int:
|
||||
return hash(self._key)
|
||||
|
||||
# Please keep the duplicated `isinstance` check
|
||||
# in the six comparisons hereunder
|
||||
# unless you find a way to avoid adding overhead function calls.
|
||||
def __lt__(self, other: "_BaseVersion") -> bool:
|
||||
if not isinstance(other, _BaseVersion):
|
||||
return NotImplemented
|
||||
|
||||
return self._key < other._key
|
||||
|
||||
def __le__(self, other: "_BaseVersion") -> bool:
|
||||
if not isinstance(other, _BaseVersion):
|
||||
return NotImplemented
|
||||
|
||||
return self._key <= other._key
|
||||
|
||||
def __eq__(self, other: object) -> bool:
|
||||
if not isinstance(other, _BaseVersion):
|
||||
return NotImplemented
|
||||
|
||||
return self._key == other._key
|
||||
|
||||
def __ge__(self, other: "_BaseVersion") -> bool:
|
||||
if not isinstance(other, _BaseVersion):
|
||||
return NotImplemented
|
||||
|
||||
return self._key >= other._key
|
||||
|
||||
def __gt__(self, other: "_BaseVersion") -> bool:
|
||||
if not isinstance(other, _BaseVersion):
|
||||
return NotImplemented
|
||||
|
||||
return self._key > other._key
|
||||
|
||||
def __ne__(self, other: object) -> bool:
|
||||
if not isinstance(other, _BaseVersion):
|
||||
return NotImplemented
|
||||
|
||||
return self._key != other._key
|
||||
|
||||
|
||||
class LegacyVersion(_BaseVersion):
|
||||
def __init__(self, version: str) -> None:
|
||||
self._version = str(version)
|
||||
self._key = _legacy_cmpkey(self._version)
|
||||
|
||||
warnings.warn(
|
||||
"Creating a LegacyVersion has been deprecated and will be "
|
||||
"removed in the next major release",
|
||||
DeprecationWarning,
|
||||
)
|
||||
|
||||
def __str__(self) -> str:
|
||||
return self._version
|
||||
|
||||
def __repr__(self) -> str:
|
||||
return f"<LegacyVersion('{self}')>"
|
||||
|
||||
@property
|
||||
def public(self) -> str:
|
||||
return self._version
|
||||
|
||||
@property
|
||||
def base_version(self) -> str:
|
||||
return self._version
|
||||
|
||||
@property
|
||||
def epoch(self) -> int:
|
||||
return -1
|
||||
|
||||
@property
|
||||
def release(self) -> None:
|
||||
return None
|
||||
|
||||
@property
|
||||
def pre(self) -> None:
|
||||
return None
|
||||
|
||||
@property
|
||||
def post(self) -> None:
|
||||
return None
|
||||
|
||||
@property
|
||||
def dev(self) -> None:
|
||||
return None
|
||||
|
||||
@property
|
||||
def local(self) -> None:
|
||||
return None
|
||||
|
||||
@property
|
||||
def is_prerelease(self) -> bool:
|
||||
return False
|
||||
|
||||
@property
|
||||
def is_postrelease(self) -> bool:
|
||||
return False
|
||||
|
||||
@property
|
||||
def is_devrelease(self) -> bool:
|
||||
return False
|
||||
|
||||
|
||||
_legacy_version_component_re = re.compile(r"(\d+ | [a-z]+ | \.| -)", re.VERBOSE)
|
||||
|
||||
_legacy_version_replacement_map = {
|
||||
"pre": "c",
|
||||
"preview": "c",
|
||||
"-": "final-",
|
||||
"rc": "c",
|
||||
"dev": "@",
|
||||
}
|
||||
|
||||
|
||||
def _parse_version_parts(s: str) -> Iterator[str]:
|
||||
for part in _legacy_version_component_re.split(s):
|
||||
part = _legacy_version_replacement_map.get(part, part)
|
||||
|
||||
if not part or part == ".":
|
||||
continue
|
||||
|
||||
if part[:1] in "0123456789":
|
||||
# pad for numeric comparison
|
||||
yield part.zfill(8)
|
||||
else:
|
||||
yield "*" + part
|
||||
|
||||
# ensure that alpha/beta/candidate are before final
|
||||
yield "*final"
|
||||
|
||||
|
||||
def _legacy_cmpkey(version: str) -> LegacyCmpKey:
|
||||
|
||||
# We hardcode an epoch of -1 here. A PEP 440 version can only have a epoch
|
||||
# greater than or equal to 0. This will effectively put the LegacyVersion,
|
||||
# which uses the defacto standard originally implemented by setuptools,
|
||||
# as before all PEP 440 versions.
|
||||
epoch = -1
|
||||
|
||||
# This scheme is taken from pkg_resources.parse_version setuptools prior to
|
||||
# it's adoption of the packaging library.
|
||||
parts: List[str] = []
|
||||
for part in _parse_version_parts(version.lower()):
|
||||
if part.startswith("*"):
|
||||
# remove "-" before a prerelease tag
|
||||
if part < "*final":
|
||||
while parts and parts[-1] == "*final-":
|
||||
parts.pop()
|
||||
|
||||
# remove trailing zeros from each series of numeric parts
|
||||
while parts and parts[-1] == "00000000":
|
||||
parts.pop()
|
||||
|
||||
parts.append(part)
|
||||
|
||||
return epoch, tuple(parts)
|
||||
|
||||
|
||||
# Deliberately not anchored to the start and end of the string, to make it
|
||||
# easier for 3rd party code to reuse
|
||||
VERSION_PATTERN = r"""
|
||||
v?
|
||||
(?:
|
||||
(?:(?P<epoch>[0-9]+)!)? # epoch
|
||||
(?P<release>[0-9]+(?:\.[0-9]+)*) # release segment
|
||||
(?P<pre> # pre-release
|
||||
[-_\.]?
|
||||
(?P<pre_l>(a|b|c|rc|alpha|beta|pre|preview))
|
||||
[-_\.]?
|
||||
(?P<pre_n>[0-9]+)?
|
||||
)?
|
||||
(?P<post> # post release
|
||||
(?:-(?P<post_n1>[0-9]+))
|
||||
|
|
||||
(?:
|
||||
[-_\.]?
|
||||
(?P<post_l>post|rev|r)
|
||||
[-_\.]?
|
||||
(?P<post_n2>[0-9]+)?
|
||||
)
|
||||
)?
|
||||
(?P<dev> # dev release
|
||||
[-_\.]?
|
||||
(?P<dev_l>dev)
|
||||
[-_\.]?
|
||||
(?P<dev_n>[0-9]+)?
|
||||
)?
|
||||
)
|
||||
(?:\+(?P<local>[a-z0-9]+(?:[-_\.][a-z0-9]+)*))? # local version
|
||||
"""
|
||||
|
||||
|
class Version(_BaseVersion):

    _regex = re.compile(r"^\s*" + VERSION_PATTERN + r"\s*$", re.VERBOSE | re.IGNORECASE)

    def __init__(self, version: str) -> None:

        # Validate the version and parse it into pieces
        match = self._regex.search(version)
        if not match:
            raise InvalidVersion(f"Invalid version: '{version}'")

        # Store the parsed out pieces of the version
        self._version = _Version(
            epoch=int(match.group("epoch")) if match.group("epoch") else 0,
            release=tuple(int(i) for i in match.group("release").split(".")),
            pre=_parse_letter_version(match.group("pre_l"), match.group("pre_n")),
            post=_parse_letter_version(
                match.group("post_l"), match.group("post_n1") or match.group("post_n2")
            ),
            dev=_parse_letter_version(match.group("dev_l"), match.group("dev_n")),
            local=_parse_local_version(match.group("local")),
        )

        # Generate a key which will be used for sorting
        self._key = _cmpkey(
            self._version.epoch,
            self._version.release,
            self._version.pre,
            self._version.post,
            self._version.dev,
            self._version.local,
        )

    def __repr__(self) -> str:
        return f"<Version('{self}')>"

    def __str__(self) -> str:
        parts = []

        # Epoch
        if self.epoch != 0:
            parts.append(f"{self.epoch}!")

        # Release segment
        parts.append(".".join(str(x) for x in self.release))

        # Pre-release
        if self.pre is not None:
            parts.append("".join(str(x) for x in self.pre))

        # Post-release
        if self.post is not None:
            parts.append(f".post{self.post}")

        # Development release
        if self.dev is not None:
            parts.append(f".dev{self.dev}")

        # Local version segment
        if self.local is not None:
            parts.append(f"+{self.local}")

        return "".join(parts)

    @property
    def epoch(self) -> int:
        _epoch: int = self._version.epoch
        return _epoch

    @property
    def release(self) -> Tuple[int, ...]:
        _release: Tuple[int, ...] = self._version.release
        return _release

    @property
    def pre(self) -> Optional[Tuple[str, int]]:
        _pre: Optional[Tuple[str, int]] = self._version.pre
        return _pre

    @property
    def post(self) -> Optional[int]:
        return self._version.post[1] if self._version.post else None

    @property
    def dev(self) -> Optional[int]:
        return self._version.dev[1] if self._version.dev else None

    @property
    def local(self) -> Optional[str]:
        if self._version.local:
            return ".".join(str(x) for x in self._version.local)
        else:
            return None

    @property
    def public(self) -> str:
        return str(self).split("+", 1)[0]

    @property
    def base_version(self) -> str:
        parts = []

        # Epoch
        if self.epoch != 0:
            parts.append(f"{self.epoch}!")

        # Release segment
        parts.append(".".join(str(x) for x in self.release))

        return "".join(parts)

    @property
    def is_prerelease(self) -> bool:
        return self.dev is not None or self.pre is not None

    @property
    def is_postrelease(self) -> bool:
        return self.post is not None

    @property
    def is_devrelease(self) -> bool:
        return self.dev is not None

    @property
    def major(self) -> int:
        return self.release[0] if len(self.release) >= 1 else 0

    @property
    def minor(self) -> int:
        return self.release[1] if len(self.release) >= 2 else 0

    @property
    def micro(self) -> int:
        return self.release[2] if len(self.release) >= 3 else 0
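
# --- Illustrative sketch (not part of the vendored file): what the Version
# class above yields for a PEP 440 version string; the values follow from the
# parsing rules in this module.
#
#     v = Version("1!2.0.post1.dev3+ubuntu.5")
#     v.epoch        -> 1
#     v.release      -> (2, 0)
#     v.post         -> 1
#     v.dev          -> 3
#     v.local        -> "ubuntu.5"
#     v.public       -> "1!2.0.post1.dev3"
#     v.base_version -> "1!2.0"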

def _parse_letter_version(
    letter: str, number: Union[str, bytes, SupportsInt]
) -> Optional[Tuple[str, int]]:

    if letter:
        # We consider there to be an implicit 0 in a pre-release if there is
        # not a numeral associated with it.
        if number is None:
            number = 0

        # We normalize any letters to their lower case form
        letter = letter.lower()

        # We consider some words to be alternate spellings of other words and
        # in those cases we want to normalize the spellings to our preferred
        # spelling.
        if letter == "alpha":
            letter = "a"
        elif letter == "beta":
            letter = "b"
        elif letter in ["c", "pre", "preview"]:
            letter = "rc"
        elif letter in ["rev", "r"]:
            letter = "post"

        return letter, int(number)
    if not letter and number:
        # We assume if we are given a number, but we are not given a letter
        # then this is using the implicit post release syntax (e.g. 1.0-1)
        letter = "post"

        return letter, int(number)

    return None


_local_version_separators = re.compile(r"[\._-]")


def _parse_local_version(local: str) -> Optional[LocalType]:
    """
    Takes a string like abc.1.twelve and turns it into ("abc", 1, "twelve").
    """
    if local is not None:
        return tuple(
            part.lower() if not part.isdigit() else int(part)
            for part in _local_version_separators.split(local)
        )
    return None


def _cmpkey(
    epoch: int,
    release: Tuple[int, ...],
    pre: Optional[Tuple[str, int]],
    post: Optional[Tuple[str, int]],
    dev: Optional[Tuple[str, int]],
    local: Optional[Tuple[SubLocalType]],
) -> CmpKey:

    # When we compare a release version, we want to compare it with all of the
    # trailing zeros removed. So we'll reverse the list, drop all the
    # now-leading zeros until we come to something non-zero, then re-reverse
    # the rest back into the correct order, and use that tuple as our sorting
    # key.
    _release = tuple(
        reversed(list(itertools.dropwhile(lambda x: x == 0, reversed(release))))
    )

    # We need to "trick" the sorting algorithm to put 1.0.dev0 before 1.0a0.
    # We'll do this by abusing the pre segment, but we _only_ want to do this
    # if there is not a pre or a post segment. If we have one of those then
    # the normal sorting rules will handle this case correctly.
    if pre is None and post is None and dev is not None:
        _pre: PrePostDevType = NegativeInfinity
    # Versions without a pre-release (except as noted above) should sort after
    # those with one.
    elif pre is None:
        _pre = Infinity
    else:
        _pre = pre

    # Versions without a post segment should sort before those with one.
    if post is None:
        _post: PrePostDevType = NegativeInfinity
    else:
        _post = post

    # Versions without a development segment should sort after those with one.
    if dev is None:
        _dev: PrePostDevType = Infinity
    else:
        _dev = dev

    if local is None:
        # Versions without a local segment should sort before those with one.
        _local: LocalType = NegativeInfinity
    else:
        # Versions with a local segment need that segment parsed to implement
        # the sorting rules in PEP440.
        # - Alpha numeric segments sort before numeric segments
        # - Alpha numeric segments sort lexicographically
        # - Numeric segments sort numerically
        # - Shorter versions sort before longer versions when the prefixes
        #   match exactly
        _local = tuple(
            (i, "") if isinstance(i, int) else (NegativeInfinity, i) for i in local
        )

    return epoch, _release, _pre, _post, _dev, _local
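
# --- Illustrative sketch (not part of the vendored file): the net effect of
# _cmpkey's trailing-zero trimming and Infinity/NegativeInfinity sentinels is
# PEP 440 ordering, e.g. (ascending):
#
#     Version("1.0.dev0") < Version("1.0a0") < Version("1.0rc1")
#         < Version("1.0") < Version("1.0+local") < Version("1.0.post1")
#
# and Version("1.0") == Version("1.0.0"), because trailing zeros in the
# release segment are dropped from the comparison key.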
5742
utils/python-venv/Lib/site-packages/setuptools/_vendor/pyparsing.py
Normal file
File diff suppressed because it is too large
205
utils/python-venv/Lib/site-packages/setuptools/archive_util.py
Normal file
@ -0,0 +1,205 @@
"""Utilities for extracting common archive formats"""

import zipfile
import tarfile
import os
import shutil
import posixpath
import contextlib
from distutils.errors import DistutilsError

from pkg_resources import ensure_directory

__all__ = [
    "unpack_archive", "unpack_zipfile", "unpack_tarfile", "default_filter",
    "UnrecognizedFormat", "extraction_drivers", "unpack_directory",
]


class UnrecognizedFormat(DistutilsError):
    """Couldn't recognize the archive type"""


def default_filter(src, dst):
    """The default progress/filter callback; returns the destination
    unchanged, so every file is extracted"""
    return dst


def unpack_archive(
        filename, extract_dir, progress_filter=default_filter,
        drivers=None):
    """Unpack `filename` to `extract_dir`, or raise ``UnrecognizedFormat``

    `progress_filter` is a function taking two arguments: a source path
    internal to the archive ('/'-separated), and a filesystem path where it
    will be extracted. The callback must return the desired extract path
    (which may be the same as the one passed in), or else ``None`` to skip
    that file or directory. The callback can thus be used to report on the
    progress of the extraction, as well as to filter the items extracted or
    alter their extraction paths.

    `drivers`, if supplied, must be a non-empty sequence of functions with the
    same signature as this function (minus the `drivers` argument), that raise
    ``UnrecognizedFormat`` if they do not support extracting the designated
    archive type. The `drivers` are tried in sequence until one is found that
    does not raise an error, or until all are exhausted (in which case
    ``UnrecognizedFormat`` is raised). If you do not supply a sequence of
    drivers, the module's ``extraction_drivers`` constant will be used, which
    means that ``unpack_zipfile`` and ``unpack_tarfile`` will be tried, in that
    order.
    """
    for driver in drivers or extraction_drivers:
        try:
            driver(filename, extract_dir, progress_filter)
        except UnrecognizedFormat:
            continue
        else:
            return
    else:
        raise UnrecognizedFormat(
            "Not a recognized archive type: %s" % filename
        )
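
# --- Illustrative sketch (not part of the vendored file): a progress_filter
# implementing the contract documented above -- return a destination path to
# extract a member, or None to skip it. The 'tests/' prefix is hypothetical.
def _example_log_and_skip_tests(src, dst):
    """Report each archive member and skip anything under tests/."""
    print("extracting", src)
    return None if src.startswith('tests/') else dst

# Hypothetical usage:
#     unpack_archive('dist.tar.gz', 'build/unpacked',
#                    progress_filter=_example_log_and_skip_tests)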

def unpack_directory(filename, extract_dir, progress_filter=default_filter):
    """"Unpack" a directory, using the same interface as for archives

    Raises ``UnrecognizedFormat`` if `filename` is not a directory
    """
    if not os.path.isdir(filename):
        raise UnrecognizedFormat("%s is not a directory" % filename)

    paths = {
        filename: ('', extract_dir),
    }
    for base, dirs, files in os.walk(filename):
        src, dst = paths[base]
        for d in dirs:
            paths[os.path.join(base, d)] = src + d + '/', os.path.join(dst, d)
        for f in files:
            target = os.path.join(dst, f)
            target = progress_filter(src + f, target)
            if not target:
                # skip non-files
                continue
            ensure_directory(target)
            f = os.path.join(base, f)
            shutil.copyfile(f, target)
            shutil.copystat(f, target)


def unpack_zipfile(filename, extract_dir, progress_filter=default_filter):
    """Unpack zip `filename` to `extract_dir`

    Raises ``UnrecognizedFormat`` if `filename` is not a zipfile (as determined
    by ``zipfile.is_zipfile()``). See ``unpack_archive()`` for an explanation
    of the `progress_filter` argument.
    """

    if not zipfile.is_zipfile(filename):
        raise UnrecognizedFormat("%s is not a zip file" % (filename,))

    with zipfile.ZipFile(filename) as z:
        for info in z.infolist():
            name = info.filename

            # don't extract absolute paths or ones with .. in them
            if name.startswith('/') or '..' in name.split('/'):
                continue

            target = os.path.join(extract_dir, *name.split('/'))
            target = progress_filter(name, target)
            if not target:
                continue
            if name.endswith('/'):
                # directory
                ensure_directory(target)
            else:
                # file
                ensure_directory(target)
                data = z.read(info.filename)
                with open(target, 'wb') as f:
                    f.write(data)
            unix_attributes = info.external_attr >> 16
            if unix_attributes:
                os.chmod(target, unix_attributes)


def _resolve_tar_file_or_dir(tar_obj, tar_member_obj):
    """Resolve any links and extract link targets as normal files."""
    while tar_member_obj is not None and (
            tar_member_obj.islnk() or tar_member_obj.issym()):
        linkpath = tar_member_obj.linkname
        if tar_member_obj.issym():
            base = posixpath.dirname(tar_member_obj.name)
            linkpath = posixpath.join(base, linkpath)
            linkpath = posixpath.normpath(linkpath)
        tar_member_obj = tar_obj._getmember(linkpath)

    is_file_or_dir = (
        tar_member_obj is not None and
        (tar_member_obj.isfile() or tar_member_obj.isdir())
    )
    if is_file_or_dir:
        return tar_member_obj

    raise LookupError('Got unknown file type')


def _iter_open_tar(tar_obj, extract_dir, progress_filter):
    """Emit member-destination pairs from a tar archive."""
    # don't do any chowning!
    tar_obj.chown = lambda *args: None

    with contextlib.closing(tar_obj):
        for member in tar_obj:
            name = member.name
            # don't extract absolute paths or ones with .. in them
            if name.startswith('/') or '..' in name.split('/'):
                continue

            prelim_dst = os.path.join(extract_dir, *name.split('/'))

            try:
                member = _resolve_tar_file_or_dir(tar_obj, member)
            except LookupError:
                continue

            final_dst = progress_filter(name, prelim_dst)
            if not final_dst:
                continue

            if final_dst.endswith(os.sep):
                final_dst = final_dst[:-1]

            yield member, final_dst


def unpack_tarfile(filename, extract_dir, progress_filter=default_filter):
    """Unpack tar/tar.gz/tar.bz2 `filename` to `extract_dir`

    Raises ``UnrecognizedFormat`` if `filename` is not a tarfile (as determined
    by ``tarfile.open()``). See ``unpack_archive()`` for an explanation
    of the `progress_filter` argument.
    """
    try:
        tarobj = tarfile.open(filename)
    except tarfile.TarError as e:
        raise UnrecognizedFormat(
            "%s is not a compressed or uncompressed tar file" % (filename,)
        ) from e

    for member, final_dst in _iter_open_tar(
            tarobj, extract_dir, progress_filter,
    ):
        try:
            # XXX Ugh
            tarobj._extract_member(member, final_dst)
        except tarfile.ExtractError:
            # chown/chmod/mkfifo/mknode/makedev failed
            pass

    return True


extraction_drivers = unpack_directory, unpack_zipfile, unpack_tarfile
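
# --- Illustrative sketch (not part of the vendored file): a custom driver has
# the same signature as the unpack_* functions above and raises
# UnrecognizedFormat for archives it cannot handle; this stub defers to the
# next driver in the sequence.
def _example_unpack_unknown(filename, extract_dir,
                            progress_filter=default_filter):
    """A stub driver that recognizes nothing and lets the next driver try."""
    raise UnrecognizedFormat("%s: format not handled here" % (filename,))

# Hypothetical usage -- tried first, then the built-in drivers:
#     unpack_archive(fn, dest,
#                    drivers=(_example_unpack_unknown,) + extraction_drivers)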
290
utils/python-venv/Lib/site-packages/setuptools/build_meta.py
Normal file
@ -0,0 +1,290 @@
"""A PEP 517 interface to setuptools

Previously, when a user or a command line tool (let's call it a "frontend")
needed to make a request of setuptools to take a certain action, for
example, generating a list of installation requirements, the frontend
would call "setup.py egg_info" or "setup.py bdist_wheel" on the command line.

PEP 517 defines a different method of interfacing with setuptools. Rather
than calling "setup.py" directly, the frontend should:

1. Set the current directory to the directory with a setup.py file
2. Import this module into a safe python interpreter (one in which
setuptools can potentially set global variables or crash hard).
3. Call one of the functions defined in PEP 517.

What each function does is defined in PEP 517. However, here is a "casual"
definition of the functions (this definition should not be relied on for
bug reports or API stability):

- `build_wheel`: build a wheel in the folder and return the basename
- `get_requires_for_build_wheel`: get the `setup_requires` to build
- `prepare_metadata_for_build_wheel`: get the `install_requires`
- `build_sdist`: build an sdist in the folder and return the basename
- `get_requires_for_build_sdist`: get the `setup_requires` to build

Again, this is not a formal definition! Just a "taste" of the module.
"""
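
# --- Illustrative sketch (not part of the vendored file): how a frontend
# drives the hooks defined below. Real frontends (pip, build) import this
# module in a fresh subprocess per PEP 517; the in-process calls and the
# project path here are only for illustration.
#
#     import os
#     import setuptools.build_meta as backend
#
#     os.chdir('/path/to/project')                   # hypothetical
#     reqs = backend.get_requires_for_build_wheel()  # e.g. ['wheel']
#     name = backend.build_wheel('dist')             # basename of built .whl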

import io
import os
import sys
import tokenize
import shutil
import contextlib
import tempfile
import warnings

import setuptools
import distutils

from pkg_resources import parse_requirements

__all__ = ['get_requires_for_build_sdist',
           'get_requires_for_build_wheel',
           'prepare_metadata_for_build_wheel',
           'build_wheel',
           'build_sdist',
           '__legacy__',
           'SetupRequirementsError']


class SetupRequirementsError(BaseException):
    def __init__(self, specifiers):
        self.specifiers = specifiers


class Distribution(setuptools.dist.Distribution):
    def fetch_build_eggs(self, specifiers):
        specifier_list = list(map(str, parse_requirements(specifiers)))

        raise SetupRequirementsError(specifier_list)

    @classmethod
    @contextlib.contextmanager
    def patch(cls):
        """
        Replace distutils.dist.Distribution with this class
        for the duration of this context.
        """
        orig = distutils.core.Distribution
        distutils.core.Distribution = cls
        try:
            yield
        finally:
            distutils.core.Distribution = orig


@contextlib.contextmanager
def no_install_setup_requires():
    """Temporarily disable installing setup_requires

    Under PEP 517, the backend reports build dependencies to the frontend,
    and the frontend is responsible for ensuring they're installed.
    So setuptools (acting as a backend) should not try to install them.
    """
    orig = setuptools._install_setup_requires
    setuptools._install_setup_requires = lambda attrs: None
    try:
        yield
    finally:
        setuptools._install_setup_requires = orig


def _get_immediate_subdirectories(a_dir):
    return [name for name in os.listdir(a_dir)
            if os.path.isdir(os.path.join(a_dir, name))]


def _file_with_extension(directory, extension):
    matching = (
        f for f in os.listdir(directory)
        if f.endswith(extension)
    )
    try:
        file, = matching
    except ValueError:
        raise ValueError(
            'No distribution was found. Ensure that `setup.py` '
            'is not empty and that it calls `setup()`.')
    return file


def _open_setup_script(setup_script):
    if not os.path.exists(setup_script):
        # Supply a default setup.py
        return io.StringIO(u"from setuptools import setup; setup()")

    return getattr(tokenize, 'open', open)(setup_script)


@contextlib.contextmanager
def suppress_known_deprecation():
    with warnings.catch_warnings():
        warnings.filterwarnings('ignore', 'setup.py install is deprecated')
        yield


class _BuildMetaBackend(object):

    def _fix_config(self, config_settings):
        config_settings = config_settings or {}
        config_settings.setdefault('--global-option', [])
        return config_settings

    def _get_build_requires(self, config_settings, requirements):
        config_settings = self._fix_config(config_settings)

        sys.argv = sys.argv[:1] + ['egg_info'] + \
            config_settings["--global-option"]
        try:
            with Distribution.patch():
                self.run_setup()
        except SetupRequirementsError as e:
            requirements += e.specifiers

        return requirements

    def run_setup(self, setup_script='setup.py'):
        # Note that we can reuse our build directory between calls
        # Correctness comes first, then optimization later
        __file__ = setup_script
        __name__ = '__main__'

        with _open_setup_script(__file__) as f:
            code = f.read().replace(r'\r\n', r'\n')

        exec(compile(code, __file__, 'exec'), locals())

    def get_requires_for_build_wheel(self, config_settings=None):
        config_settings = self._fix_config(config_settings)
        return self._get_build_requires(
            config_settings, requirements=['wheel'])

    def get_requires_for_build_sdist(self, config_settings=None):
        config_settings = self._fix_config(config_settings)
        return self._get_build_requires(config_settings, requirements=[])

    def prepare_metadata_for_build_wheel(self, metadata_directory,
                                         config_settings=None):
        sys.argv = sys.argv[:1] + [
            'dist_info', '--egg-base', metadata_directory]
        with no_install_setup_requires():
            self.run_setup()

        dist_info_directory = metadata_directory
        while True:
            dist_infos = [f for f in os.listdir(dist_info_directory)
                          if f.endswith('.dist-info')]

            if (
                len(dist_infos) == 0 and
                len(_get_immediate_subdirectories(dist_info_directory)) == 1
            ):

                dist_info_directory = os.path.join(
                    dist_info_directory, os.listdir(dist_info_directory)[0])
                continue

            assert len(dist_infos) == 1
            break

        # PEP 517 requires that the .dist-info directory be placed in the
        # metadata_directory. To comply, we MUST copy the directory to the root
        if dist_info_directory != metadata_directory:
            shutil.move(
                os.path.join(dist_info_directory, dist_infos[0]),
                metadata_directory)
            shutil.rmtree(dist_info_directory, ignore_errors=True)

        return dist_infos[0]

    def _build_with_temp_dir(self, setup_command, result_extension,
                             result_directory, config_settings):
        config_settings = self._fix_config(config_settings)
        result_directory = os.path.abspath(result_directory)

        # Build in a temporary directory, then copy to the target.
        os.makedirs(result_directory, exist_ok=True)
        with tempfile.TemporaryDirectory(dir=result_directory) as tmp_dist_dir:
            sys.argv = (sys.argv[:1] + setup_command +
                        ['--dist-dir', tmp_dist_dir] +
                        config_settings["--global-option"])
            with no_install_setup_requires():
                self.run_setup()

            result_basename = _file_with_extension(
                tmp_dist_dir, result_extension)
            result_path = os.path.join(result_directory, result_basename)
            if os.path.exists(result_path):
                # os.rename will fail overwriting on non-Unix.
                os.remove(result_path)
            os.rename(os.path.join(tmp_dist_dir, result_basename), result_path)

        return result_basename

    def build_wheel(self, wheel_directory, config_settings=None,
                    metadata_directory=None):
        with suppress_known_deprecation():
            return self._build_with_temp_dir(['bdist_wheel'], '.whl',
                                             wheel_directory, config_settings)

    def build_sdist(self, sdist_directory, config_settings=None):
        return self._build_with_temp_dir(['sdist', '--formats', 'gztar'],
                                         '.tar.gz', sdist_directory,
                                         config_settings)


class _BuildMetaLegacyBackend(_BuildMetaBackend):
    """Compatibility backend for setuptools

    This is a version of setuptools.build_meta that endeavors to maintain
    backwards compatibility with pre-PEP 517 modes of invocation. It exists
    as a temporary bridge between the old packaging mechanism and the new
    packaging mechanism, and will eventually be removed.
    """
    def run_setup(self, setup_script='setup.py'):
        # In order to maintain compatibility with scripts assuming that
        # the setup.py script is in a directory on the PYTHONPATH, inject
        # '' into sys.path. (pypa/setuptools#1642)
        sys_path = list(sys.path)  # Save the original path

        script_dir = os.path.dirname(os.path.abspath(setup_script))
        if script_dir not in sys.path:
            sys.path.insert(0, script_dir)

        # Some setup.py scripts (e.g. in pygame and numpy) use sys.argv[0] to
        # get the directory of the source code. They expect it to refer to the
        # setup.py script.
        sys_argv_0 = sys.argv[0]
        sys.argv[0] = setup_script

        try:
            super(_BuildMetaLegacyBackend,
                  self).run_setup(setup_script=setup_script)
        finally:
            # While PEP 517 frontends should be calling each hook in a fresh
            # subprocess according to the standard (and thus it should not be
            # strictly necessary to restore the old sys.path), we'll restore
            # the original path so that the path manipulation does not persist
            # within the hook after run_setup is called.
            sys.path[:] = sys_path
            sys.argv[0] = sys_argv_0


# The primary backend
_BACKEND = _BuildMetaBackend()

get_requires_for_build_wheel = _BACKEND.get_requires_for_build_wheel
get_requires_for_build_sdist = _BACKEND.get_requires_for_build_sdist
prepare_metadata_for_build_wheel = _BACKEND.prepare_metadata_for_build_wheel
build_wheel = _BACKEND.build_wheel
build_sdist = _BACKEND.build_sdist


# The legacy backend
__legacy__ = _BuildMetaLegacyBackend()
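
# --- Illustrative sketch (not part of the vendored file): a project opts into
# this backend via pyproject.toml (PEP 517/518), after which frontends call
# the hooks exported above:
#
#     [build-system]
#     requires = ["setuptools", "wheel"]
#     build-backend = "setuptools.build_meta"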
BIN
utils/python-venv/Lib/site-packages/setuptools/cli-32.exe
Normal file
Binary file not shown.
BIN
utils/python-venv/Lib/site-packages/setuptools/cli-64.exe
Normal file
Binary file not shown.
BIN
utils/python-venv/Lib/site-packages/setuptools/cli-arm64.exe
Normal file
Binary file not shown.
BIN
utils/python-venv/Lib/site-packages/setuptools/cli.exe
Normal file
Binary file not shown.
@ -0,0 +1,8 @@
from distutils.command.bdist import bdist
import sys

if 'egg' not in bdist.format_commands:
    bdist.format_command['egg'] = ('bdist_egg', "Python .egg file")
    bdist.format_commands.append('egg')

del bdist, sys
@ -0,0 +1,78 @@
from distutils.errors import DistutilsOptionError

from setuptools.command.setopt import edit_config, option_base, config_file


def shquote(arg):
    """Quote an argument for later parsing by shlex.split()"""
    for c in '"', "'", "\\", "#":
        if c in arg:
            return repr(arg)
    if arg.split() != [arg]:
        return repr(arg)
    return arg
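
# --- Illustrative sketch (not part of the vendored file): shquote() leaves
# plain tokens alone and repr()-quotes anything containing whitespace or the
# characters checked above, so alias values survive a later shlex.split():
#
#     shquote('build')      -> 'build'
#     shquote('two words')  -> "'two words'"
#     shquote('a#b')        -> "'a#b'"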

class alias(option_base):
    """Define a shortcut that invokes one or more commands"""

    description = "define a shortcut to invoke one or more commands"
    command_consumes_arguments = True

    user_options = [
        ('remove', 'r', 'remove (unset) the alias'),
    ] + option_base.user_options

    boolean_options = option_base.boolean_options + ['remove']

    def initialize_options(self):
        option_base.initialize_options(self)
        self.args = None
        self.remove = None

    def finalize_options(self):
        option_base.finalize_options(self)
        if self.remove and len(self.args) != 1:
            raise DistutilsOptionError(
                "Must specify exactly one argument (the alias name) when "
                "using --remove"
            )

    def run(self):
        aliases = self.distribution.get_option_dict('aliases')

        if not self.args:
            print("Command Aliases")
            print("---------------")
            for alias in aliases:
                print("setup.py alias", format_alias(alias, aliases))
            return

        elif len(self.args) == 1:
            alias, = self.args
            if self.remove:
                command = None
            elif alias in aliases:
                print("setup.py alias", format_alias(alias, aliases))
                return
            else:
                print("No alias definition found for %r" % alias)
                return
        else:
            alias = self.args[0]
            command = ' '.join(map(shquote, self.args[1:]))

        edit_config(self.filename, {'aliases': {alias: command}}, self.dry_run)


def format_alias(name, aliases):
    source, command = aliases[name]
    if source == config_file('global'):
        source = '--global-config '
    elif source == config_file('user'):
        source = '--user-config '
    elif source == config_file('local'):
        source = ''
    else:
        source = '--filename=%r' % source
    return source + name + ' ' + command
@ -0,0 +1,456 @@
"""setuptools.command.bdist_egg

Build .egg distributions"""

from distutils.dir_util import remove_tree, mkpath
from distutils import log
from types import CodeType
import sys
import os
import re
import textwrap
import marshal

from pkg_resources import get_build_platform, Distribution, ensure_directory
from setuptools.extension import Library
from setuptools import Command

from sysconfig import get_path, get_python_version


def _get_purelib():
    return get_path("purelib")


def strip_module(filename):
    if '.' in filename:
        filename = os.path.splitext(filename)[0]
    if filename.endswith('module'):
        filename = filename[:-6]
    return filename


def sorted_walk(dir):
    """Do os.walk in a reproducible way,
    independent of indeterministic filesystem readdir order
    """
    for base, dirs, files in os.walk(dir):
        dirs.sort()
        files.sort()
        yield base, dirs, files


def write_stub(resource, pyfile):
    _stub_template = textwrap.dedent("""
        def __bootstrap__():
            global __bootstrap__, __loader__, __file__
            import sys, pkg_resources, importlib.util
            __file__ = pkg_resources.resource_filename(__name__, %r)
            __loader__ = None; del __bootstrap__, __loader__
            spec = importlib.util.spec_from_file_location(__name__, __file__)
            mod = importlib.util.module_from_spec(spec)
            spec.loader.exec_module(mod)
        __bootstrap__()
        """).lstrip()
    with open(pyfile, 'w') as f:
        f.write(_stub_template % resource)


class bdist_egg(Command):
    description = "create an \"egg\" distribution"

    user_options = [
        ('bdist-dir=', 'b',
         "temporary directory for creating the distribution"),
        ('plat-name=', 'p', "platform name to embed in generated filenames "
                            "(default: %s)" % get_build_platform()),
        ('exclude-source-files', None,
         "remove all .py files from the generated egg"),
        ('keep-temp', 'k',
         "keep the pseudo-installation tree around after " +
         "creating the distribution archive"),
        ('dist-dir=', 'd',
         "directory to put final built distributions in"),
        ('skip-build', None,
         "skip rebuilding everything (for testing/debugging)"),
    ]

    boolean_options = [
        'keep-temp', 'skip-build', 'exclude-source-files'
    ]

    def initialize_options(self):
        self.bdist_dir = None
        self.plat_name = None
        self.keep_temp = 0
        self.dist_dir = None
        self.skip_build = 0
        self.egg_output = None
        self.exclude_source_files = None

    def finalize_options(self):
        ei_cmd = self.ei_cmd = self.get_finalized_command("egg_info")
        self.egg_info = ei_cmd.egg_info

        if self.bdist_dir is None:
            bdist_base = self.get_finalized_command('bdist').bdist_base
            self.bdist_dir = os.path.join(bdist_base, 'egg')

        if self.plat_name is None:
            self.plat_name = get_build_platform()

        self.set_undefined_options('bdist', ('dist_dir', 'dist_dir'))

        if self.egg_output is None:

            # Compute filename of the output egg
            basename = Distribution(
                None, None, ei_cmd.egg_name, ei_cmd.egg_version,
                get_python_version(),
                self.distribution.has_ext_modules() and self.plat_name
            ).egg_name()

            self.egg_output = os.path.join(self.dist_dir, basename + '.egg')

    def do_install_data(self):
        # Hack for packages that install data to install's --install-lib
        self.get_finalized_command('install').install_lib = self.bdist_dir

        site_packages = os.path.normcase(os.path.realpath(_get_purelib()))
        old, self.distribution.data_files = self.distribution.data_files, []

        for item in old:
            if isinstance(item, tuple) and len(item) == 2:
                if os.path.isabs(item[0]):
                    realpath = os.path.realpath(item[0])
                    normalized = os.path.normcase(realpath)
                    if normalized == site_packages or normalized.startswith(
                            site_packages + os.sep
                    ):
                        item = realpath[len(site_packages) + 1:], item[1]
                        # XXX else: raise ???
            self.distribution.data_files.append(item)

        try:
            log.info("installing package data to %s", self.bdist_dir)
            self.call_command('install_data', force=0, root=None)
        finally:
            self.distribution.data_files = old

    def get_outputs(self):
        return [self.egg_output]

    def call_command(self, cmdname, **kw):
        """Invoke reinitialized command `cmdname` with keyword args"""
        for dirname in INSTALL_DIRECTORY_ATTRS:
            kw.setdefault(dirname, self.bdist_dir)
        kw.setdefault('skip_build', self.skip_build)
        kw.setdefault('dry_run', self.dry_run)
        cmd = self.reinitialize_command(cmdname, **kw)
        self.run_command(cmdname)
        return cmd

    def run(self):  # noqa: C901  # is too complex (14)  # FIXME
        # Generate metadata first
        self.run_command("egg_info")
        # We run install_lib before install_data, because some data hacks
        # pull their data path from the install_lib command.
        log.info("installing library code to %s", self.bdist_dir)
        instcmd = self.get_finalized_command('install')
        old_root = instcmd.root
        instcmd.root = None
        if self.distribution.has_c_libraries() and not self.skip_build:
            self.run_command('build_clib')
        cmd = self.call_command('install_lib', warn_dir=0)
        instcmd.root = old_root

        all_outputs, ext_outputs = self.get_ext_outputs()
        self.stubs = []
        to_compile = []
        for (p, ext_name) in enumerate(ext_outputs):
            filename, ext = os.path.splitext(ext_name)
            pyfile = os.path.join(self.bdist_dir, strip_module(filename) +
                                  '.py')
            self.stubs.append(pyfile)
            log.info("creating stub loader for %s", ext_name)
            if not self.dry_run:
                write_stub(os.path.basename(ext_name), pyfile)
            to_compile.append(pyfile)
            ext_outputs[p] = ext_name.replace(os.sep, '/')

        if to_compile:
            cmd.byte_compile(to_compile)
        if self.distribution.data_files:
            self.do_install_data()

        # Make the EGG-INFO directory
        archive_root = self.bdist_dir
        egg_info = os.path.join(archive_root, 'EGG-INFO')
        self.mkpath(egg_info)
        if self.distribution.scripts:
            script_dir = os.path.join(egg_info, 'scripts')
            log.info("installing scripts to %s", script_dir)
            self.call_command('install_scripts', install_dir=script_dir,
                              no_ep=1)

        self.copy_metadata_to(egg_info)
        native_libs = os.path.join(egg_info, "native_libs.txt")
        if all_outputs:
            log.info("writing %s", native_libs)
            if not self.dry_run:
                ensure_directory(native_libs)
                libs_file = open(native_libs, 'wt')
                libs_file.write('\n'.join(all_outputs))
                libs_file.write('\n')
                libs_file.close()
        elif os.path.isfile(native_libs):
            log.info("removing %s", native_libs)
            if not self.dry_run:
                os.unlink(native_libs)

        write_safety_flag(
            os.path.join(archive_root, 'EGG-INFO'), self.zip_safe()
        )

        if os.path.exists(os.path.join(self.egg_info, 'depends.txt')):
            log.warn(
                "WARNING: 'depends.txt' will not be used by setuptools 0.6!\n"
                "Use the install_requires/extras_require setup() args instead."
            )

        if self.exclude_source_files:
            self.zap_pyfiles()

        # Make the archive
        make_zipfile(self.egg_output, archive_root, verbose=self.verbose,
                     dry_run=self.dry_run, mode=self.gen_header())
        if not self.keep_temp:
            remove_tree(self.bdist_dir, dry_run=self.dry_run)

        # Add to 'Distribution.dist_files' so that the "upload" command works
        getattr(self.distribution, 'dist_files', []).append(
            ('bdist_egg', get_python_version(), self.egg_output))

    def zap_pyfiles(self):
        log.info("Removing .py files from temporary directory")
        for base, dirs, files in walk_egg(self.bdist_dir):
            for name in files:
                path = os.path.join(base, name)

                if name.endswith('.py'):
                    log.debug("Deleting %s", path)
                    os.unlink(path)

                if base.endswith('__pycache__'):
                    path_old = path

                    pattern = r'(?P<name>.+)\.(?P<magic>[^.]+)\.pyc'
                    m = re.match(pattern, name)
                    path_new = os.path.join(
                        base, os.pardir, m.group('name') + '.pyc')
                    log.info(
                        "Renaming file from [%s] to [%s]"
                        % (path_old, path_new))
                    try:
                        os.remove(path_new)
                    except OSError:
                        pass
                    os.rename(path_old, path_new)

    def zip_safe(self):
        safe = getattr(self.distribution, 'zip_safe', None)
        if safe is not None:
            return safe
        log.warn("zip_safe flag not set; analyzing archive contents...")
        return analyze_egg(self.bdist_dir, self.stubs)

    def gen_header(self):
        return 'w'

    def copy_metadata_to(self, target_dir):
        "Copy metadata (egg info) to the target_dir"
        # normalize the path (so that a forward-slash in egg_info will
        # match using startswith below)
        norm_egg_info = os.path.normpath(self.egg_info)
        prefix = os.path.join(norm_egg_info, '')
        for path in self.ei_cmd.filelist.files:
            if path.startswith(prefix):
                target = os.path.join(target_dir, path[len(prefix):])
                ensure_directory(target)
                self.copy_file(path, target)

    def get_ext_outputs(self):
        """Get a list of relative paths to C extensions in the output distro"""

        all_outputs = []
        ext_outputs = []

        paths = {self.bdist_dir: ''}
        for base, dirs, files in sorted_walk(self.bdist_dir):
            for filename in files:
                if os.path.splitext(filename)[1].lower() in NATIVE_EXTENSIONS:
                    all_outputs.append(paths[base] + filename)
            for filename in dirs:
                paths[os.path.join(base, filename)] = (paths[base] +
                                                       filename + '/')

        if self.distribution.has_ext_modules():
            build_cmd = self.get_finalized_command('build_ext')
            for ext in build_cmd.extensions:
                if isinstance(ext, Library):
                    continue
                fullname = build_cmd.get_ext_fullname(ext.name)
                filename = build_cmd.get_ext_filename(fullname)
                if not os.path.basename(filename).startswith('dl-'):
                    if os.path.exists(os.path.join(self.bdist_dir, filename)):
                        ext_outputs.append(filename)

        return all_outputs, ext_outputs


NATIVE_EXTENSIONS = dict.fromkeys('.dll .so .dylib .pyd'.split())


def walk_egg(egg_dir):
    """Walk an unpacked egg's contents, skipping the metadata directory"""
    walker = sorted_walk(egg_dir)
    base, dirs, files = next(walker)
    if 'EGG-INFO' in dirs:
        dirs.remove('EGG-INFO')
    yield base, dirs, files
    for bdf in walker:
        yield bdf


def analyze_egg(egg_dir, stubs):
    # check for existing flag in EGG-INFO
    for flag, fn in safety_flags.items():
        if os.path.exists(os.path.join(egg_dir, 'EGG-INFO', fn)):
            return flag
    if not can_scan():
        return False
    safe = True
    for base, dirs, files in walk_egg(egg_dir):
        for name in files:
            if name.endswith('.py') or name.endswith('.pyw'):
                continue
            elif name.endswith('.pyc') or name.endswith('.pyo'):
                # always scan, even if we already know we're not safe
                safe = scan_module(egg_dir, base, name, stubs) and safe
    return safe


def write_safety_flag(egg_dir, safe):
    # Write or remove zip safety flag file(s)
    for flag, fn in safety_flags.items():
        fn = os.path.join(egg_dir, fn)
        if os.path.exists(fn):
            if safe is None or bool(safe) != flag:
                os.unlink(fn)
        elif safe is not None and bool(safe) == flag:
            f = open(fn, 'wt')
            f.write('\n')
            f.close()


safety_flags = {
    True: 'zip-safe',
    False: 'not-zip-safe',
}


def scan_module(egg_dir, base, name, stubs):
    """Check whether module possibly uses unsafe-for-zipfile stuff"""

    filename = os.path.join(base, name)
    if filename[:-1] in stubs:
        return True  # Extension module
    pkg = base[len(egg_dir) + 1:].replace(os.sep, '.')
    module = pkg + (pkg and '.' or '') + os.path.splitext(name)[0]
    if sys.version_info < (3, 7):
        skip = 12  # skip magic & date & file size
    else:
        skip = 16  # skip magic & reserved? & date & file size
    f = open(filename, 'rb')
    f.read(skip)
    code = marshal.load(f)
    f.close()
    safe = True
    symbols = dict.fromkeys(iter_symbols(code))
    for bad in ['__file__', '__path__']:
        if bad in symbols:
            log.warn("%s: module references %s", module, bad)
            safe = False
    if 'inspect' in symbols:
        # note: the original list was missing the comma after 'getfile',
        # silently concatenating it with 'getsourcelines'; fixed here.
        for bad in [
            'getsource', 'getabsfile', 'getsourcefile', 'getfile',
            'getsourcelines', 'findsource', 'getcomments', 'getframeinfo',
            'getinnerframes', 'getouterframes', 'stack', 'trace'
        ]:
            if bad in symbols:
                log.warn("%s: module MAY be using inspect.%s", module, bad)
                safe = False
    return safe
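
# --- Illustrative sketch (not part of the vendored file): the kind of module
# scan_module() flags. Code like this (hypothetical) defeats zip import,
# because inside a zipped egg __file__ does not name a real filesystem path:
#
#     import os
#     HERE = os.path.dirname(__file__)                  # flagged: __file__
#     CFG = open(os.path.join(HERE, 'cfg.ini')).read()  # fails inside a zip
#
# The pkg_resources resource APIs (e.g. resource_stream) are the zip-safe
# route to bundled data.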

def iter_symbols(code):
    """Yield names and strings used by `code` and its nested code objects"""
    for name in code.co_names:
        yield name
    for const in code.co_consts:
        if isinstance(const, str):
            yield const
        elif isinstance(const, CodeType):
            for name in iter_symbols(const):
                yield name


def can_scan():
    if not sys.platform.startswith('java') and sys.platform != 'cli':
        # CPython, PyPy, etc.
        return True
    log.warn("Unable to analyze compiled code on this platform.")
    log.warn("Please ask the author to include a 'zip_safe'"
             " setting (either True or False) in the package's setup.py")


# Attribute names of options for commands that might need to be convinced to
# install to the egg build directory

INSTALL_DIRECTORY_ATTRS = [
    'install_lib', 'install_dir', 'install_data', 'install_base'
]


def make_zipfile(zip_filename, base_dir, verbose=0, dry_run=0, compress=True,
                 mode='w'):
    """Create a zip file from all the files under 'base_dir'. The output
    zip file will be named 'base_dir' + ".zip". Uses either the "zipfile"
    Python module (if available) or the InfoZIP "zip" utility (if installed
    and found on the default search path). If neither tool is available,
    raises DistutilsExecError. Returns the name of the output zip file.
    """
    import zipfile

    mkpath(os.path.dirname(zip_filename), dry_run=dry_run)
    log.info("creating '%s' and adding '%s' to it", zip_filename, base_dir)

    def visit(z, dirname, names):
        for name in names:
            path = os.path.normpath(os.path.join(dirname, name))
            if os.path.isfile(path):
                p = path[len(base_dir) + 1:]
                if not dry_run:
                    z.write(path, p)
                log.debug("adding '%s'", p)

    compression = zipfile.ZIP_DEFLATED if compress else zipfile.ZIP_STORED
    if not dry_run:
        z = zipfile.ZipFile(zip_filename, mode, compression=compression)
        for dirname, dirs, files in sorted_walk(base_dir):
            visit(z, dirname, files)
        z.close()
    else:
        for dirname, dirs, files in sorted_walk(base_dir):
            visit(None, dirname, files)
    return zip_filename
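
# --- Illustrative sketch (not part of the vendored file): make_zipfile() is
# how run() above produces the final .egg; it can also be called directly
# with hypothetical paths:
#
#     make_zipfile('dist/mypkg-1.0-py3.10.egg', 'build/bdist.linux/egg')
#
# Because it iterates with sorted_walk(), member order is deterministic, so
# the same tree zips to the same archive layout on every build.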
@ -0,0 +1,40 @@
import distutils.command.bdist_rpm as orig
import warnings

from setuptools import SetuptoolsDeprecationWarning


class bdist_rpm(orig.bdist_rpm):
    """
    Override the default bdist_rpm behavior to do the following:

    1. Run egg_info to ensure the name and version are properly calculated.
    2. Always run 'install' using --single-version-externally-managed to
       disable eggs in RPM distributions.
    """

    def run(self):
        warnings.warn(
            "bdist_rpm is deprecated and will be removed in a future "
            "version. Use bdist_wheel (wheel packages) instead.",
            SetuptoolsDeprecationWarning,
        )

        # ensure distro name is up-to-date
        self.run_command('egg_info')

        orig.bdist_rpm.run(self)

    def _make_spec_file(self):
        spec = orig.bdist_rpm._make_spec_file(self)
        spec = [
            line.replace(
                "setup.py install ",
                "setup.py install --single-version-externally-managed "
            ).replace(
                "%setup",
                "%setup -n %{name}-%{unmangled_version}"
            )
            for line in spec
        ]
        return spec
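
# --- Illustrative sketch (not part of the vendored file): the effect of the
# two replace() calls in _make_spec_file() on generated .spec lines (the
# input lines here are hypothetical):
#
#     '%setup'
#         -> '%setup -n %{name}-%{unmangled_version}'
#     '... setup.py install --root=$RPM_BUILD_ROOT ...'
#         -> '... setup.py install --single-version-externally-managed '
#            '--root=$RPM_BUILD_ROOT ...'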
@ -0,0 +1,101 @@
import distutils.command.build_clib as orig
from distutils.errors import DistutilsSetupError
from distutils import log
from setuptools.dep_util import newer_pairwise_group


class build_clib(orig.build_clib):
    """
    Override the default build_clib behaviour to do the following:

    1. Implement a rudimentary timestamp-based dependency system
       so 'compile()' doesn't run every time.
    2. Add more keys to the 'build_info' dictionary:
        * obj_deps - specify dependencies for each object compiled.
                     this should be a dictionary mapping a key
                     with the source filename to a list of
                     dependencies. Use an empty string for global
                     dependencies.
        * cflags   - specify a list of additional flags to pass to
                     the compiler.
    """
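    # --- Illustrative sketch (not part of the vendored file): a hypothetical
    # 'libraries' entry in setup() using the extra build_info keys described
    # above:
    #
    #     libraries=[('foo', {
    #         'sources': ['src/foo.c', 'src/bar.c'],
    #         'obj_deps': {
    #             '': ['include/common.h'],        # global: every object
    #             'src/foo.c': ['include/foo.h'],  # just foo.o
    #         },
    #         'cflags': ['-O2'],
    #     })]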

    def build_libraries(self, libraries):
        for (lib_name, build_info) in libraries:
            sources = build_info.get('sources')
            if sources is None or not isinstance(sources, (list, tuple)):
                raise DistutilsSetupError(
                    "in 'libraries' option (library '%s'), "
                    "'sources' must be present and must be "
                    "a list of source filenames" % lib_name)
            sources = list(sources)

            log.info("building '%s' library", lib_name)

            # Make sure everything is the correct type.
            # obj_deps should be a dictionary of keys as sources
            # and a list/tuple of files that are its dependencies.
            obj_deps = build_info.get('obj_deps', dict())
            if not isinstance(obj_deps, dict):
                raise DistutilsSetupError(
                    "in 'libraries' option (library '%s'), "
                    "'obj_deps' must be a dictionary of "
                    "type 'source: list'" % lib_name)
            dependencies = []

            # Get the global dependencies that are specified by the '' key.
            # These will go into every source's dependency list.
            global_deps = obj_deps.get('', list())
            if not isinstance(global_deps, (list, tuple)):
                raise DistutilsSetupError(
                    "in 'libraries' option (library '%s'), "
                    "'obj_deps' must be a dictionary of "
                    "type 'source: list'" % lib_name)

            # Build the list to be used by newer_pairwise_group
            # each source will be auto-added to its dependencies.
            for source in sources:
                src_deps = [source]
                src_deps.extend(global_deps)
                extra_deps = obj_deps.get(source, list())
                if not isinstance(extra_deps, (list, tuple)):
                    raise DistutilsSetupError(
                        "in 'libraries' option (library '%s'), "
                        "'obj_deps' must be a dictionary of "
                        "type 'source: list'" % lib_name)
                src_deps.extend(extra_deps)
                dependencies.append(src_deps)

            expected_objects = self.compiler.object_filenames(
                sources,
                output_dir=self.build_temp,
            )

            if (
                newer_pairwise_group(dependencies, expected_objects)
                != ([], [])
            ):
                # First, compile the source code to object files in the library
                # directory. (This should probably change to putting object
                # files in a temporary build directory.)
                macros = build_info.get('macros')
                include_dirs = build_info.get('include_dirs')
                cflags = build_info.get('cflags')
                self.compiler.compile(
                    sources,
                    output_dir=self.build_temp,
                    macros=macros,
                    include_dirs=include_dirs,
                    extra_postargs=cflags,
                    debug=self.debug
                )

                # Now "link" the object files together into a static library.
                # (On Unix at least, this isn't really linking -- it just
                # builds an archive. Whatever.)
                self.compiler.create_static_lib(
                    expected_objects,
                    lib_name,
                    output_dir=self.build_clib,
                    debug=self.debug
                )
@ -0,0 +1,328 @@
import os
import sys
import itertools
from importlib.machinery import EXTENSION_SUFFIXES
from distutils.command.build_ext import build_ext as _du_build_ext
from distutils.file_util import copy_file
from distutils.ccompiler import new_compiler
from distutils.sysconfig import customize_compiler, get_config_var
from distutils.errors import DistutilsError
from distutils import log

from setuptools.extension import Library

try:
    # Attempt to use Cython for building extensions, if available
    from Cython.Distutils.build_ext import build_ext as _build_ext
    # Additionally, assert that the compiler module will load
    # also. Ref #1229.
    __import__('Cython.Compiler.Main')
except ImportError:
    _build_ext = _du_build_ext

# make sure _config_vars is initialized
get_config_var("LDSHARED")
from distutils.sysconfig import _config_vars as _CONFIG_VARS  # noqa


def _customize_compiler_for_shlib(compiler):
    if sys.platform == "darwin":
        # building .dylib requires additional compiler flags on OSX; here we
        # temporarily substitute the pyconfig.h variables so that distutils'
        # 'customize_compiler' uses them before we build the shared libraries.
        tmp = _CONFIG_VARS.copy()
        try:
            # XXX Help! I don't have any idea whether these are right...
            _CONFIG_VARS['LDSHARED'] = (
                "gcc -Wl,-x -dynamiclib -undefined dynamic_lookup")
            _CONFIG_VARS['CCSHARED'] = " -dynamiclib"
            _CONFIG_VARS['SO'] = ".dylib"
            customize_compiler(compiler)
        finally:
            _CONFIG_VARS.clear()
            _CONFIG_VARS.update(tmp)
    else:
        customize_compiler(compiler)


have_rtld = False
use_stubs = False
libtype = 'shared'

if sys.platform == "darwin":
    use_stubs = True
elif os.name != 'nt':
    try:
        import dl
        use_stubs = have_rtld = hasattr(dl, 'RTLD_NOW')
    except ImportError:
        pass


def if_dl(s):
    return s if have_rtld else ''


def get_abi3_suffix():
    """Return the file extension for an abi3-compliant Extension()"""
    for suffix in EXTENSION_SUFFIXES:
        if '.abi3' in suffix:  # Unix
            return suffix
        elif suffix == '.pyd':  # Windows
            return suffix
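
# --- Illustrative sketch (not part of the vendored file): typical values of
# get_abi3_suffix(), which get_ext_filename() below substitutes for
# extensions that declare py_limited_api (version/platform tags here are
# examples):
#
#     CPython on Linux:    '.abi3.so'
#     CPython on Windows:  '.pyd'
#
# so 'spam.cpython-310-x86_64-linux-gnu.so' becomes 'spam.abi3.so'.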
||||
class build_ext(_build_ext):
|
||||
def run(self):
|
||||
"""Build extensions in build directory, then copy if --inplace"""
|
||||
old_inplace, self.inplace = self.inplace, 0
|
||||
_build_ext.run(self)
|
||||
self.inplace = old_inplace
|
||||
if old_inplace:
|
||||
self.copy_extensions_to_source()
|
||||
|
||||
def copy_extensions_to_source(self):
|
||||
build_py = self.get_finalized_command('build_py')
|
||||
for ext in self.extensions:
|
||||
fullname = self.get_ext_fullname(ext.name)
|
||||
filename = self.get_ext_filename(fullname)
|
||||
modpath = fullname.split('.')
|
||||
package = '.'.join(modpath[:-1])
|
||||
package_dir = build_py.get_package_dir(package)
|
||||
dest_filename = os.path.join(package_dir,
|
||||
os.path.basename(filename))
|
||||
src_filename = os.path.join(self.build_lib, filename)
|
||||
|
||||
# Always copy, even if source is older than destination, to ensure
|
||||
# that the right extensions for the current Python/platform are
|
||||
# used.
|
||||
copy_file(
|
||||
src_filename, dest_filename, verbose=self.verbose,
|
||||
dry_run=self.dry_run
|
||||
)
|
||||
if ext._needs_stub:
|
||||
self.write_stub(package_dir or os.curdir, ext, True)
|
||||
|
||||
def get_ext_filename(self, fullname):
|
||||
so_ext = os.getenv('SETUPTOOLS_EXT_SUFFIX')
|
||||
if so_ext:
|
||||
filename = os.path.join(*fullname.split('.')) + so_ext
|
||||
else:
|
||||
filename = _build_ext.get_ext_filename(self, fullname)
|
||||
so_ext = get_config_var('EXT_SUFFIX')
|
||||
|
||||
if fullname in self.ext_map:
|
||||
ext = self.ext_map[fullname]
|
||||
use_abi3 = getattr(ext, 'py_limited_api') and get_abi3_suffix()
|
||||
if use_abi3:
|
||||
filename = filename[:-len(so_ext)]
|
||||
so_ext = get_abi3_suffix()
|
||||
filename = filename + so_ext
|
||||
if isinstance(ext, Library):
|
||||
fn, ext = os.path.splitext(filename)
|
||||
return self.shlib_compiler.library_filename(fn, libtype)
|
||||
elif use_stubs and ext._links_to_dynamic:
|
||||
d, fn = os.path.split(filename)
|
||||
return os.path.join(d, 'dl-' + fn)
|
||||
return filename
|
||||
|
||||
def initialize_options(self):
|
||||
_build_ext.initialize_options(self)
|
||||
self.shlib_compiler = None
|
||||
self.shlibs = []
|
||||
self.ext_map = {}
|
||||
|
||||
def finalize_options(self):
|
||||
_build_ext.finalize_options(self)
|
||||
self.extensions = self.extensions or []
|
||||
self.check_extensions_list(self.extensions)
|
||||
self.shlibs = [ext for ext in self.extensions
|
||||
if isinstance(ext, Library)]
|
||||
if self.shlibs:
|
||||
self.setup_shlib_compiler()
|
||||
for ext in self.extensions:
|
||||
ext._full_name = self.get_ext_fullname(ext.name)
|
||||
for ext in self.extensions:
|
||||
fullname = ext._full_name
|
||||
self.ext_map[fullname] = ext
|
||||
|
||||
# distutils 3.1 will also ask for module names
|
||||
# XXX what to do with conflicts?
|
||||
self.ext_map[fullname.split('.')[-1]] = ext
|
||||
|
||||
ltd = self.shlibs and self.links_to_dynamic(ext) or False
|
||||
ns = ltd and use_stubs and not isinstance(ext, Library)
|
||||
ext._links_to_dynamic = ltd
|
||||
ext._needs_stub = ns
|
||||
filename = ext._file_name = self.get_ext_filename(fullname)
|
||||
libdir = os.path.dirname(os.path.join(self.build_lib, filename))
|
||||
if ltd and libdir not in ext.library_dirs:
|
||||
ext.library_dirs.append(libdir)
|
||||
if ltd and use_stubs and os.curdir not in ext.runtime_library_dirs:
|
||||
ext.runtime_library_dirs.append(os.curdir)
|
||||
|
||||
def setup_shlib_compiler(self):
|
||||
compiler = self.shlib_compiler = new_compiler(
|
||||
compiler=self.compiler, dry_run=self.dry_run, force=self.force
|
||||
)
|
||||
_customize_compiler_for_shlib(compiler)
|
||||
|
||||
if self.include_dirs is not None:
|
||||
compiler.set_include_dirs(self.include_dirs)
|
||||
if self.define is not None:
|
||||
# 'define' option is a list of (name,value) tuples
|
||||
for (name, value) in self.define:
|
||||
compiler.define_macro(name, value)
|
||||
if self.undef is not None:
|
||||
for macro in self.undef:
|
||||
compiler.undefine_macro(macro)
|
||||
if self.libraries is not None:
|
||||
compiler.set_libraries(self.libraries)
|
||||
if self.library_dirs is not None:
|
||||
compiler.set_library_dirs(self.library_dirs)
|
||||
if self.rpath is not None:
|
||||
compiler.set_runtime_library_dirs(self.rpath)
|
||||
if self.link_objects is not None:
|
||||
compiler.set_link_objects(self.link_objects)
|
||||
|
||||
# hack so distutils' build_extension() builds a library instead
|
||||
compiler.link_shared_object = link_shared_object.__get__(compiler)
|
||||
|
||||
def get_export_symbols(self, ext):
|
||||
if isinstance(ext, Library):
|
||||
return ext.export_symbols
|
||||
return _build_ext.get_export_symbols(self, ext)
|
||||
|
||||
def build_extension(self, ext):
|
||||
ext._convert_pyx_sources_to_lang()
|
||||
_compiler = self.compiler
|
||||
try:
|
||||
if isinstance(ext, Library):
|
||||
self.compiler = self.shlib_compiler
|
||||
_build_ext.build_extension(self, ext)
|
||||
if ext._needs_stub:
|
||||
cmd = self.get_finalized_command('build_py').build_lib
|
||||
self.write_stub(cmd, ext)
|
||||
finally:
|
||||
self.compiler = _compiler
|
||||
|
||||
def links_to_dynamic(self, ext):
|
||||
"""Return true if 'ext' links to a dynamic lib in the same package"""
|
||||
# XXX this should check to ensure the lib is actually being built
|
||||
# XXX as dynamic, and not just using a locally-found version or a
|
||||
# XXX static-compiled version
|
||||
libnames = dict.fromkeys([lib._full_name for lib in self.shlibs])
|
||||
pkg = '.'.join(ext._full_name.split('.')[:-1] + [''])
|
||||
return any(pkg + libname in libnames for libname in ext.libraries)
|
||||
|
||||
def get_outputs(self):
|
||||
return _build_ext.get_outputs(self) + self.__get_stubs_outputs()
|
||||
|
||||
def __get_stubs_outputs(self):
|
||||
# assemble the base name for each extension that needs a stub
|
||||
ns_ext_bases = (
|
||||
os.path.join(self.build_lib, *ext._full_name.split('.'))
|
||||
for ext in self.extensions
|
||||
if ext._needs_stub
|
||||
)
|
||||
# pair each base with the extension
|
||||
pairs = itertools.product(ns_ext_bases, self.__get_output_extensions())
|
||||
return list(base + fnext for base, fnext in pairs)
|
||||
|
||||
def __get_output_extensions(self):
|
||||
yield '.py'
|
||||
yield '.pyc'
|
||||
if self.get_finalized_command('build_py').optimize:
|
||||
yield '.pyo'

    def write_stub(self, output_dir, ext, compile=False):
        log.info("writing stub loader for %s to %s", ext._full_name,
                 output_dir)
        stub_file = (os.path.join(output_dir, *ext._full_name.split('.')) +
                     '.py')
        if compile and os.path.exists(stub_file):
            raise DistutilsError(stub_file + " already exists! Please delete.")
        if not self.dry_run:
            f = open(stub_file, 'w')
            f.write(
                '\n'.join([
                    "def __bootstrap__():",
                    "   global __bootstrap__, __file__, __loader__",
                    "   import sys, os, pkg_resources, importlib.util" +
                    if_dl(", dl"),
                    "   __file__ = pkg_resources.resource_filename"
                    "(__name__,%r)"
                    % os.path.basename(ext._file_name),
                    "   del __bootstrap__",
                    "   if '__loader__' in globals():",
                    "       del __loader__",
                    if_dl("   old_flags = sys.getdlopenflags()"),
                    "   old_dir = os.getcwd()",
                    "   try:",
                    "     os.chdir(os.path.dirname(__file__))",
                    if_dl("     sys.setdlopenflags(dl.RTLD_NOW)"),
                    "     spec = importlib.util.spec_from_file_location(",
                    "                __name__, __file__)",
                    "     mod = importlib.util.module_from_spec(spec)",
                    "     spec.loader.exec_module(mod)",
                    "   finally:",
                    if_dl("     sys.setdlopenflags(old_flags)"),
                    "     os.chdir(old_dir)",
                    "__bootstrap__()",
                    ""  # terminal \n
                ])
            )
            f.close()
        if compile:
            from distutils.util import byte_compile

            byte_compile([stub_file], optimize=0,
                         force=True, dry_run=self.dry_run)
            optimize = self.get_finalized_command('install_lib').optimize
            if optimize > 0:
                byte_compile([stub_file], optimize=optimize,
                             force=True, dry_run=self.dry_run)
            if os.path.exists(stub_file) and not self.dry_run:
                os.unlink(stub_file)
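        # The stub written above is a pure-Python loader: importing the
        # extension first runs __bootstrap__(), which resolves the real
        # shared object via pkg_resources, chdirs next to it, and loads it
        # with importlib -- the mechanism that lets C extensions inside
        # zipped eggs be imported.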


if use_stubs or os.name == 'nt':
    # Build shared libraries
    #
    def link_shared_object(
            self, objects, output_libname, output_dir=None, libraries=None,
            library_dirs=None, runtime_library_dirs=None, export_symbols=None,
            debug=0, extra_preargs=None, extra_postargs=None, build_temp=None,
            target_lang=None):
        self.link(
            self.SHARED_LIBRARY, objects, output_libname,
            output_dir, libraries, library_dirs, runtime_library_dirs,
            export_symbols, debug, extra_preargs, extra_postargs,
            build_temp, target_lang
        )
else:
    # Build static libraries everywhere else
    libtype = 'static'

    def link_shared_object(
            self, objects, output_libname, output_dir=None, libraries=None,
            library_dirs=None, runtime_library_dirs=None, export_symbols=None,
            debug=0, extra_preargs=None, extra_postargs=None, build_temp=None,
            target_lang=None):
        # XXX we need to either disallow these attrs on Library instances,
        # or warn/abort here if set, or something...
        # libraries=None, library_dirs=None, runtime_library_dirs=None,
        # export_symbols=None, extra_preargs=None, extra_postargs=None,
        # build_temp=None

        assert output_dir is None  # distutils build_ext doesn't pass this
        output_dir, filename = os.path.split(output_libname)
        basename, ext = os.path.splitext(filename)
        if self.library_filename("x").startswith('lib'):
            # strip 'lib' prefix; this is kludgy if some platform uses
            # a different prefix
            basename = basename[3:]

        self.create_static_lib(
            objects, basename, output_dir, debug, target_lang
        )
@@ -0,0 +1,242 @@
from glob import glob
from distutils.util import convert_path
import distutils.command.build_py as orig
import os
import fnmatch
import textwrap
import io
import distutils.errors
import itertools
import stat
from setuptools.extern.more_itertools import unique_everseen


def make_writable(target):
    os.chmod(target, os.stat(target).st_mode | stat.S_IWRITE)


class build_py(orig.build_py):
    """Enhanced 'build_py' command that includes data files with packages

    The data files are specified via a 'package_data' argument to 'setup()'.
    See 'setuptools.dist.Distribution' for more details.

    Also, this version of the 'build_py' command allows you to specify both
    'py_modules' and 'packages' in the same setup operation.
    """

    def finalize_options(self):
        orig.build_py.finalize_options(self)
        self.package_data = self.distribution.package_data
        self.exclude_package_data = self.distribution.exclude_package_data or {}
        if 'data_files' in self.__dict__:
            del self.__dict__['data_files']
        self.__updated_files = []

    def run(self):
        """Build modules, packages, and copy data files to build directory"""
        if not self.py_modules and not self.packages:
            return

        if self.py_modules:
            self.build_modules()

        if self.packages:
            self.build_packages()
            self.build_package_data()

        # Only compile actual .py files, using our base class' idea of what our
        # output files are.
        self.byte_compile(orig.build_py.get_outputs(self, include_bytecode=0))

    def __getattr__(self, attr):
        "lazily compute data files"
        if attr == 'data_files':
            self.data_files = self._get_data_files()
            return self.data_files
        return orig.build_py.__getattr__(self, attr)

    def build_module(self, module, module_file, package):
        outfile, copied = orig.build_py.build_module(self, module, module_file, package)
        if copied:
            self.__updated_files.append(outfile)
        return outfile, copied

    def _get_data_files(self):
        """Generate list of '(package,src_dir,build_dir,filenames)' tuples"""
        self.analyze_manifest()
        return list(map(self._get_pkg_data_files, self.packages or ()))

    def get_data_files_without_manifest(self):
        """
        Generate list of ``(package,src_dir,build_dir,filenames)`` tuples,
        but without triggering any attempt to analyze or build the manifest.
        """
        # Prevent eventual errors from unset `manifest_files`
        # (that would otherwise be set by `analyze_manifest`)
        self.__dict__.setdefault('manifest_files', {})
        return list(map(self._get_pkg_data_files, self.packages or ()))

    def _get_pkg_data_files(self, package):
        # Locate package source directory
        src_dir = self.get_package_dir(package)

        # Compute package build directory
        build_dir = os.path.join(*([self.build_lib] + package.split('.')))

        # Strip directory from globbed filenames
        filenames = [
            os.path.relpath(file, src_dir)
            for file in self.find_data_files(package, src_dir)
        ]
        return package, src_dir, build_dir, filenames

    def find_data_files(self, package, src_dir):
        """Return filenames for package's data files in 'src_dir'"""
        patterns = self._get_platform_patterns(
            self.package_data,
            package,
            src_dir,
        )
        globs_expanded = map(glob, patterns)
        # flatten the expanded globs into an iterable of matches
        globs_matches = itertools.chain.from_iterable(globs_expanded)
        glob_files = filter(os.path.isfile, globs_matches)
        files = itertools.chain(
            self.manifest_files.get(package, []),
            glob_files,
        )
        return self.exclude_data_files(package, src_dir, files)
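        # Illustrative example (hypothetical values): with
        # package_data={'pkg': ['data/*.json']} and src_dir 'src/pkg', the
        # patterns expand to 'src/pkg/data/*.json'; glob() resolves them to
        # real files, manifest-derived files are chained in, and
        # exclude_data_files() applies exclude_package_data last.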

    def build_package_data(self):
        """Copy data files into build directory"""
        for package, src_dir, build_dir, filenames in self.data_files:
            for filename in filenames:
                target = os.path.join(build_dir, filename)
                self.mkpath(os.path.dirname(target))
                srcfile = os.path.join(src_dir, filename)
                outf, copied = self.copy_file(srcfile, target)
                make_writable(target)
                srcfile = os.path.abspath(srcfile)

    def analyze_manifest(self):
        self.manifest_files = mf = {}
        if not self.distribution.include_package_data:
            return
        src_dirs = {}
        for package in self.packages or ():
            # Locate package source directory
            src_dirs[assert_relative(self.get_package_dir(package))] = package

        self.run_command('egg_info')
        ei_cmd = self.get_finalized_command('egg_info')
        for path in ei_cmd.filelist.files:
            d, f = os.path.split(assert_relative(path))
            prev = None
            oldf = f
            while d and d != prev and d not in src_dirs:
                prev = d
                d, df = os.path.split(d)
                f = os.path.join(df, f)
            if d in src_dirs:
                if path.endswith('.py') and f == oldf:
                    continue  # it's a module, not data
                mf.setdefault(src_dirs[d], []).append(path)
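            # The while-loop above strips one path component at a time, e.g.
            # (hypothetical) 'src/pkg/data/x.json' becomes d='src/pkg' with
            # f='data/x.json', so each manifest file is credited to the
            # nearest enclosing package source directory.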

    def get_data_files(self):
        pass  # Lazily compute data files in _get_data_files() function.

    def check_package(self, package, package_dir):
        """Check namespace packages' __init__ for declare_namespace"""
        try:
            return self.packages_checked[package]
        except KeyError:
            pass

        init_py = orig.build_py.check_package(self, package, package_dir)
        self.packages_checked[package] = init_py

        if not init_py or not self.distribution.namespace_packages:
            return init_py

        for pkg in self.distribution.namespace_packages:
            if pkg == package or pkg.startswith(package + '.'):
                break
        else:
            return init_py

        with io.open(init_py, 'rb') as f:
            contents = f.read()
        if b'declare_namespace' not in contents:
            raise distutils.errors.DistutilsError(
                "Namespace package problem: %s is a namespace package, but "
                "its\n__init__.py does not call declare_namespace()! Please "
                'fix it.\n(See the setuptools manual under '
                '"Namespace Packages" for details.)\n' % (package,)
            )
        return init_py

    def initialize_options(self):
        self.packages_checked = {}
        orig.build_py.initialize_options(self)

    def get_package_dir(self, package):
        res = orig.build_py.get_package_dir(self, package)
        if self.distribution.src_root is not None:
            return os.path.join(self.distribution.src_root, res)
        return res

    def exclude_data_files(self, package, src_dir, files):
        """Filter filenames for package's data files in 'src_dir'"""
        files = list(files)
        patterns = self._get_platform_patterns(
            self.exclude_package_data,
            package,
            src_dir,
        )
        match_groups = (fnmatch.filter(files, pattern) for pattern in patterns)
        # flatten the groups of matches into an iterable of matches
        matches = itertools.chain.from_iterable(match_groups)
        bad = set(matches)
        keepers = (fn for fn in files if fn not in bad)
        # ditch dupes
        return list(unique_everseen(keepers))

    @staticmethod
    def _get_platform_patterns(spec, package, src_dir):
        """
        yield platform-specific path patterns (suitable for glob
        or fn_match) from a glob-based spec (such as
        self.package_data or self.exclude_package_data)
        matching package in src_dir.
        """
        raw_patterns = itertools.chain(
            spec.get('', []),
            spec.get(package, []),
        )
        return (
            # Each pattern has to be converted to a platform-specific path
            os.path.join(src_dir, convert_path(pattern))
            for pattern in raw_patterns
        )
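        # Lookup order note: patterns under the empty key '' apply to every
        # package, then package-specific patterns follow; e.g. (hypothetical)
        # spec {'': ['*.cfg'], 'pkg': ['data/*.json']} yields both
        # '<src_dir>/*.cfg' and '<src_dir>/data/*.json' for package 'pkg'.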


def assert_relative(path):
    if not os.path.isabs(path):
        return path
    from distutils.errors import DistutilsSetupError

    msg = (
        textwrap.dedent(
            """
            Error: setup script specifies an absolute path:

                %s

            setup() arguments must *always* be /-separated paths relative to the
            setup.py directory, *never* absolute paths.
            """
        ).lstrip()
        % path
    )
    raise DistutilsSetupError(msg)
@@ -0,0 +1,193 @@
from distutils.util import convert_path
from distutils import log
from distutils.errors import DistutilsError, DistutilsOptionError
import os
import glob
import io

import pkg_resources
from setuptools.command.easy_install import easy_install
from setuptools import namespaces
import setuptools


class develop(namespaces.DevelopInstaller, easy_install):
    """Set up package for development"""

    description = "install package in 'development mode'"

    user_options = easy_install.user_options + [
        ("uninstall", "u", "Uninstall this source package"),
        ("egg-path=", None, "Set the path to be used in the .egg-link file"),
    ]

    boolean_options = easy_install.boolean_options + ['uninstall']

    command_consumes_arguments = False  # override base

    def run(self):
        if self.uninstall:
            self.multi_version = True
            self.uninstall_link()
            self.uninstall_namespaces()
        else:
            self.install_for_development()
        self.warn_deprecated_options()

    def initialize_options(self):
        self.uninstall = None
        self.egg_path = None
        easy_install.initialize_options(self)
        self.setup_path = None
        self.always_copy_from = '.'  # always copy eggs installed in curdir

    def finalize_options(self):
        ei = self.get_finalized_command("egg_info")
        if ei.broken_egg_info:
            template = "Please rename %r to %r before using 'develop'"
            args = ei.egg_info, ei.broken_egg_info
            raise DistutilsError(template % args)
        self.args = [ei.egg_name]

        easy_install.finalize_options(self)
        self.expand_basedirs()
        self.expand_dirs()
        # pick up setup-dir .egg files only: no .egg-info
        self.package_index.scan(glob.glob('*.egg'))

        egg_link_fn = ei.egg_name + '.egg-link'
        self.egg_link = os.path.join(self.install_dir, egg_link_fn)
        self.egg_base = ei.egg_base
        if self.egg_path is None:
            self.egg_path = os.path.abspath(ei.egg_base)

        target = pkg_resources.normalize_path(self.egg_base)
        egg_path = pkg_resources.normalize_path(
            os.path.join(self.install_dir, self.egg_path)
        )
        if egg_path != target:
            raise DistutilsOptionError(
                "--egg-path must be a relative path from the install"
                " directory to " + target
            )

        # Make a distribution for the package's source
        self.dist = pkg_resources.Distribution(
            target,
            pkg_resources.PathMetadata(target, os.path.abspath(ei.egg_info)),
            project_name=ei.egg_name,
        )

        self.setup_path = self._resolve_setup_path(
            self.egg_base,
            self.install_dir,
            self.egg_path,
        )

    @staticmethod
    def _resolve_setup_path(egg_base, install_dir, egg_path):
        """
        Generate a path from egg_base back to '.' where the
        setup script resides and ensure that path points to the
        setup path from $install_dir/$egg_path.
        """
        path_to_setup = egg_base.replace(os.sep, '/').rstrip('/')
        if path_to_setup != os.curdir:
            path_to_setup = '../' * (path_to_setup.count('/') + 1)
        resolved = pkg_resources.normalize_path(
            os.path.join(install_dir, egg_path, path_to_setup)
        )
        if resolved != pkg_resources.normalize_path(os.curdir):
            raise DistutilsOptionError(
                "Can't get a consistent path to setup script from"
                " installation directory",
                resolved,
                pkg_resources.normalize_path(os.curdir),
            )
        return path_to_setup
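        # Worked example: egg_base 'src' has zero '/' once normalized, so
        # path_to_setup becomes '../'; egg_base 'a/b' becomes '../../'. The
        # check above then verifies that install_dir/egg_path/<that path>
        # resolves back to the directory holding setup.py.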

    def install_for_development(self):
        self.run_command('egg_info')

        # Build extensions in-place
        self.reinitialize_command('build_ext', inplace=1)
        self.run_command('build_ext')

        if setuptools.bootstrap_install_from:
            self.easy_install(setuptools.bootstrap_install_from)
            setuptools.bootstrap_install_from = None

        self.install_namespaces()

        # create an .egg-link in the installation dir, pointing to our egg
        log.info("Creating %s (link to %s)", self.egg_link, self.egg_base)
        if not self.dry_run:
            with open(self.egg_link, "w") as f:
                f.write(self.egg_path + "\n" + self.setup_path)
        # postprocess the installed distro, fixing up .pth, installing scripts,
        # and handling requirements
        self.process_distribution(None, self.dist, not self.no_deps)

    def uninstall_link(self):
        if os.path.exists(self.egg_link):
            log.info("Removing %s (link to %s)", self.egg_link, self.egg_base)
            egg_link_file = open(self.egg_link)
            contents = [line.rstrip() for line in egg_link_file]
            egg_link_file.close()
            if contents not in ([self.egg_path], [self.egg_path, self.setup_path]):
                log.warn("Link points to %s: uninstall aborted", contents)
                return
            if not self.dry_run:
                os.unlink(self.egg_link)
        if not self.dry_run:
            self.update_pth(self.dist)  # remove any .pth link to us
        if self.distribution.scripts:
            # XXX should also check for entry point scripts!
            log.warn("Note: you must uninstall or replace scripts manually!")

    def install_egg_scripts(self, dist):
        if dist is not self.dist:
            # Installing a dependency, so fall back to normal behavior
            return easy_install.install_egg_scripts(self, dist)

        # create wrapper scripts in the script dir, pointing to dist.scripts

        # new-style...
        self.install_wrapper_scripts(dist)

        # ...and old-style
        for script_name in self.distribution.scripts or []:
            script_path = os.path.abspath(convert_path(script_name))
            script_name = os.path.basename(script_path)
            with io.open(script_path) as strm:
                script_text = strm.read()
            self.install_script(dist, script_name, script_text, script_path)

    def install_wrapper_scripts(self, dist):
        dist = VersionlessRequirement(dist)
        return easy_install.install_wrapper_scripts(self, dist)


class VersionlessRequirement:
    """
    Adapt a pkg_resources.Distribution to simply return the project
    name as the 'requirement' so that scripts will work across
    multiple versions.

    >>> from pkg_resources import Distribution
    >>> dist = Distribution(project_name='foo', version='1.0')
    >>> str(dist.as_requirement())
    'foo==1.0'
    >>> adapted_dist = VersionlessRequirement(dist)
    >>> str(adapted_dist.as_requirement())
    'foo'
    """

    def __init__(self, dist):
        self.__dist = dist

    def __getattr__(self, name):
        return getattr(self.__dist, name)

    def as_requirement(self):
        return self.project_name
@@ -0,0 +1,36 @@
"""
Create a dist_info directory
As defined in the wheel specification
"""

import os

from distutils.core import Command
from distutils import log


class dist_info(Command):

    description = 'create a .dist-info directory'

    user_options = [
        ('egg-base=', 'e', "directory containing .egg-info directories"
                           " (default: top of the source tree)"),
    ]

    def initialize_options(self):
        self.egg_base = None

    def finalize_options(self):
        pass

    def run(self):
        egg_info = self.get_finalized_command('egg_info')
        egg_info.egg_base = self.egg_base
        egg_info.finalize_options()
        egg_info.run()
        dist_info_dir = egg_info.egg_info[:-len('.egg-info')] + '.dist-info'
        log.info("creating '{}'".format(os.path.abspath(dist_info_dir)))

        bdist_wheel = self.get_finalized_command('bdist_wheel')
        bdist_wheel.egg2dist(egg_info.egg_info, dist_info_dir)
File diff suppressed because it is too large
@@ -0,0 +1,755 @@
"""setuptools.command.egg_info

Create a distribution's .egg-info directory and contents"""

from distutils.filelist import FileList as _FileList
from distutils.errors import DistutilsInternalError
from distutils.util import convert_path
from distutils import log
import distutils.errors
import distutils.filelist
import functools
import os
import re
import sys
import io
import warnings
import time
import collections

from setuptools import Command
from setuptools.command.sdist import sdist
from setuptools.command.sdist import walk_revctrl
from setuptools.command.setopt import edit_config
from setuptools.command import bdist_egg
from pkg_resources import (
    parse_requirements, safe_name, parse_version,
    safe_version, yield_lines, EntryPoint, iter_entry_points, to_filename)
import setuptools.unicode_utils as unicode_utils
from setuptools.glob import glob

from setuptools.extern import packaging
from setuptools import SetuptoolsDeprecationWarning


def translate_pattern(glob):  # noqa: C901  # is too complex (14)  # FIXME
    """
    Translate a file path glob like '*.txt' into a regular expression.
    This differs from fnmatch.translate which allows wildcards to match
    directory separators. It also knows about '**/' which matches any number of
    directories.
    """
    pat = ''

    # This will split on '/' within [character classes]. This is deliberate.
    chunks = glob.split(os.path.sep)

    sep = re.escape(os.sep)
    valid_char = '[^%s]' % (sep,)

    for c, chunk in enumerate(chunks):
        last_chunk = c == len(chunks) - 1

        # Chunks that are a literal ** are globstars. They match anything.
        if chunk == '**':
            if last_chunk:
                # Match anything if this is the last component
                pat += '.*'
            else:
                # Match '(name/)*'
                pat += '(?:%s+%s)*' % (valid_char, sep)
            continue  # Break here as the whole path component has been handled

        # Find any special characters in the remainder
        i = 0
        chunk_len = len(chunk)
        while i < chunk_len:
            char = chunk[i]
            if char == '*':
                # Match any number of name characters
                pat += valid_char + '*'
            elif char == '?':
                # Match a name character
                pat += valid_char
            elif char == '[':
                # Character class
                inner_i = i + 1
                # Skip initial !/] chars
                if inner_i < chunk_len and chunk[inner_i] == '!':
                    inner_i = inner_i + 1
                if inner_i < chunk_len and chunk[inner_i] == ']':
                    inner_i = inner_i + 1

                # Loop till the closing ] is found
                while inner_i < chunk_len and chunk[inner_i] != ']':
                    inner_i = inner_i + 1

                if inner_i >= chunk_len:
                    # Got to the end of the string without finding a closing ]
                    # Do not treat this as a matching group, but as a literal [
                    pat += re.escape(char)
                else:
                    # Grab the insides of the [brackets]
                    inner = chunk[i + 1:inner_i]
                    char_class = ''

                    # Class negation
                    if inner[0] == '!':
                        char_class = '^'
                        inner = inner[1:]

                    char_class += re.escape(inner)
                    pat += '[%s]' % (char_class,)

                    # Skip to the end ]
                    i = inner_i
            else:
                pat += re.escape(char)
            i += 1

        # Join each chunk with the dir separator
        if not last_chunk:
            pat += sep

    pat += r'\Z'
    return re.compile(pat, flags=re.MULTILINE | re.DOTALL)
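# Example behavior: translate_pattern('docs/*.txt') matches 'docs/a.txt' but
# not 'docs/sub/a.txt', because '*' compiles to [^<sep>]* and cannot cross a
# directory separator; translate_pattern('docs/**/*.txt') matches both, since
# the '**' chunk compiles to '(?:[^<sep>]+<sep>)*'.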


class InfoCommon:
    tag_build = None
    tag_date = None

    @property
    def name(self):
        return safe_name(self.distribution.get_name())

    def tagged_version(self):
        return safe_version(self._maybe_tag(self.distribution.get_version()))

    def _maybe_tag(self, version):
        """
        egg_info may be called more than once for a distribution,
        in which case the version string already contains all tags.
        """
        return (
            version if self.vtags and version.endswith(self.vtags)
            else version + self.vtags
        )

    def tags(self):
        version = ''
        if self.tag_build:
            version += self.tag_build
        if self.tag_date:
            version += time.strftime("-%Y%m%d")
        return version
    vtags = property(tags)
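    # Example (hypothetical values): with tag_build='.dev' and tag_date set,
    # a base version '1.0' is tagged '1.0.dev-YYYYMMDD' via time.strftime;
    # a second egg_info run leaves it unchanged because _maybe_tag() sees
    # the version already ends with the computed tags.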


class egg_info(InfoCommon, Command):
    description = "create a distribution's .egg-info directory"

    user_options = [
        ('egg-base=', 'e', "directory containing .egg-info directories"
                           " (default: top of the source tree)"),
        ('tag-date', 'd', "Add date stamp (e.g. 20050528) to version number"),
        ('tag-build=', 'b', "Specify explicit tag to add to version number"),
        ('no-date', 'D', "Don't include date stamp [default]"),
    ]

    boolean_options = ['tag-date']
    negative_opt = {
        'no-date': 'tag-date',
    }

    def initialize_options(self):
        self.egg_base = None
        self.egg_name = None
        self.egg_info = None
        self.egg_version = None
        self.broken_egg_info = False

    ####################################
    # allow the 'tag_svn_revision' to be detected and
    # set, supporting sdists built on older Setuptools.
    @property
    def tag_svn_revision(self):
        pass

    @tag_svn_revision.setter
    def tag_svn_revision(self, value):
        pass
    ####################################

    def save_version_info(self, filename):
        """
        Materialize the value of date into the
        build tag. Install build keys in a deterministic order
        to avoid arbitrary reordering on subsequent builds.
        """
        egg_info = collections.OrderedDict()
        # follow the order these keys would have been added
        # when PYTHONHASHSEED=0
        egg_info['tag_build'] = self.tags()
        egg_info['tag_date'] = 0
        edit_config(filename, dict(egg_info=egg_info))

    def finalize_options(self):
        # Note: we need to capture the current value returned
        # by `self.tagged_version()`, so we can later update
        # `self.distribution.metadata.version` without
        # repercussions.
        self.egg_name = self.name
        self.egg_version = self.tagged_version()
        parsed_version = parse_version(self.egg_version)

        try:
            is_version = isinstance(parsed_version, packaging.version.Version)
            spec = (
                "%s==%s" if is_version else "%s===%s"
            )
            list(
                parse_requirements(spec % (self.egg_name, self.egg_version))
            )
        except ValueError as e:
            raise distutils.errors.DistutilsOptionError(
                "Invalid distribution name or version syntax: %s-%s" %
                (self.egg_name, self.egg_version)
            ) from e

        if self.egg_base is None:
            dirs = self.distribution.package_dir
            self.egg_base = (dirs or {}).get('', os.curdir)

        self.ensure_dirname('egg_base')
        self.egg_info = to_filename(self.egg_name) + '.egg-info'
        if self.egg_base != os.curdir:
            self.egg_info = os.path.join(self.egg_base, self.egg_info)
        if '-' in self.egg_name:
            self.check_broken_egg_info()

        # Set package version for the benefit of dumber commands
        # (e.g. sdist, bdist_wininst, etc.)
        #
        self.distribution.metadata.version = self.egg_version

        # If we bootstrapped around the lack of a PKG-INFO, as might be the
        # case in a fresh checkout, make sure that any special tags get added
        # to the version info
        #
        pd = self.distribution._patched_dist
        if pd is not None and pd.key == self.egg_name.lower():
            pd._version = self.egg_version
            pd._parsed_version = parse_version(self.egg_version)
            self.distribution._patched_dist = None

    def write_or_delete_file(self, what, filename, data, force=False):
        """Write `data` to `filename` or delete if empty

        If `data` is non-empty, this routine is the same as ``write_file()``.
        If `data` is empty but not ``None``, this is the same as calling
        ``delete_file(filename)``. If `data` is ``None``, then this is a no-op
        unless `filename` exists, in which case a warning is issued about the
        orphaned file (if `force` is false), or deleted (if `force` is true).
        """
        if data:
            self.write_file(what, filename, data)
        elif os.path.exists(filename):
            if data is None and not force:
                log.warn(
                    "%s not set in setup(), but %s exists", what, filename
                )
                return
            else:
                self.delete_file(filename)

    def write_file(self, what, filename, data):
        """Write `data` to `filename` (if not a dry run) after announcing it

        `what` is used in a log message to identify what is being written
        to the file.
        """
        log.info("writing %s to %s", what, filename)
        data = data.encode("utf-8")
        if not self.dry_run:
            f = open(filename, 'wb')
            f.write(data)
            f.close()

    def delete_file(self, filename):
        """Delete `filename` (if not a dry run) after announcing it"""
        log.info("deleting %s", filename)
        if not self.dry_run:
            os.unlink(filename)

    def run(self):
        self.mkpath(self.egg_info)
        os.utime(self.egg_info, None)
        installer = self.distribution.fetch_build_egg
        for ep in iter_entry_points('egg_info.writers'):
            ep.require(installer=installer)
            writer = ep.resolve()
            writer(self, ep.name, os.path.join(self.egg_info, ep.name))

        # Get rid of native_libs.txt if it was put there by older bdist_egg
        nl = os.path.join(self.egg_info, "native_libs.txt")
        if os.path.exists(nl):
            self.delete_file(nl)

        self.find_sources()

    def find_sources(self):
        """Generate SOURCES.txt manifest file"""
        manifest_filename = os.path.join(self.egg_info, "SOURCES.txt")
        mm = manifest_maker(self.distribution)
        mm.manifest = manifest_filename
        mm.run()
        self.filelist = mm.filelist

    def check_broken_egg_info(self):
        bei = self.egg_name + '.egg-info'
        if self.egg_base != os.curdir:
            bei = os.path.join(self.egg_base, bei)
        if os.path.exists(bei):
            log.warn(
                "-" * 78 + '\n'
                "Note: Your current .egg-info directory has a '-' in its name;"
                '\nthis will not work correctly with "setup.py develop".\n\n'
                'Please rename %s to %s to correct this problem.\n' + '-' * 78,
                bei, self.egg_info
            )
            self.broken_egg_info = self.egg_info
            self.egg_info = bei  # make it work for now


class FileList(_FileList):
    # Implementations of the various MANIFEST.in commands

    def process_template_line(self, line):
        # Parse the line: split it up, make sure the right number of words
        # is there, and return the relevant words. 'action' is always
        # defined: it's the first word of the line. Which of the other
        # three are defined depends on the action; it'll be either
        # patterns, (dir and patterns), or (dir_pattern).
        (action, patterns, dir, dir_pattern) = self._parse_template_line(line)

        action_map = {
            'include': self.include,
            'exclude': self.exclude,
            'global-include': self.global_include,
            'global-exclude': self.global_exclude,
            'recursive-include': functools.partial(
                self.recursive_include, dir,
            ),
            'recursive-exclude': functools.partial(
                self.recursive_exclude, dir,
            ),
            'graft': self.graft,
            'prune': self.prune,
        }
        log_map = {
            'include': "warning: no files found matching '%s'",
            'exclude': (
                "warning: no previously-included files found "
                "matching '%s'"
            ),
            'global-include': (
                "warning: no files found matching '%s' "
                "anywhere in distribution"
            ),
            'global-exclude': (
                "warning: no previously-included files matching "
                "'%s' found anywhere in distribution"
            ),
            'recursive-include': (
                "warning: no files found matching '%s' "
                "under directory '%s'"
            ),
            'recursive-exclude': (
                "warning: no previously-included files matching "
                "'%s' found under directory '%s'"
            ),
            'graft': "warning: no directories found matching '%s'",
            'prune': "no previously-included directories found matching '%s'",
        }

        try:
            process_action = action_map[action]
        except KeyError:
            raise DistutilsInternalError(
                "this cannot happen: invalid action '{action!s}'".
                format(action=action),
            )

        # OK, now we know that the action is valid and we have the
        # right number of words on the line for that action -- so we
        # can proceed with minimal error-checking.

        action_is_recursive = action.startswith('recursive-')
        if action in {'graft', 'prune'}:
            patterns = [dir_pattern]
        extra_log_args = (dir, ) if action_is_recursive else ()
        log_tmpl = log_map[action]

        self.debug_print(
            ' '.join(
                [action] +
                ([dir] if action_is_recursive else []) +
                patterns,
            )
        )
        for pattern in patterns:
            if not process_action(pattern):
                log.warn(log_tmpl, pattern, *extra_log_args)
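        # Dispatch example: the MANIFEST.in line
        #   recursive-include docs *.rst *.png
        # parses to action='recursive-include', dir='docs',
        # patterns=['*.rst', '*.png'], and each pattern is routed through
        # functools.partial(self.recursive_include, 'docs') above.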

    def _remove_files(self, predicate):
        """
        Remove all files from the file list that match the predicate.
        Return True if any matching files were removed
        """
        found = False
        for i in range(len(self.files) - 1, -1, -1):
            if predicate(self.files[i]):
                self.debug_print(" removing " + self.files[i])
                del self.files[i]
                found = True
        return found

    def include(self, pattern):
        """Include files that match 'pattern'."""
        found = [f for f in glob(pattern) if not os.path.isdir(f)]
        self.extend(found)
        return bool(found)

    def exclude(self, pattern):
        """Exclude files that match 'pattern'."""
        match = translate_pattern(pattern)
        return self._remove_files(match.match)

    def recursive_include(self, dir, pattern):
        """
        Include all files anywhere in 'dir/' that match the pattern.
        """
        full_pattern = os.path.join(dir, '**', pattern)
        found = [f for f in glob(full_pattern, recursive=True)
                 if not os.path.isdir(f)]
        self.extend(found)
        return bool(found)

    def recursive_exclude(self, dir, pattern):
        """
        Exclude any file anywhere in 'dir/' that matches the pattern.
        """
        match = translate_pattern(os.path.join(dir, '**', pattern))
        return self._remove_files(match.match)

    def graft(self, dir):
        """Include all files from 'dir/'."""
        found = [
            item
            for match_dir in glob(dir)
            for item in distutils.filelist.findall(match_dir)
        ]
        self.extend(found)
        return bool(found)

    def prune(self, dir):
        """Filter out files from 'dir/'."""
        match = translate_pattern(os.path.join(dir, '**'))
        return self._remove_files(match.match)

    def global_include(self, pattern):
        """
        Include all files anywhere in the current directory that match the
        pattern. This is very inefficient on large file trees.
        """
        if self.allfiles is None:
            self.findall()
        match = translate_pattern(os.path.join('**', pattern))
        found = [f for f in self.allfiles if match.match(f)]
        self.extend(found)
        return bool(found)

    def global_exclude(self, pattern):
        """
        Exclude all files anywhere that match the pattern.
        """
        match = translate_pattern(os.path.join('**', pattern))
        return self._remove_files(match.match)

    def append(self, item):
        if item.endswith('\r'):  # Fix older sdists built on Windows
            item = item[:-1]
        path = convert_path(item)

        if self._safe_path(path):
            self.files.append(path)

    def extend(self, paths):
        self.files.extend(filter(self._safe_path, paths))

    def _repair(self):
        """
        Replace self.files with only safe paths

        Because some owners of FileList manipulate the underlying
        ``files`` attribute directly, this method must be called to
        repair those paths.
        """
        self.files = list(filter(self._safe_path, self.files))

    def _safe_path(self, path):
        enc_warn = "'%s' not %s encodable -- skipping"

        # To avoid accidental transcoding errors, decode to unicode first
        u_path = unicode_utils.filesys_decode(path)
        if u_path is None:
            log.warn("'%s' in unexpected encoding -- skipping" % path)
            return False

        # Must ensure utf-8 encodability
        utf8_path = unicode_utils.try_encode(u_path, "utf-8")
        if utf8_path is None:
            log.warn(enc_warn, path, 'utf-8')
            return False

        try:
            # accept the path if either form checks out
            if os.path.exists(u_path) or os.path.exists(utf8_path):
                return True
        # this will catch any encode errors decoding u_path
        except UnicodeEncodeError:
            log.warn(enc_warn, path, sys.getfilesystemencoding())


class manifest_maker(sdist):
    template = "MANIFEST.in"

    def initialize_options(self):
        self.use_defaults = 1
        self.prune = 1
        self.manifest_only = 1
        self.force_manifest = 1

    def finalize_options(self):
        pass

    def run(self):
        self.filelist = FileList()
        if not os.path.exists(self.manifest):
            self.write_manifest()  # it must exist so it'll get in the list
        self.add_defaults()
        if os.path.exists(self.template):
            self.read_template()
        self.add_license_files()
        self.prune_file_list()
        self.filelist.sort()
        self.filelist.remove_duplicates()
        self.write_manifest()

    def _manifest_normalize(self, path):
        path = unicode_utils.filesys_decode(path)
        return path.replace(os.sep, '/')

    def write_manifest(self):
        """
        Write the file list in 'self.filelist' to the manifest file
        named by 'self.manifest'.
        """
        self.filelist._repair()

        # _repair() ensured the paths are encodable; normalize separators to '/'
        files = [self._manifest_normalize(f) for f in self.filelist.files]
        msg = "writing manifest file '%s'" % self.manifest
        self.execute(write_file, (self.manifest, files), msg)

    def warn(self, msg):
        if not self._should_suppress_warning(msg):
            sdist.warn(self, msg)

    @staticmethod
    def _should_suppress_warning(msg):
        """
        suppress missing-file warnings from sdist
        """
        return re.match(r"standard file .*not found", msg)

    def add_defaults(self):
        sdist.add_defaults(self)
        self.filelist.append(self.template)
        self.filelist.append(self.manifest)
        rcfiles = list(walk_revctrl())
        if rcfiles:
            self.filelist.extend(rcfiles)
        elif os.path.exists(self.manifest):
            self.read_manifest()

        if os.path.exists("setup.py"):
            # setup.py should be included by default, even if it's not
            # the script called to create the sdist
            self.filelist.append("setup.py")

        ei_cmd = self.get_finalized_command('egg_info')
        self.filelist.graft(ei_cmd.egg_info)

    def add_license_files(self):
        license_files = self.distribution.metadata.license_files or []
        for lf in license_files:
            log.info("adding license file '%s'", lf)
        self.filelist.extend(license_files)

    def prune_file_list(self):
        build = self.get_finalized_command('build')
        base_dir = self.distribution.get_fullname()
        self.filelist.prune(build.build_base)
        self.filelist.prune(base_dir)
        sep = re.escape(os.sep)
        self.filelist.exclude_pattern(r'(^|' + sep + r')(RCS|CVS|\.svn)' + sep,
                                      is_regex=1)

    def _safe_data_files(self, build_py):
        """
        The parent class implementation of this method
        (``sdist``) will try to include data files, which
        might cause recursion problems when
        ``include_package_data=True``.

        Therefore, avoid triggering any attempt of
        analyzing/building the manifest again.
        """
        if hasattr(build_py, 'get_data_files_without_manifest'):
            return build_py.get_data_files_without_manifest()

        warnings.warn(
            "Custom 'build_py' does not implement "
            "'get_data_files_without_manifest'.\nPlease extend command classes"
            " from setuptools instead of distutils.",
            SetuptoolsDeprecationWarning
        )
        return build_py.get_data_files()


def write_file(filename, contents):
    """Create a file with the specified name and write 'contents' (a
    sequence of strings without line terminators) to it.
    """
    contents = "\n".join(contents)

    # assuming the contents has been vetted for utf-8 encoding
    contents = contents.encode("utf-8")

    with open(filename, "wb") as f:  # always write POSIX-style manifest
        f.write(contents)


def write_pkg_info(cmd, basename, filename):
    log.info("writing %s", filename)
    if not cmd.dry_run:
        metadata = cmd.distribution.metadata
        metadata.version, oldver = cmd.egg_version, metadata.version
        metadata.name, oldname = cmd.egg_name, metadata.name

        try:
            # write unescaped data to PKG-INFO, so older pkg_resources
            # can still parse it
            metadata.write_pkg_info(cmd.egg_info)
        finally:
            metadata.name, metadata.version = oldname, oldver

        safe = getattr(cmd.distribution, 'zip_safe', None)

        bdist_egg.write_safety_flag(cmd.egg_info, safe)


def warn_depends_obsolete(cmd, basename, filename):
    if os.path.exists(filename):
        log.warn(
            "WARNING: 'depends.txt' is not used by setuptools 0.6!\n"
            "Use the install_requires/extras_require setup() args instead."
        )


def _write_requirements(stream, reqs):
    lines = yield_lines(reqs or ())

    def append_cr(line):
        return line + '\n'
    lines = map(append_cr, lines)
    stream.writelines(lines)


def write_requirements(cmd, basename, filename):
    dist = cmd.distribution
    data = io.StringIO()
    _write_requirements(data, dist.install_requires)
    extras_require = dist.extras_require or {}
    for extra in sorted(extras_require):
        data.write('\n[{extra}]\n'.format(**vars()))
        _write_requirements(data, extras_require[extra])
    cmd.write_or_delete_file("requirements", filename, data.getvalue())


def write_setup_requirements(cmd, basename, filename):
    data = io.StringIO()
    _write_requirements(data, cmd.distribution.setup_requires)
    cmd.write_or_delete_file("setup-requirements", filename, data.getvalue())


def write_toplevel_names(cmd, basename, filename):
    pkgs = dict.fromkeys(
        [
            k.split('.', 1)[0]
            for k in cmd.distribution.iter_distribution_names()
        ]
    )
    cmd.write_file("top-level names", filename, '\n'.join(sorted(pkgs)) + '\n')


def overwrite_arg(cmd, basename, filename):
    write_arg(cmd, basename, filename, True)


def write_arg(cmd, basename, filename, force=False):
    argname = os.path.splitext(basename)[0]
    value = getattr(cmd.distribution, argname, None)
    if value is not None:
        value = '\n'.join(value) + '\n'
    cmd.write_or_delete_file(argname, filename, value, force)


def write_entries(cmd, basename, filename):
    ep = cmd.distribution.entry_points

    if isinstance(ep, str) or ep is None:
        data = ep
    else:
        data = []
        for section, contents in sorted(ep.items()):
            if not isinstance(contents, str):
                contents = EntryPoint.parse_group(section, contents)
                contents = '\n'.join(sorted(map(str, contents.values())))
            data.append('[%s]\n%s\n\n' % (section, contents))
        data = ''.join(data)

    cmd.write_or_delete_file('entry points', filename, data, True)


def get_pkg_info_revision():
    """
    Get a -r### off of PKG-INFO Version in case this is an sdist of
    a subversion revision.
    """
    warnings.warn(
        "get_pkg_info_revision is deprecated.", EggInfoDeprecationWarning)
    if os.path.exists('PKG-INFO'):
        with io.open('PKG-INFO') as f:
            for line in f:
                match = re.match(r"Version:.*-r(\d+)\s*$", line)
                if match:
                    return int(match.group(1))
    return 0


class EggInfoDeprecationWarning(SetuptoolsDeprecationWarning):
    """Deprecated behavior warning for EggInfo, bypassing suppression."""
@@ -0,0 +1,132 @@
from distutils.errors import DistutilsArgError
import inspect
import glob
import warnings
import platform
import distutils.command.install as orig

import setuptools

# Prior to numpy 1.9, NumPy relies on the '_install' name, so provide it for
# now. See https://github.com/pypa/setuptools/issues/199/
_install = orig.install


class install(orig.install):
    """Use easy_install to install the package, w/dependencies"""

    user_options = orig.install.user_options + [
        ('old-and-unmanageable', None, "Try not to use this!"),
        ('single-version-externally-managed', None,
         "used by system package builders to create 'flat' eggs"),
    ]
    boolean_options = orig.install.boolean_options + [
        'old-and-unmanageable', 'single-version-externally-managed',
    ]
    new_commands = [
        ('install_egg_info', lambda self: True),
        ('install_scripts', lambda self: True),
    ]
    _nc = dict(new_commands)

    def initialize_options(self):

        warnings.warn(
            "setup.py install is deprecated. "
            "Use build and pip and other standards-based tools.",
            setuptools.SetuptoolsDeprecationWarning,
        )

        orig.install.initialize_options(self)
        self.old_and_unmanageable = None
        self.single_version_externally_managed = None

    def finalize_options(self):
        orig.install.finalize_options(self)
        if self.root:
            self.single_version_externally_managed = True
        elif self.single_version_externally_managed:
            if not self.root and not self.record:
                raise DistutilsArgError(
                    "You must specify --record or --root when building system"
                    " packages"
                )

    def handle_extra_path(self):
        if self.root or self.single_version_externally_managed:
            # explicit backward-compatibility mode, allow extra_path to work
            return orig.install.handle_extra_path(self)

        # Ignore extra_path when installing an egg (or being run by another
        # command without --root or --single-version-externally-managed
        self.path_file = None
        self.extra_dirs = ''

    def run(self):
        # Explicit request for old-style install? Just do it
        if self.old_and_unmanageable or self.single_version_externally_managed:
            return orig.install.run(self)

        if not self._called_from_setup(inspect.currentframe()):
            # Run in backward-compatibility mode to support bdist_* commands.
            orig.install.run(self)
        else:
            self.do_egg_install()

    @staticmethod
    def _called_from_setup(run_frame):
        """
        Attempt to detect whether run() was called from setup() or by another
        command. If called by setup(), the parent caller will be the
        'run_command' method in 'distutils.dist', and *its* caller will be
        the 'run_commands' method. If called any other way, the
        immediate caller *might* be 'run_command', but it won't have been
        called by 'run_commands'. Return True in that case or if a call stack
        is unavailable. Return False otherwise.
        """
        if run_frame is None:
            msg = "Call stack not available. bdist_* commands may fail."
            warnings.warn(msg)
            if platform.python_implementation() == 'IronPython':
                msg = "For best results, pass -X:Frames to enable call stack."
                warnings.warn(msg)
            return True
        res = inspect.getouterframes(run_frame)[2]
        caller, = res[:1]
        info = inspect.getframeinfo(caller)
        caller_module = caller.f_globals.get('__name__', '')
        return (
            caller_module == 'distutils.dist'
            and info.function == 'run_commands'
        )
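        # Frame layout being inspected: getouterframes(run_frame)[2] is the
        # grandparent frame, i.e. run() <- run_command() <- run_commands();
        # any other invocation path (e.g. a bdist_* command calling
        # run_command directly) has a different grandparent and so falls
        # back to the classic distutils install.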

    def do_egg_install(self):

        easy_install = self.distribution.get_command_class('easy_install')

        cmd = easy_install(
            self.distribution, args="x", root=self.root, record=self.record,
        )
        cmd.ensure_finalized()  # finalize before bdist_egg munges install cmd
        cmd.always_copy_from = '.'  # make sure local-dir eggs get installed

        # pick up setup-dir .egg files only: no .egg-info
        cmd.package_index.scan(glob.glob('*.egg'))

        self.run_command('bdist_egg')
        args = [self.distribution.get_command_obj('bdist_egg').egg_output]

        if setuptools.bootstrap_install_from:
            # Bootstrap self-installation of setuptools
            args.insert(0, setuptools.bootstrap_install_from)

        cmd.args = args
        cmd.run(show_deprecation=False)
        setuptools.bootstrap_install_from = None


# XXX Python 3.1 doesn't see _nc if this is inside the class
install.sub_commands = (
    [cmd for cmd in orig.install.sub_commands if cmd[0] not in install._nc] +
    install.new_commands
)
@@ -0,0 +1,62 @@
from distutils import log, dir_util
import os

from setuptools import Command
from setuptools import namespaces
from setuptools.archive_util import unpack_archive
import pkg_resources


class install_egg_info(namespaces.Installer, Command):
    """Install an .egg-info directory for the package"""

    description = "Install an .egg-info directory for the package"

    user_options = [
        ('install-dir=', 'd', "directory to install to"),
    ]

    def initialize_options(self):
        self.install_dir = None

    def finalize_options(self):
        self.set_undefined_options('install_lib',
                                   ('install_dir', 'install_dir'))
        ei_cmd = self.get_finalized_command("egg_info")
        basename = pkg_resources.Distribution(
            None, None, ei_cmd.egg_name, ei_cmd.egg_version
        ).egg_name() + '.egg-info'
        self.source = ei_cmd.egg_info
        self.target = os.path.join(self.install_dir, basename)
        self.outputs = []

    def run(self):
        self.run_command('egg_info')
        if os.path.isdir(self.target) and not os.path.islink(self.target):
            dir_util.remove_tree(self.target, dry_run=self.dry_run)
        elif os.path.exists(self.target):
            self.execute(os.unlink, (self.target,), "Removing " + self.target)
        if not self.dry_run:
            pkg_resources.ensure_directory(self.target)
        self.execute(
            self.copytree, (), "Copying %s to %s" % (self.source, self.target)
        )
        self.install_namespaces()

    def get_outputs(self):
        return self.outputs

    def copytree(self):
        # Copy the .egg-info tree to site-packages
        def skimmer(src, dst):
            # filter out source-control directories; note that 'src' is always
            # a '/'-separated path, regardless of platform. 'dst' is a
            # platform-specific path.
            for skip in '.svn/', 'CVS/':
                if src.startswith(skip) or '/' + skip in src:
                    return None
            self.outputs.append(dst)
            log.debug("Copying %s to %s", src, dst)
            return dst

        unpack_archive(self.source, self.target, skimmer)
@@ -0,0 +1,122 @@
import os
import sys
from itertools import product, starmap
import distutils.command.install_lib as orig


class install_lib(orig.install_lib):
    """Don't add compiled flags to filenames of non-Python files"""

    def run(self):
        self.build()
        outfiles = self.install()
        if outfiles is not None:
            # always compile, in case we have any extension stubs to deal with
            self.byte_compile(outfiles)

    def get_exclusions(self):
        """
        Return a collections.Sized collections.Container of paths to be
        excluded for single_version_externally_managed installations.
        """
        all_packages = (
            pkg
            for ns_pkg in self._get_SVEM_NSPs()
            for pkg in self._all_packages(ns_pkg)
        )

        excl_specs = product(all_packages, self._gen_exclusion_paths())
        return set(starmap(self._exclude_pkg_path, excl_specs))

    def _exclude_pkg_path(self, pkg, exclusion_path):
        """
        Given a package name and exclusion path within that package,
        compute the full exclusion path.
        """
        parts = pkg.split('.') + [exclusion_path]
        return os.path.join(self.install_dir, *parts)

    @staticmethod
    def _all_packages(pkg_name):
        """
        >>> list(install_lib._all_packages('foo.bar.baz'))
        ['foo.bar.baz', 'foo.bar', 'foo']
        """
        while pkg_name:
            yield pkg_name
            pkg_name, sep, child = pkg_name.rpartition('.')

    def _get_SVEM_NSPs(self):
        """
        Get namespace packages (list) but only for
        single_version_externally_managed installations and empty otherwise.
        """
        # TODO: is it necessary to short-circuit here? i.e. what's the cost
        # if get_finalized_command is called even when namespace_packages is
        # False?
        if not self.distribution.namespace_packages:
            return []

        install_cmd = self.get_finalized_command('install')
        svem = install_cmd.single_version_externally_managed

        return self.distribution.namespace_packages if svem else []

    @staticmethod
    def _gen_exclusion_paths():
        """
        Generate file paths to be excluded for namespace packages (bytecode
        cache files).
        """
        # always exclude the package module itself
        yield '__init__.py'

        yield '__init__.pyc'
        yield '__init__.pyo'

        if not hasattr(sys, 'implementation'):
            return

        base = os.path.join(
            '__pycache__', '__init__.' + sys.implementation.cache_tag)
        yield base + '.pyc'
        yield base + '.pyo'
        yield base + '.opt-1.pyc'
        yield base + '.opt-2.pyc'
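        # Example: on CPython 3.9 the cache_tag is 'cpython-39', so this also
        # yields '__pycache__/__init__.cpython-39.pyc', '.pyo', '.opt-1.pyc'
        # and '.opt-2.pyc'; get_exclusions() joins each of these onto every
        # namespace package path under install_dir.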

    def copy_tree(
            self, infile, outfile,
            preserve_mode=1, preserve_times=1, preserve_symlinks=0, level=1
    ):
        assert preserve_mode and preserve_times and not preserve_symlinks
        exclude = self.get_exclusions()

        if not exclude:
            return orig.install_lib.copy_tree(self, infile, outfile)

        # Exclude namespace package __init__.py* files from the output

        from setuptools.archive_util import unpack_directory
        from distutils import log

        outfiles = []

        def pf(src, dst):
            if dst in exclude:
                log.warn("Skipping installation of %s (namespace package)",
                         dst)
                return False

            log.info("copying %s -> %s", src, os.path.dirname(dst))
            outfiles.append(dst)
            return dst

        unpack_directory(infile, outfile, pf)
        return outfiles

    def get_outputs(self):
        outputs = orig.install_lib.get_outputs(self)
        exclude = self.get_exclusions()
        if exclude:
            return [f for f in outputs if f not in exclude]
        return outputs
|
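
A quick illustration of how the exclusion machinery above composes: `get_exclusions` expands each namespace package to all its ancestors, crosses them with the generated exclusion filenames, and joins each pair under the install dir. This is a standalone sketch of that pipeline; `install_dir`, the package name, and the exclusion list are invented for the example.

import os
from itertools import product, starmap

# Mirror of install_lib._all_packages: walk a dotted name up to its root.
def all_packages(pkg_name):
    while pkg_name:
        yield pkg_name
        pkg_name, _, _ = pkg_name.rpartition('.')

install_dir = '/tmp/site-packages'  # hypothetical value for illustration
exclusions = product(all_packages('foo.bar'), ['__init__.py', '__init__.pyc'])
paths = set(starmap(
    lambda pkg, excl: os.path.join(install_dir, *(pkg.split('.') + [excl])),
    exclusions,
))
# paths now contains e.g. /tmp/site-packages/foo/__init__.py and
# /tmp/site-packages/foo/bar/__init__.pyc -- exactly the files copy_tree skips.
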
@ -0,0 +1,69 @@
from distutils import log
import distutils.command.install_scripts as orig
from distutils.errors import DistutilsModuleError
import os
import sys

from pkg_resources import Distribution, PathMetadata, ensure_directory


class install_scripts(orig.install_scripts):
    """Do normal script install, plus any egg_info wrapper scripts"""

    def initialize_options(self):
        orig.install_scripts.initialize_options(self)
        self.no_ep = False

    def run(self):
        import setuptools.command.easy_install as ei

        self.run_command("egg_info")
        if self.distribution.scripts:
            orig.install_scripts.run(self)  # run first to set up self.outfiles
        else:
            self.outfiles = []
        if self.no_ep:
            # don't install entry point scripts into .egg file!
            return

        ei_cmd = self.get_finalized_command("egg_info")
        dist = Distribution(
            ei_cmd.egg_base, PathMetadata(ei_cmd.egg_base, ei_cmd.egg_info),
            ei_cmd.egg_name, ei_cmd.egg_version,
        )
        bs_cmd = self.get_finalized_command('build_scripts')
        exec_param = getattr(bs_cmd, 'executable', None)
        try:
            bw_cmd = self.get_finalized_command("bdist_wininst")
            is_wininst = getattr(bw_cmd, '_is_running', False)
        except (ImportError, DistutilsModuleError):
            is_wininst = False
        writer = ei.ScriptWriter
        if is_wininst:
            exec_param = "python.exe"
            writer = ei.WindowsScriptWriter
        if exec_param == sys.executable:
            # In case the path to the Python executable contains a space, wrap
            # it so it's not split up.
            exec_param = [exec_param]
        # resolve the writer to the environment
        writer = writer.best()
        cmd = writer.command_spec_class.best().from_param(exec_param)
        for args in writer.get_args(dist, cmd.as_header()):
            self.write_script(*args)

    def write_script(self, script_name, contents, mode="t", *ignored):
        """Write an executable file to the scripts directory"""
        from setuptools.command.easy_install import chmod, current_umask

        log.info("Installing %s script to %s", script_name, self.install_dir)
        target = os.path.join(self.install_dir, script_name)
        self.outfiles.append(target)

        mask = current_umask()
        if not self.dry_run:
            ensure_directory(target)
            f = open(target, "w" + mode)
            f.write(contents)
            f.close()
            chmod(target, 0o777 - mask)
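
The permission arithmetic at the end of `write_script` derives the installed script's mode from the process umask. A minimal standalone sketch of the same computation, without the easy_install helpers (the probe value 0o022 is a throwaway, as in the real helper):

import os

def current_umask():
    # There is no read-only accessor for the umask: set a throwaway value,
    # capture the old one, then immediately restore it.
    tmp = os.umask(0o022)
    os.umask(tmp)
    return tmp

mask = current_umask()
mode = 0o777 - mask  # e.g. umask 0o022 -> scripts installed as 0o755
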
@ -0,0 +1,15 @@
<?xml version="1.0" encoding="UTF-8" standalone="yes"?>
<assembly xmlns="urn:schemas-microsoft-com:asm.v1" manifestVersion="1.0">
    <assemblyIdentity version="1.0.0.0"
                      processorArchitecture="X86"
                      name="%(name)s"
                      type="win32"/>
    <!-- Identify the application security requirements. -->
    <trustInfo xmlns="urn:schemas-microsoft-com:asm.v3">
        <security>
            <requestedPrivileges>
                <requestedExecutionLevel level="asInvoker" uiAccess="false"/>
            </requestedPrivileges>
        </security>
    </trustInfo>
</assembly>
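
The `%(name)s` in the manifest above is a Python %-style placeholder rather than literal XML, so rendering it is a one-line substitution. A hedged sketch; the file path and script name here are hypothetical:

# The manifest template is the XML above; path and name are made up.
with open('launcher manifest.xml') as f:
    manifest_template = f.read()
rendered = manifest_template % {'name': 'example-script.exe'}
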
@ -0,0 +1,134 @@
import os
from glob import glob
from distutils.util import convert_path
from distutils.command import sdist


class sdist_add_defaults:
    """
    Mix-in providing forward-compatibility for functionality as found in
    distutils on Python 3.7.

    Do not edit the code in this class except to update functionality
    as implemented in distutils. Instead, override in the subclass.
    """

    def add_defaults(self):
        """Add all the default files to self.filelist:
          - README or README.txt
          - setup.py
          - test/test*.py
          - all pure Python modules mentioned in setup script
          - all files pointed by package_data (build_py)
          - all files defined in data_files.
          - all files defined as scripts.
          - all C sources listed as part of extensions or C libraries
            in the setup script (doesn't catch C headers!)
        Warns if (README or README.txt) or setup.py are missing; everything
        else is optional.
        """
        self._add_defaults_standards()
        self._add_defaults_optional()
        self._add_defaults_python()
        self._add_defaults_data_files()
        self._add_defaults_ext()
        self._add_defaults_c_libs()
        self._add_defaults_scripts()

    @staticmethod
    def _cs_path_exists(fspath):
        """
        Case-sensitive path existence check

        >>> sdist_add_defaults._cs_path_exists(__file__)
        True
        >>> sdist_add_defaults._cs_path_exists(__file__.upper())
        False
        """
        if not os.path.exists(fspath):
            return False
        # make absolute so we always have a directory
        abspath = os.path.abspath(fspath)
        directory, filename = os.path.split(abspath)
        return filename in os.listdir(directory)

    def _add_defaults_standards(self):
        standards = [self.READMES, self.distribution.script_name]
        for fn in standards:
            if isinstance(fn, tuple):
                alts = fn
                got_it = False
                for fn in alts:
                    if self._cs_path_exists(fn):
                        got_it = True
                        self.filelist.append(fn)
                        break

                if not got_it:
                    self.warn("standard file not found: should have one of " +
                              ', '.join(alts))
            else:
                if self._cs_path_exists(fn):
                    self.filelist.append(fn)
                else:
                    self.warn("standard file '%s' not found" % fn)

    def _add_defaults_optional(self):
        optional = ['test/test*.py', 'setup.cfg']
        for pattern in optional:
            files = filter(os.path.isfile, glob(pattern))
            self.filelist.extend(files)

    def _add_defaults_python(self):
        # build_py is used to get:
        #  - python modules
        #  - files defined in package_data
        build_py = self.get_finalized_command('build_py')

        # getting python files
        if self.distribution.has_pure_modules():
            self.filelist.extend(build_py.get_source_files())

        # getting package_data files
        # (computed in build_py.data_files by build_py.finalize_options)
        for pkg, src_dir, build_dir, filenames in build_py.data_files:
            for filename in filenames:
                self.filelist.append(os.path.join(src_dir, filename))

    def _add_defaults_data_files(self):
        # getting distribution.data_files
        if self.distribution.has_data_files():
            for item in self.distribution.data_files:
                if isinstance(item, str):
                    # plain file
                    item = convert_path(item)
                    if os.path.isfile(item):
                        self.filelist.append(item)
                else:
                    # a (dirname, filenames) tuple
                    dirname, filenames = item
                    for f in filenames:
                        f = convert_path(f)
                        if os.path.isfile(f):
                            self.filelist.append(f)

    def _add_defaults_ext(self):
        if self.distribution.has_ext_modules():
            build_ext = self.get_finalized_command('build_ext')
            self.filelist.extend(build_ext.get_source_files())

    def _add_defaults_c_libs(self):
        if self.distribution.has_c_libraries():
            build_clib = self.get_finalized_command('build_clib')
            self.filelist.extend(build_clib.get_source_files())

    def _add_defaults_scripts(self):
        if self.distribution.has_scripts():
            build_scripts = self.get_finalized_command('build_scripts')
            self.filelist.extend(build_scripts.get_source_files())


if hasattr(sdist.sdist, '_add_defaults_standards'):
    # disable the functionality already available upstream
    class sdist_add_defaults:  # noqa
        pass
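
The case-sensitive check above matters on case-insensitive filesystems (Windows, default macOS), where `os.path.exists('SETUP.PY')` is true whenever `setup.py` exists; listing the directory recovers the on-disk casing. A standalone sketch of the same idea:

import os

def cs_path_exists(fspath):
    if not os.path.exists(fspath):
        return False
    directory, filename = os.path.split(os.path.abspath(fspath))
    # os.listdir returns names with their on-disk casing, so this membership
    # test stays case-sensitive even on case-insensitive filesystems.
    return filename in os.listdir(directory)

# With only 'setup.py' on disk, on a case-insensitive filesystem:
# os.path.exists('SETUP.PY') -> True, but cs_path_exists('SETUP.PY') -> False
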
@ -0,0 +1,18 @@
from distutils import log
import distutils.command.register as orig

from setuptools.errors import RemovedCommandError


class register(orig.register):
    """Formerly used to register packages on PyPI."""

    def run(self):
        msg = (
            "The register command has been removed, use twine to upload "
            + "instead (https://pypi.org/p/twine)"
        )

        self.announce("ERROR: " + msg, log.ERROR)

        raise RemovedCommandError(msg)
@ -0,0 +1,64 @@
from distutils.util import convert_path
from distutils import log
from distutils.errors import DistutilsOptionError
import os
import shutil

from setuptools import Command


class rotate(Command):
    """Delete older distributions"""

    description = "delete older distributions, keeping N newest files"
    user_options = [
        ('match=', 'm', "patterns to match (required)"),
        ('dist-dir=', 'd', "directory where the distributions are"),
        ('keep=', 'k', "number of matching distributions to keep"),
    ]

    boolean_options = []

    def initialize_options(self):
        self.match = None
        self.dist_dir = None
        self.keep = None

    def finalize_options(self):
        if self.match is None:
            raise DistutilsOptionError(
                "Must specify one or more (comma-separated) match patterns "
                "(e.g. '.zip' or '.egg')"
            )
        if self.keep is None:
            raise DistutilsOptionError("Must specify number of files to keep")
        try:
            self.keep = int(self.keep)
        except ValueError as e:
            raise DistutilsOptionError("--keep must be an integer") from e
        if isinstance(self.match, str):
            self.match = [
                convert_path(p.strip()) for p in self.match.split(',')
            ]
        self.set_undefined_options('bdist', ('dist_dir', 'dist_dir'))

    def run(self):
        self.run_command("egg_info")
        from glob import glob

        for pattern in self.match:
            pattern = self.distribution.get_name() + '*' + pattern
            files = glob(os.path.join(self.dist_dir, pattern))
            files = [(os.path.getmtime(f), f) for f in files]
            files.sort()
            files.reverse()

            log.info("%d file(s) matching %s", len(files), pattern)
            files = files[self.keep:]
            for (t, f) in files:
                log.info("Deleting %s", f)
                if not self.dry_run:
                    if os.path.isdir(f):
                        shutil.rmtree(f)
                    else:
                        os.unlink(f)
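
The rotation logic above sorts matching files by mtime, newest first, and deletes everything past `--keep`. A toy run of the same slicing, with file names and timestamps invented for the example:

# (mtime, path) pairs, as rotate.run() builds them; values are made up.
files = [(100, 'pkg-1.0.egg'), (300, 'pkg-3.0.egg'), (200, 'pkg-2.0.egg')]
files.sort()
files.reverse()          # newest first: 3.0, 2.0, 1.0
keep = 2
doomed = files[keep:]    # [(100, 'pkg-1.0.egg')] -- only this one is deleted
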
@ -0,0 +1,22 @@
from setuptools.command.setopt import edit_config, option_base


class saveopts(option_base):
    """Save command-line options to a file"""

    description = "save supplied options to setup.cfg or other config file"

    def run(self):
        dist = self.distribution
        settings = {}

        for cmd in dist.command_options:

            if cmd == 'saveopts':
                continue  # don't save our own options!

            for opt, (src, val) in dist.get_option_dict(cmd).items():
                if src == "command line":
                    settings.setdefault(cmd, {})[opt] = val

        edit_config(self.filename, settings, self.dry_run)
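
For a run like `python setup.py saveopts build --build-base=bld`, the loop above keeps only options whose recorded source is the command line, producing roughly the dict below before `edit_config` persists it. A hedged illustration; the command and option are example values:

settings = {
    'build': {
        'build_base': 'bld',  # src == "command line", so it is kept
    },
}
# edit_config('setup.cfg', settings, dry_run=False) would then write a
# [build] section with that option into setup.cfg.
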