Compare commits
No commits in common. 'c9' and 'i8c' have entirely different histories.
@ -1,515 +0,0 @@
|
||||
"""Module/script to byte-compile all .py files to .pyc files.
|
||||
|
||||
When called as a script with arguments, this compiles the directories
|
||||
given as arguments recursively; the -l option prevents it from
|
||||
recursing into directories.
|
||||
|
||||
Without arguments, if compiles all modules on sys.path, without
|
||||
recursing into subdirectories. (Even though it should do so for
|
||||
packages -- for now, you'll have to deal with packages separately.)
|
||||
|
||||
See module py_compile for details of the actual byte-compilation.
|
||||
|
||||
License:
|
||||
Compileall2 is an enhanced copy of Python's compileall module
|
||||
and it follows Python licensing. For more info see: https://www.python.org/psf/license/
|
||||
"""
|
||||
import os
|
||||
import sys
|
||||
import importlib.util
|
||||
import py_compile
|
||||
import struct
|
||||
import filecmp
|
||||
|
||||
from functools import partial
|
||||
from pathlib import Path
|
||||
|
||||
# Feature flags for the running interpreter; the module supports 3.4+ and
# switches behavior on these instead of scattering version checks.
# Python 3.7 and higher
PY37 = sys.version_info[0:2] >= (3, 7)
# Python 3.6 and higher
PY36 = sys.version_info[0:2] >= (3, 6)
# Python 3.5 and higher
PY35 = sys.version_info[0:2] >= (3, 5)

# Python 3.7 and above has a different structure and length
# of pyc files header. Also, multiple ways how to invalidate pyc file was
# introduced in Python 3.7. These cases are covered by variables here or by PY37
# variable itself.
if PY37:
    # 3.7+: magic (4 bytes) + flags (4 bytes) + mtime/hash (4 bytes) -- see PEP 552.
    pyc_struct_format = '<4sll'
    pyc_header_lenght = 12
    pyc_header_format = (pyc_struct_format, importlib.util.MAGIC_NUMBER, 0)
else:
    # <= 3.6: magic (4 bytes) + mtime (4 bytes).
    pyc_struct_format = '<4sl'
    pyc_header_lenght = 8
    pyc_header_format = (pyc_struct_format, importlib.util.MAGIC_NUMBER)

__all__ = ["compile_dir","compile_file","compile_path"]
|
||||
|
||||
def optimization_kwarg(opt):
    """Translate an optimization level into the **kwargs accepted by
    importlib.util.cache_from_source on the running interpreter.

    Python >= 3.5 takes ``optimization=opt``; Python 3.4 only knows the
    boolean ``debug_override`` switch (``.pyo`` files), so a truthy *opt*
    maps to ``debug_override=False`` and a falsy one to no kwargs at all.
    """
    if PY35:
        return {'optimization': opt}
    # <= 3.4: optimized byte-code (.pyo) is requested via debug_override.
    if opt:
        return {'debug_override': False}
    return {}
|
||||
|
||||
def _walk_dir(dir, maxlevels, quiet=0):
    """Yield paths of all regular files under *dir*, recursing at most
    *maxlevels* levels deep and skipping __pycache__ and symlinked dirs."""
    if PY36 and quiet < 2 and isinstance(dir, os.PathLike):
        dir = os.fspath(dir)
    else:
        dir = str(dir)
    if not quiet:
        print('Listing {!r}...'.format(dir))
    try:
        entries = sorted(os.listdir(dir))
    except OSError:
        if quiet < 2:
            print("Can't list {!r}".format(dir))
        entries = []
    for entry in entries:
        if entry == '__pycache__':
            continue
        fullname = os.path.join(dir, entry)
        if not os.path.isdir(fullname):
            yield fullname
            continue
        # Recurse only into real (non-symlink) subdirectories while levels remain.
        if (maxlevels > 0 and entry != os.curdir and entry != os.pardir and
                os.path.isdir(fullname) and not os.path.islink(fullname)):
            yield from _walk_dir(fullname, maxlevels=maxlevels - 1,
                                 quiet=quiet)
|
||||
|
||||
def compile_dir(dir, maxlevels=None, ddir=None, force=False,
                rx=None, quiet=0, legacy=False, optimize=-1, workers=1,
                invalidation_mode=None, stripdir=None,
                prependdir=None, limit_sl_dest=None, hardlink_dupes=False):
    """Byte-compile all modules in the given directory tree.

    Arguments (only dir is required):

    dir: the directory to byte-compile
    maxlevels: maximum recursion level (default `sys.getrecursionlimit()`)
    ddir: the directory that will be prepended to the path to the
          file as it is compiled into each byte-code file.
    force: if True, force compilation, even if timestamps are up-to-date
    quiet: full output with False or 0, errors only with 1,
           no output with 2
    legacy: if True, produce legacy pyc paths instead of PEP 3147 paths
    optimize: int or list of optimization levels or -1 for level of
              the interpreter. Multiple levels leads to multiple compiled
              files each with one optimization level.
    workers: maximum number of parallel workers
    invalidation_mode: how the up-to-dateness of the pyc will be checked
    stripdir: part of path to left-strip from source file path
    prependdir: path to prepend to beginning of original file path, applied
                after stripdir
    limit_sl_dest: ignore symlinks if they are pointing outside of
                   the defined path
    hardlink_dupes: hardlink duplicated pyc files
    """
    ProcessPoolExecutor = None
    if ddir is not None and (stripdir is not None or prependdir is not None):
        raise ValueError(("Destination dir (ddir) cannot be used "
                          "in combination with stripdir or prependdir"))
    if ddir is not None:
        # ddir is equivalent to stripping the whole tree prefix and
        # prepending the destination directory instead.
        stripdir = dir
        prependdir = ddir
        ddir = None
    if workers is not None:
        if workers < 0:
            raise ValueError('workers must be greater or equal to 0')
        elif workers != 1:
            try:
                # Only import when needed, as low resource platforms may
                # fail to import it
                from concurrent.futures import ProcessPoolExecutor
            except ImportError:
                workers = 1
    if maxlevels is None:
        maxlevels = sys.getrecursionlimit()
    files = _walk_dir(dir, quiet=quiet, maxlevels=maxlevels)
    success = True
    if workers is not None and workers != 1 and ProcessPoolExecutor is not None:
        # workers == 0 means "let the executor choose" (max_workers=None).
        workers = workers or None
        with ProcessPoolExecutor(max_workers=workers) as executor:
            # BUGFIX: hardlink_dupes was not forwarded to the workers, so
            # parallel runs (-j N) silently ignored it; pass it through so
            # the parallel path matches the serial path below.
            results = executor.map(partial(compile_file,
                                           ddir=ddir, force=force,
                                           rx=rx, quiet=quiet,
                                           legacy=legacy,
                                           optimize=optimize,
                                           invalidation_mode=invalidation_mode,
                                           stripdir=stripdir,
                                           prependdir=prependdir,
                                           limit_sl_dest=limit_sl_dest,
                                           hardlink_dupes=hardlink_dupes),
                                   files)
            # compile_file returns a bool; min() is False iff any file failed.
            success = min(results, default=True)
    else:
        for file in files:
            if not compile_file(file, ddir, force, rx, quiet,
                                legacy, optimize, invalidation_mode,
                                stripdir=stripdir, prependdir=prependdir,
                                limit_sl_dest=limit_sl_dest,
                                hardlink_dupes=hardlink_dupes):
                success = False
    return success
|
||||
|
||||
def compile_file(fullname, ddir=None, force=False, rx=None, quiet=0,
                 legacy=False, optimize=-1,
                 invalidation_mode=None, stripdir=None, prependdir=None,
                 limit_sl_dest=None, hardlink_dupes=False):
    """Byte-compile one file.

    Arguments (only fullname is required):

    fullname: the file to byte-compile
    ddir: if given, the directory name compiled in to the
          byte-code file.
    force: if True, force compilation, even if timestamps are up-to-date
    quiet: full output with False or 0, errors only with 1,
           no output with 2
    legacy: if True, produce legacy pyc paths instead of PEP 3147 paths
    optimize: int or list of optimization levels or -1 for level of
              the interpreter. Multiple levels leads to multiple compiled
              files each with one optimization level.
    invalidation_mode: how the up-to-dateness of the pyc will be checked
    stripdir: part of path to left-strip from source file path
    prependdir: path to prepend to beginning of original file path, applied
                after stripdir
    limit_sl_dest: ignore symlinks if they are pointing outside of
                   the defined path.
    hardlink_dupes: hardlink duplicated pyc files
    """

    if ddir is not None and (stripdir is not None or prependdir is not None):
        raise ValueError(("Destination dir (ddir) cannot be used "
                          "in combination with stripdir or prependdir"))

    success = True
    # Normalize path-like inputs to a plain string.
    if PY36 and quiet < 2 and isinstance(fullname, os.PathLike):
        fullname = os.fspath(fullname)
    else:
        fullname = str(fullname)
    name = os.path.basename(fullname)

    # dfile is the source path recorded inside the pyc (shown in tracebacks).
    dfile = None

    if ddir is not None:
        if not PY36:
            ddir = str(ddir)
        dfile = os.path.join(ddir, name)

    if stripdir is not None:
        # Drop the leading components shared with stripdir from the
        # recorded source path.
        fullname_parts = fullname.split(os.path.sep)
        stripdir_parts = stripdir.split(os.path.sep)
        ddir_parts = list(fullname_parts)

        for spart, opart in zip(stripdir_parts, fullname_parts):
            if spart == opart:
                ddir_parts.remove(spart)

        dfile = os.path.join(*ddir_parts)

    if prependdir is not None:
        if dfile is None:
            dfile = os.path.join(prependdir, fullname)
        else:
            dfile = os.path.join(prependdir, dfile)

    if isinstance(optimize, int):
        optimize = [optimize]

    # BUGFIX: deduplicate and sort the levels once up front.  Previously the
    # compile loop enumerated sorted(optimize) while the hardlink logic
    # indexed the *unsorted* list, so an unsorted input produced mismatched
    # "previous" files.
    optimize = sorted(set(optimize))

    # BUGFIX: this check used to raise whenever hardlink_dupes was set,
    # making the feature unusable; it is only an error with fewer than two
    # optimization levels (matching the CLI check in main()).
    if hardlink_dupes and len(optimize) < 2:
        raise ValueError(("Hardlinking of duplicated bytecode makes sense "
                          "only for more than one optimization level."))

    if rx is not None:
        # Skip files matching the exclusion regex.
        mo = rx.search(fullname)
        if mo:
            return success

    if limit_sl_dest is not None and os.path.islink(fullname):
        # Ignore symlinks whose target lies outside the allowed destination.
        if Path(limit_sl_dest).resolve() not in Path(fullname).resolve().parents:
            return success

    opt_cfiles = {}  # optimization level -> target pyc path

    if os.path.isfile(fullname):
        for opt_level in optimize:
            if legacy:
                opt_cfiles[opt_level] = fullname + 'c'
            else:
                if opt_level >= 0:
                    # Level 0 maps to '' so cache_from_source produces the
                    # plain (unsuffixed) cache name.
                    opt = opt_level if opt_level >= 1 else ''
                    opt_kwarg = optimization_kwarg(opt)
                    cfile = (importlib.util.cache_from_source(
                             fullname, **opt_kwarg))
                    opt_cfiles[opt_level] = cfile
                else:
                    cfile = importlib.util.cache_from_source(fullname)
                    opt_cfiles[opt_level] = cfile

        if name.endswith('.py'):
            if not force:
                # Compare the expected pyc header (magic + mtime) against
                # every target pyc; if all match, everything is up to date.
                try:
                    mtime = int(os.stat(fullname).st_mtime)
                    expect = struct.pack(*(pyc_header_format + (mtime,)))
                    for cfile in opt_cfiles.values():
                        with open(cfile, 'rb') as chandle:
                            actual = chandle.read(pyc_header_lenght)
                        if expect != actual:
                            break
                    else:
                        return success
                except OSError:
                    pass
            if not quiet:
                print('Compiling {!r}...'.format(fullname))
            try:
                for index, opt_level in enumerate(optimize):
                    cfile = opt_cfiles[opt_level]
                    if PY37:
                        ok = py_compile.compile(fullname, cfile, dfile, True,
                                                optimize=opt_level,
                                                invalidation_mode=invalidation_mode)
                    else:
                        ok = py_compile.compile(fullname, cfile, dfile, True,
                                                optimize=opt_level)

                    if index > 0 and hardlink_dupes:
                        previous_cfile = opt_cfiles[optimize[index - 1]]
                        if previous_cfile == cfile and optimize[0] not in (1, 2):
                            # Python 3.4 has only one .pyo file for -O and -OO so
                            # we hardlink it only if there is a .pyc file
                            # with the same content
                            previous_cfile = opt_cfiles[optimize[0]]
                        if previous_cfile != cfile and filecmp.cmp(cfile, previous_cfile, shallow=False):
                            os.unlink(cfile)
                            os.link(previous_cfile, cfile)

            except py_compile.PyCompileError as err:
                success = False
                if quiet >= 2:
                    return success
                elif quiet:
                    print('*** Error compiling {!r}...'.format(fullname))
                else:
                    print('*** ', end='')
                # escape non-printable characters in msg
                msg = err.msg.encode(sys.stdout.encoding,
                                     errors='backslashreplace')
                msg = msg.decode(sys.stdout.encoding)
                print(msg)
            except (SyntaxError, UnicodeError, OSError) as e:
                success = False
                if quiet >= 2:
                    return success
                elif quiet:
                    print('*** Error compiling {!r}...'.format(fullname))
                else:
                    print('*** ', end='')
                print(e.__class__.__name__ + ':', e)
            else:
                if ok == 0:
                    success = False
    return success
|
||||
|
||||
def compile_path(skip_curdir=1, maxlevels=0, force=False, quiet=0,
                 legacy=False, optimize=-1,
                 invalidation_mode=None):
    """Byte-compile every module found on sys.path.

    Arguments (all optional):

    skip_curdir: if true, skip current directory (default True)
    maxlevels: max recursion level (default 0)
    force: as for compile_dir() (default False)
    quiet: as for compile_dir() (default 0)
    legacy: as for compile_dir() (default False)
    optimize: as for compile_dir() (default -1)
    invalidation_mode: as for compile_dir()
    """
    success = True
    for dir in sys.path:
        # '' and '.' both mean the current directory on sys.path.
        if skip_curdir and (not dir or dir == os.curdir):
            if quiet < 2:
                print('Skipping current directory')
            continue
        success = success and compile_dir(
            dir,
            maxlevels,
            None,
            force,
            quiet=quiet,
            legacy=legacy,
            optimize=optimize,
            invalidation_mode=invalidation_mode,
        )
    return success
|
||||
|
||||
|
||||
def main():
    """Script main program.

    Parses the command line, resolves option interactions (-l/-r, -d vs
    -s/-p, -i file lists, optimization levels) and dispatches to
    compile_file / compile_dir / compile_path.  Returns a truthy value on
    overall success, falsy on any failure or interrupt.
    """
    import argparse

    parser = argparse.ArgumentParser(
        description='Utilities to support installing Python libraries.')
    parser.add_argument('-l', action='store_const', const=0,
                        default=None, dest='maxlevels',
                        help="don't recurse into subdirectories")
    parser.add_argument('-r', type=int, dest='recursion',
                        help=('control the maximum recursion level. '
                              'if `-l` and `-r` options are specified, '
                              'then `-r` takes precedence.'))
    parser.add_argument('-f', action='store_true', dest='force',
                        help='force rebuild even if timestamps are up to date')
    parser.add_argument('-q', action='count', dest='quiet', default=0,
                        help='output only error messages; -qq will suppress '
                             'the error messages as well.')
    parser.add_argument('-b', action='store_true', dest='legacy',
                        help='use legacy (pre-PEP3147) compiled file locations')
    parser.add_argument('-d', metavar='DESTDIR', dest='ddir', default=None,
                        help=('directory to prepend to file paths for use in '
                              'compile-time tracebacks and in runtime '
                              'tracebacks in cases where the source file is '
                              'unavailable'))
    parser.add_argument('-s', metavar='STRIPDIR', dest='stripdir',
                        default=None,
                        help=('part of path to left-strip from path '
                              'to source file - for example buildroot. '
                              '`-d` and `-s` options cannot be '
                              'specified together.'))
    parser.add_argument('-p', metavar='PREPENDDIR', dest='prependdir',
                        default=None,
                        help=('path to add as prefix to path '
                              'to source file - for example / to make '
                              'it absolute when some part is removed '
                              'by `-s` option. '
                              '`-d` and `-p` options cannot be '
                              'specified together.'))
    parser.add_argument('-x', metavar='REGEXP', dest='rx', default=None,
                        help=('skip files matching the regular expression; '
                              'the regexp is searched for in the full path '
                              'of each file considered for compilation'))
    parser.add_argument('-i', metavar='FILE', dest='flist',
                        help=('add all the files and directories listed in '
                              'FILE to the list considered for compilation; '
                              'if "-", names are read from stdin'))
    parser.add_argument('compile_dest', metavar='FILE|DIR', nargs='*',
                        help=('zero or more file and directory names '
                              'to compile; if no arguments given, defaults '
                              'to the equivalent of -l sys.path'))
    parser.add_argument('-j', '--workers', default=1,
                        type=int, help='Run compileall concurrently')
    parser.add_argument('-o', action='append', type=int, dest='opt_levels',
                        help=('Optimization levels to run compilation with. '
                              'Default is -1 which uses optimization level of '
                              'Python interpreter itself (specified by -O).'))
    parser.add_argument('-e', metavar='DIR', dest='limit_sl_dest',
                        help='Ignore symlinks pointing outsite of the DIR')
    parser.add_argument('--hardlink-dupes', action='store_true',
                        dest='hardlink_dupes',
                        help='Hardlink duplicated pyc files')

    # --invalidation-mode only exists on 3.7+, where PycInvalidationMode
    # was introduced (PEP 552).
    if PY37:
        invalidation_modes = [mode.name.lower().replace('_', '-')
                              for mode in py_compile.PycInvalidationMode]
        parser.add_argument('--invalidation-mode',
                            choices=sorted(invalidation_modes),
                            help=('set .pyc invalidation mode; defaults to '
                                  '"checked-hash" if the SOURCE_DATE_EPOCH '
                                  'environment variable is set, and '
                                  '"timestamp" otherwise.'))

    args = parser.parse_args()
    compile_dests = args.compile_dest

    if args.rx:
        import re
        args.rx = re.compile(args.rx)

    # An empty -e argument is treated the same as not passing it at all.
    if args.limit_sl_dest == "":
        args.limit_sl_dest = None

    # -r takes precedence over -l when both are given.
    if args.recursion is not None:
        maxlevels = args.recursion
    else:
        maxlevels = args.maxlevels

    if args.opt_levels is None:
        args.opt_levels = [-1]

    if len(args.opt_levels) == 1 and args.hardlink_dupes:
        parser.error(("Hardlinking of duplicated bytecode makes sense "
                      "only for more than one optimization level."))

    if args.ddir is not None and (
        args.stripdir is not None or args.prependdir is not None
    ):
        parser.error("-d cannot be used in combination with -s or -p")

    # if flist is provided then load it
    if args.flist:
        try:
            with (sys.stdin if args.flist=='-' else open(args.flist)) as f:
                for line in f:
                    compile_dests.append(line.strip())
        except OSError:
            if args.quiet < 2:
                print("Error reading file list {}".format(args.flist))
            return False

    # -j 0 means "auto" and is mapped to workers=None for compile_dir().
    if args.workers is not None:
        args.workers = args.workers or None

    # On < 3.7 the attribute does not exist; short-circuit guards the access.
    if PY37 and args.invalidation_mode:
        ivl_mode = args.invalidation_mode.replace('-', '_').upper()
        invalidation_mode = py_compile.PycInvalidationMode[ivl_mode]
    else:
        invalidation_mode = None

    success = True
    try:
        if compile_dests:
            for dest in compile_dests:
                if os.path.isfile(dest):
                    if not compile_file(dest, args.ddir, args.force, args.rx,
                                        args.quiet, args.legacy,
                                        invalidation_mode=invalidation_mode,
                                        stripdir=args.stripdir,
                                        prependdir=args.prependdir,
                                        optimize=args.opt_levels,
                                        limit_sl_dest=args.limit_sl_dest,
                                        hardlink_dupes=args.hardlink_dupes):
                        success = False
                else:
                    if not compile_dir(dest, maxlevels, args.ddir,
                                       args.force, args.rx, args.quiet,
                                       args.legacy, workers=args.workers,
                                       invalidation_mode=invalidation_mode,
                                       stripdir=args.stripdir,
                                       prependdir=args.prependdir,
                                       optimize=args.opt_levels,
                                       limit_sl_dest=args.limit_sl_dest,
                                       hardlink_dupes=args.hardlink_dupes):
                        success = False
            return success
        else:
            # No explicit targets: behave like "-l sys.path".
            return compile_path(legacy=args.legacy, force=args.force,
                                quiet=args.quiet,
                                invalidation_mode=invalidation_mode)
    except KeyboardInterrupt:
        if args.quiet < 2:
            print("\n[interrupted]")
        return False
    return True
|
||||
|
||||
|
||||
if __name__ == '__main__':
    # Process exit code: 0 when main() reports success, 1 otherwise.
    sys.exit(int(not main()))
|
@ -1,171 +0,0 @@
|
||||
'''Script to perform import of each module given to %%py_check_import
|
||||
'''
|
||||
import argparse
|
||||
import importlib
|
||||
import fnmatch
|
||||
import os
|
||||
import re
|
||||
import site
|
||||
import sys
|
||||
|
||||
from contextlib import contextmanager
|
||||
from pathlib import Path
|
||||
|
||||
|
||||
def read_modules_files(file_paths):
    '''Read module names from the files (modules must be newline separated).

    Return the module names list or, if no files were provided, an empty list.
    '''

    if not file_paths:
        return []

    collected = []
    for path in file_paths:
        # Whitespace split also tolerates trailing newlines / blank lines.
        collected.extend(path.read_text().split())
    return collected
|
||||
|
||||
|
||||
def read_modules_from_cli(argv):
    '''Read module names from command-line arguments (space or comma separated).

    Return the module names list.
    '''

    if not argv:
        return []

    # %%py3_check_import allows to separate module list with comma or whitespace,
    # we need to unify the output to a list of particular elements
    tokens = re.split(r'[\s,]+', ' '.join(argv))
    # Shell expansion can leave empty tokens (e.g. a trailing separator);
    # drop them before passing the list further.
    return [token for token in tokens if token]
|
||||
|
||||
|
||||
def filter_top_level_modules_only(modules):
    '''Filter out entries with nested modules (containing dot) ie. 'foo.bar'.

    Return the list of top-level modules.
    '''

    return list(filter(lambda module: '.' not in module, modules))
|
||||
|
||||
|
||||
def any_match(text, globs):
    '''Return True if any of given globs fnmatchcase's the given text.'''

    for pattern in globs:
        if fnmatch.fnmatchcase(text, pattern):
            return True
    return False
|
||||
|
||||
|
||||
def exclude_unwanted_module_globs(globs, modules):
    '''Filter out entries which match the either of the globs given as argv.

    Return the list of filtered modules.
    '''

    kept = []
    for module in modules:
        # Case-sensitive glob match against every exclusion pattern.
        if not any(fnmatch.fnmatchcase(module, g) for g in globs):
            kept.append(module)
    return kept
|
||||
|
||||
|
||||
def read_modules_from_all_args(args):
    '''Return a joined list of modules from all given command-line arguments.

    Combines names from -f/--filename files and positional arguments, then
    applies the -e/--exclude globs and the -t/--top-level filter.
    Raises ValueError when filtering leaves nothing to check.
    '''

    modules = read_modules_files(args.filename)
    modules.extend(read_modules_from_cli(args.modules))
    if args.exclude:
        modules = exclude_unwanted_module_globs(args.exclude, modules)

    if args.top_level:
        modules = filter_top_level_modules_only(modules)

    # Error when someone accidentally managed to filter out everything
    if len(modules) == 0:
        raise ValueError('No modules to check were left')

    return modules
|
||||
|
||||
|
||||
def import_modules(modules):
    '''Procedure to perform import check for each module name from the given list of modules.
    '''

    for module_name in modules:
        # Announce on stderr so stdout stays clean for build tooling.
        print('Check import:', module_name, file=sys.stderr)
        importlib.import_module(module_name)
|
||||
|
||||
|
||||
def argparser():
    """Build the command-line parser for the import check script."""
    cli = argparse.ArgumentParser(
        description='Generate list of all importable modules for import check.'
    )
    cli.add_argument(
        'modules', nargs='*',
        help=('Add modules to check the import (space or comma separated).'),
    )
    cli.add_argument(
        '-f', '--filename', action='append', type=Path,
        help='Add importable module names list from file.',
    )
    cli.add_argument(
        '-t', '--top-level', action='store_true',
        help='Check only top-level modules.',
    )
    cli.add_argument(
        '-e', '--exclude', action='append',
        help='Provide modules globs to be excluded from the check.',
    )
    return cli
|
||||
|
||||
|
||||
@contextmanager
def remove_unwanteds_from_sys_path():
    '''Remove cwd and this script's parent from sys.path for the import test.
    Bring the original contents back after import is done (or failed)
    '''

    unwanted = (Path.cwd().absolute(), Path(__file__).parent.absolute())
    saved_sys_path = list(sys.path)
    for entry in saved_sys_path:
        if Path(entry).absolute() in unwanted:
            sys.path.remove(entry)
    try:
        yield
    finally:
        # Restore even when the guarded block raised.
        sys.path = saved_sys_path
|
||||
|
||||
|
||||
def addsitedirs_from_environ():
    '''Load directories from the _PYTHONSITE environment variable (separated by :)
    and load the ones already present in sys.path via site.addsitedir()
    to handle .pth files in them.

    This is needed to properly import old-style namespace packages with nspkg.pth files.
    See https://bugzilla.redhat.com/2018551 for a more detailed rationale.'''
    raw = os.getenv('_PYTHONSITE', '')
    for entry in raw.split(':'):
        # Only process entries that are already on sys.path.
        if entry in sys.path:
            site.addsitedir(entry)
|
||||
|
||||
|
||||
def main(argv=None):
    """Entry point: collect module names from the CLI and import each one.

    Raises ValueError when no module source (positional or -f) was given
    or when filtering removed everything; import failures propagate from
    importlib.import_module.
    """

    cli_args = argparser().parse_args(argv)

    # At least one source of module names (positionals or -f files) is required.
    if not cli_args.modules and not cli_args.filename:
        raise ValueError('No modules to check were provided')

    modules = read_modules_from_all_args(cli_args)

    # Import with cwd/script dir stripped from sys.path (see the context
    # manager's docstring), after registering _PYTHONSITE site dirs for
    # .pth handling.
    with remove_unwanteds_from_sys_path():
        addsitedirs_from_environ()
        import_modules(modules)
|
||||
|
||||
|
||||
if __name__ == '__main__':
    # Run the import check when executed as a script.
    main()
|
@ -1,52 +1,24 @@
|
||||
# Note that the path could itself be a python file, or a directory
|
||||
|
||||
# Note that the py_byte_compile macro should work for all Python versions
|
||||
# Which unfortunately makes the definition more complicated than it should be
|
||||
# Python's compile_all module only works on directories, and requires a max
|
||||
# recursion depth
|
||||
|
||||
# Usage:
|
||||
# %%py_byte_compile <interpreter> <path>
|
||||
# %py_byte_compile <interpreter> <path>
|
||||
# Example:
|
||||
# %%py_byte_compile %%{__python3} %%{buildroot}%%{_datadir}/spam/plugins/
|
||||
# %py_byte_compile %{__python3} %{buildroot}%{_datadir}/spam/plugins/
|
||||
|
||||
# This will terminate build on SyntaxErrors, if you want to avoid that,
|
||||
# use it in a subshell like this:
|
||||
# (%%{py_byte_compile <interpreter> <path>}) || :
|
||||
|
||||
# Setting PYTHONHASHSEED=0 disables Python hash seed randomization
|
||||
# This should help with byte-compilation reproducibility: https://bugzilla.redhat.com/show_bug.cgi?id=1686078
|
||||
# (%{py_byte_compile <interpreter> <path>}) || :
|
||||
|
||||
%py_byte_compile()\
|
||||
py2_byte_compile () {\
|
||||
python_binary="env PYTHONHASHSEED=0 %1"\
|
||||
bytecode_compilation_path="%2"\
|
||||
failure=0\
|
||||
find $bytecode_compilation_path -type f -a -name "*.py" -print0 | xargs -0 $python_binary -s -c 'import py_compile, sys; [py_compile.compile(f, dfile=f.partition("'"$RPM_BUILD_ROOT"'")[2], doraise=True) for f in sys.argv[1:]]' || failure=1\
|
||||
find $bytecode_compilation_path -type f -a -name "*.py" -print0 | xargs -0 $python_binary -s -O -c 'import py_compile, sys; [py_compile.compile(f, dfile=f.partition("'"$RPM_BUILD_ROOT"'")[2], doraise=True) for f in sys.argv[1:]]' || failure=1\
|
||||
test $failure -eq 0\
|
||||
}\
|
||||
\
|
||||
py3_byte_compile () {\
|
||||
python_binary="env PYTHONHASHSEED=0 %1"\
|
||||
bytecode_compilation_path="%2"\
|
||||
PYTHONPATH="%{_rpmconfigdir}/redhat" $python_binary -s -B -m compileall2 -o 0 -o 1 -s $RPM_BUILD_ROOT -p / $bytecode_compilation_path \
|
||||
}\
|
||||
\
|
||||
py39_byte_compile () {\
|
||||
python_binary="env PYTHONHASHSEED=0 %1"\
|
||||
bytecode_compilation_path="%2"\
|
||||
$python_binary -s -B -m compileall -o 0 -o 1 -s $RPM_BUILD_ROOT -p / $bytecode_compilation_path \
|
||||
}\
|
||||
\
|
||||
# Path to interpreter should not contain any arguments \
|
||||
[[ "%1" =~ " -" ]] && echo "ERROR py_byte_compile: Path to interpreter should not contain any arguments" >&2 && exit 1 \
|
||||
# Get version without a dot (36 instead of 3.6), bash doesn't compare floats well \
|
||||
python_version=$(%1 -c "import sys; sys.stdout.write('{0.major}{0.minor}'.format(sys.version_info))") \
|
||||
# compileall2 is an enhanced fork of stdlib compileall module for Python >= 3.4 \
|
||||
# and it was merged back to stdlib in Python >= 3.9 \
|
||||
if [ "$python_version" -ge 39 ]; then \
|
||||
py39_byte_compile "%1" "%2"; \
|
||||
elif [ "$python_version" -ge 34 ]; then \
|
||||
py3_byte_compile "%1" "%2"; \
|
||||
else \
|
||||
py2_byte_compile "%1" "%2"; \
|
||||
fi
|
||||
python_binary="%1"\
|
||||
buildroot_path="%2"\
|
||||
bytecode_compilation_path=".${buildroot_path/#$RPM_BUILD_ROOT}"\
|
||||
failure=0\
|
||||
pushd $RPM_BUILD_ROOT\
|
||||
find $bytecode_compilation_path -type f -a -name "*.py" -print0 | xargs -0 $python_binary -O -m py_compile || failure=1\
|
||||
find $bytecode_compilation_path -type f -a -name "*.py" -print0 | xargs -0 $python_binary -m py_compile || failure=1\
|
||||
popd\
|
||||
test $failure -eq 0
|
||||
|
@ -0,0 +1,41 @@
|
||||
%__python2 /usr/bin/python2
|
||||
%python2_sitelib %(RHEL_ALLOW_PYTHON2_FOR_BUILD=1 %{__python2} -Esc "from distutils.sysconfig import get_python_lib; print(get_python_lib())")
|
||||
%python2_sitearch %(RHEL_ALLOW_PYTHON2_FOR_BUILD=1 %{__python2} -Esc "from distutils.sysconfig import get_python_lib; print(get_python_lib(1))")
|
||||
%python2_version %(RHEL_ALLOW_PYTHON2_FOR_BUILD=1 %{__python2} -Esc "import sys; sys.stdout.write('{0.major}.{0.minor}'.format(sys.version_info))")
|
||||
%python2_version_nodots %(RHEL_ALLOW_PYTHON2_FOR_BUILD=1 %{__python2} -Esc "import sys; sys.stdout.write('{0.major}{0.minor}'.format(sys.version_info))")
|
||||
|
||||
%py2_shbang_opts -s
|
||||
|
||||
# Use the slashes after expand so that the command starts on the same line as
|
||||
# the macro
|
||||
%py2_build() %{expand:\\\
|
||||
CFLAGS="${CFLAGS:-${RPM_OPT_FLAGS}}" LDFLAGS="${LDFLAGS:-${RPM_LD_FLAGS}}"\\\
|
||||
%{__python2} %{py_setup} %{?py_setup_args} build --executable="%{__python2} %{py2_shbang_opts}" %{?*}
|
||||
sleep 1
|
||||
}
|
||||
|
||||
%py2_build_egg() %{expand:\\\
|
||||
CFLAGS="${CFLAGS:-${RPM_OPT_FLAGS}}" LDFLAGS="${LDFLAGS:-${RPM_LD_FLAGS}}"\\\
|
||||
%{__python2} %{py_setup} %{?py_setup_args} bdist_egg %{?*}
|
||||
sleep 1
|
||||
}
|
||||
|
||||
%py2_build_wheel() %{expand:\\\
|
||||
CFLAGS="${CFLAGS:-${RPM_OPT_FLAGS}}" LDFLAGS="${LDFLAGS:-${RPM_LD_FLAGS}}"\\\
|
||||
%{__python2} %{py_setup} %{?py_setup_args} bdist_wheel %{?*}
|
||||
sleep 1
|
||||
}
|
||||
|
||||
%py2_install() %{expand:\\\
|
||||
CFLAGS="${CFLAGS:-${RPM_OPT_FLAGS}}" LDFLAGS="${LDFLAGS:-${RPM_LD_FLAGS}}"\\\
|
||||
%{__python2} %{py_setup} %{?py_setup_args} install -O1 --skip-build --root %{buildroot} %{?*}
|
||||
}
|
||||
|
||||
%py2_install_egg() %{expand:\\\
|
||||
mkdir -p %{buildroot}%{python2_sitelib}
|
||||
easy_install-%{python2_version} -m --prefix %{buildroot}%{_prefix} -Z dist/*-py%{python2_version}.egg %{?*}
|
||||
}
|
||||
|
||||
%py2_install_wheel() %{expand:\\\
|
||||
pip%{python2_version} install -I dist/%{1} --root %{buildroot} --strip-file-prefix %{buildroot} --no-deps
|
||||
}
|
@ -1,110 +0,0 @@
|
||||
-- Convenience Lua functions that can be used within Python srpm/rpm macros
|
||||
|
||||
-- Determine alternate names provided from the given name.
|
||||
-- Used in pythonname provides generator, python_provide and py_provides.
|
||||
-- If only_3_to_3_X is false/nil/unused there are 2 rules:
|
||||
-- python3-foo -> python-foo, python3.X-foo
|
||||
-- python3.X-foo -> python-foo, python3-foo
|
||||
-- If only_3_to_3_X is true there is only 1 rule:
|
||||
-- python3-foo -> python3X-foo
|
||||
-- There is no python-foo -> rule, python-foo packages are version agnostic.
|
||||
-- Returns a table/array with strings. Empty when no rule matched.
|
||||
-- Compute the alternate package names for *name* (see the rules in the
-- header comment above).  Returns a table/array of strings; empty when no
-- rule matched.
local function python_altnames(name, only_3_to_3_X)
    local xy
    if only_3_to_3_X then
        -- Here we hardcode the xy prefix we want to obsolete to "39", because:
        -- 1. Python 3.9 will remain the main Python version in RHEL 9
        -- 2. python39 in RHEL 8 is still using the dotless naming (as opposed to
        --    python3.9)
        xy = "39"
    else
        xy = rpm.expand('%{__default_python3_pkgversion}')
    end
    local altnames = {}
    local replaced
    -- NB: dash needs to be escaped!
    if name:match('^python3%-') then
        -- python3-foo -> python<xy>-foo (and python-foo unless only_3_to_3_X)
        local prefixes = only_3_to_3_X and {} or {'python-'}
        for i, prefix in ipairs({'python' .. xy .. '-', table.unpack(prefixes)}) do
            replaced = name:gsub('^python3%-', prefix)
            table.insert(altnames, replaced)
        end
    elseif name:match('^python' .. xy .. '%-') and not only_3_to_3_X then
        -- python<xy>-foo -> python-foo, python3-foo
        for i, prefix in ipairs({'python-', 'python3-'}) do
            replaced = name:gsub('^python' .. xy .. '%-', prefix)
            table.insert(altnames, replaced)
        end
    end
    return altnames
end
|
||||
|
||||
|
||||
-- Build "<altname> <op> <evr>" dependency strings for every alternate name
-- of *name*; tag_type is "provides" or "obsoletes".  Also records the
-- (name, evr) pair in the global cache used by __python_alttags_once.
local function __python_alttags(name, evr, tag_type)
    -- for the "provides" tag_type we want also unversioned provides
    local only_3_to_3_X = tag_type ~= "provides"
    -- provides are exact (=), obsoletes are upper-bounded (<)
    local operator = tag_type == "provides" and ' = ' or ' < '

    -- global cache that tells what package NEVRs were already processed for the
    -- given tag type
    if __python_alttags_beenthere == nil then
        __python_alttags_beenthere = {}
    end
    if __python_alttags_beenthere[tag_type] == nil then
        __python_alttags_beenthere[tag_type] = {}
    end
    __python_alttags_beenthere[tag_type][name .. ' ' .. evr] = true
    local alttags = {}
    for i, altname in ipairs(python_altnames(name, only_3_to_3_X)) do
        table.insert(alttags, altname .. operator .. evr)
    end
    return alttags
end
|
||||
|
||||
-- For any given name and epoch-version-release, return provides except self.
-- Uses python_altnames under the hood
-- Returns a table/array with strings.
local function python_altprovides(name, evr)
    return __python_alttags(name, evr, "provides")
end
|
||||
|
||||
-- For any given name and epoch-version-release, return versioned obsoletes except self.
-- Uses python_altnames under the hood
-- Returns a table/array with strings.
local function python_altobsoletes(name, evr)
    return __python_alttags(name, evr, "obsoletes")
end
|
||||
|
||||
|
||||
-- Return the alternate tags for (name, evr) only the first time this pair
-- is seen for the given tag_type; nil on any repeated call.
local function __python_alttags_once(name, evr, tag_type)
    -- global cache that tells what provides were already processed
    local cache = __python_alttags_beenthere
    if cache and cache[tag_type] and cache[tag_type][name .. ' ' .. evr] ~= nil then
        return nil
    end
    return __python_alttags(name, evr, tag_type)
end
|
||||
|
||||
-- Like python_altprovides but only return something once.
-- For each argument can only be used once, returns nil otherwise.
-- Previous usage of python_altprovides counts as well.
local function python_altprovides_once(name, evr)
    return __python_alttags_once(name, evr, "provides")
end
|
||||
|
||||
-- Like python_altobsoletes but only return something once.
-- For each argument can only be used once, returns nil otherwise.
-- Previous usage of python_altobsoletes counts as well.
local function python_altobsoletes_once(name, evr)
    return __python_alttags_once(name, evr, "obsoletes")
end
|
||||
|
||||
|
||||
-- Module export table.
return {
    python_altnames = python_altnames,
    python_altprovides = python_altprovides,
    python_altobsoletes = python_altobsoletes,
    python_altprovides_once = python_altprovides_once,
    python_altobsoletes_once = python_altobsoletes_once,
}
|
Loading…
Reference in new issue