parent 455a6d994d
commit 0a9fe2fa07
@@ -0,0 +1,88 @@
#!/usr/bin/python3
"""
Several packages with various Python interpreters *Supplement* tox.
*Supplements* is the reverse dependency of *Recommends*.

See https://lists.fedoraproject.org/archives/list/python-devel@lists.fedoraproject.org/thread/NVVUXSVSPFQOWIGBE2JNI67HEO7R63ZQ/

This script:

1) figures out all packages in the enabled repositories supplementing tox
2) ensures there is a venv.sh test for each of them in tests.yml

That way, when we change tox (update, patch, etc.),
we will always test it with all Pythons that supplement it.
"""

import shlex
import subprocess
import sys
import yaml


def parse_python_test_arg(command):
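    """Extract the Python to test from a test command's PYTHON= or VERSION= argument.

    Illustrative examples (not taken from tests.yml):
    'PYTHON=pypy3.9 ./venv.sh' -> 'pypy3.9', 'VERSION=3.11 ./venv.sh' -> 'python3.11'.
    """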
    tokens = shlex.split(command)
    for token in tokens:
        if token.startswith('PYTHON='):
            return token.removeprefix('PYTHON=')
    # only check VERSION if PYTHON was not found
    for token in tokens:
        if token.startswith('VERSION='):
            return 'python' + token.removeprefix('VERSION=')
    raise RuntimeError(f'Could not determine the Python version from `{command}`')


# First, construct a set of various Pythons we test, e.g. {python3.10, python3.7, pypy3.6, ...}
tested_pythons = set()
with open('tests.yml') as f:
    tests_yml = yaml.safe_load(f)
# this nested structure access is quite fragile,
# but at least it should fail the test if we reach into the wrong place
for test in tests_yml[-1]['roles'][0]['tests']:
    for value in test.values():
        run = value['run']
        if run.endswith('./venv.sh'):
            tested_pythons.add(parse_python_test_arg(run))
print('Tested Pythons found in tests.yml:', file=sys.stderr)
for python in sorted(tested_pythons):
    print(' ', python, file=sys.stderr)


# Get all packages that supplement tox;
# no repo explicitly specified means we use the repos enabled on the CI system, which should be what we want
repoquery_result = subprocess.check_output(['dnf', 'repoquery', '--whatsupplements', 'tox'], text=True)
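# repoquery prints one matching package per line as a full NEVRA,
# e.g. something like pypy3.10-7.3.12-1.fc39.x86_64 (illustrative example, not real output)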
supplementing_pkgs = set(repoquery_result.splitlines())


# It gets quite tricky: since packages like "pypy" can supplement tox, we get a set of provides for all of them
supplementing_pkgs_provides = {}
for nvra in supplementing_pkgs:
    repoquery_result = subprocess.check_output(['dnf', '-q', 'repoquery', '--provides', nvra], text=True)
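    # each provides line has the form `name [= version]`, e.g. `python(abi) = 3.12` (illustrative);
    # keeping only the part before the first space gives the bare capability name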
    provides = set(repoquery_result.splitlines())
    unversioned_provides = {provide.split(' ')[0] for provide in provides}
    supplementing_pkgs_provides[nvra.rsplit('-', 2)[0]] = unversioned_provides


# We use this hack to treat -devel and -libs packages as if they were their base packages
def normalize_name(pkgname):
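    """Strip a -devel/-libs suffix so e.g. a hypothetical `pypy3.10-libs` matches the tested `pypy3.10`."""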
    for suffix in '-devel', '-libs':
        if pkgname.endswith(suffix):
            return pkgname.removesuffix(suffix)
    return pkgname


# Now, for each package that supplements tox, we check if there is a tested Python that *is* it
exit_code = 0
for pkg, provides in supplementing_pkgs_provides.items():
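    # first try the package name itself, then fall back to anything it provides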
    if normalize_name(pkg) in tested_pythons:
        print(f'{pkg} is tested', file=sys.stderr)
        continue
    for provide in provides:
        if normalize_name(provide) in tested_pythons:
            print(f'{pkg} is tested (via {provide})', file=sys.stderr)
            break
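    # for/else: the else branch runs only when the inner loop finishes without a break,
    # i.e. none of the package's provides matched a tested Python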
    else:
        print(f'{pkg} is NOT tested', file=sys.stderr)
        exit_code = 1

sys.exit(exit_code)