commit 2e3f8c9c8d4692a35289502cd3f23e6b602d08c8 Author: MSVSphere Packaging Team Date: Fri Mar 29 16:16:26 2024 +0300 import python3.11-3.11.7-1.el8 diff --git a/.gitignore b/.gitignore new file mode 100644 index 0000000..b16aaa9 --- /dev/null +++ b/.gitignore @@ -0,0 +1 @@ +SOURCES/Python-3.11.7.tar.xz diff --git a/.python3.11.metadata b/.python3.11.metadata new file mode 100644 index 0000000..7c0a04b --- /dev/null +++ b/.python3.11.metadata @@ -0,0 +1 @@ +f2534d591121f3845388fbdd6a121b96dfe305a6 SOURCES/Python-3.11.7.tar.xz diff --git a/SOURCES/00001-rpath.patch b/SOURCES/00001-rpath.patch new file mode 100644 index 0000000..778c077 --- /dev/null +++ b/SOURCES/00001-rpath.patch @@ -0,0 +1,30 @@ +From 0000000000000000000000000000000000000000 Mon Sep 17 00:00:00 2001 +From: David Malcolm +Date: Wed, 13 Jan 2010 21:25:18 +0000 +Subject: [PATCH] 00001: Fixup distutils/unixccompiler.py to remove standard + library path from rpath Was Patch0 in ivazquez' python3000 specfile + +--- + Lib/distutils/unixccompiler.py | 9 +++++++++ + 1 file changed, 9 insertions(+) + +diff --git a/Lib/distutils/unixccompiler.py b/Lib/distutils/unixccompiler.py +index d00c48981e..0283a28c19 100644 +--- a/Lib/distutils/unixccompiler.py ++++ b/Lib/distutils/unixccompiler.py +@@ -82,6 +82,15 @@ class UnixCCompiler(CCompiler): + if sys.platform == "cygwin": + exe_extension = ".exe" + ++ def _fix_lib_args(self, libraries, library_dirs, runtime_library_dirs): ++ """Remove standard library path from rpath""" ++ libraries, library_dirs, runtime_library_dirs = super()._fix_lib_args( ++ libraries, library_dirs, runtime_library_dirs) ++ libdir = sysconfig.get_config_var('LIBDIR') ++ if runtime_library_dirs and (libdir in runtime_library_dirs): ++ runtime_library_dirs.remove(libdir) ++ return libraries, library_dirs, runtime_library_dirs ++ + def preprocess(self, source, output_file=None, macros=None, + include_dirs=None, extra_preargs=None, extra_postargs=None): + fixed_args = self._fix_compile_args(None, macros, include_dirs) diff --git a/SOURCES/00251-change-user-install-location.patch b/SOURCES/00251-change-user-install-location.patch new file mode 100644 index 0000000..53096ec --- /dev/null +++ b/SOURCES/00251-change-user-install-location.patch @@ -0,0 +1,204 @@ +From 0000000000000000000000000000000000000000 Mon Sep 17 00:00:00 2001 +From: =?UTF-8?q?Miro=20Hron=C4=8Dok?= +Date: Mon, 15 Feb 2021 12:19:27 +0100 +Subject: [PATCH] 00251: Change user install location +MIME-Version: 1.0 +Content-Type: text/plain; charset=UTF-8 +Content-Transfer-Encoding: 8bit + +Set values of base and platbase in sysconfig from /usr +to /usr/local when RPM build is not detected +to make pip and similar tools install into separate location. + +Set values of prefix and exec_prefix in distutils install command +to /usr/local if executable is /usr/bin/python* and RPM build +is not detected to make distutils and pypa/distutils install into separate location. + +Fedora Change: https://fedoraproject.org/wiki/Changes/Making_sudo_pip_safe +Downstream only. 
+ +We've tried to rework in Fedora 36/Python 3.10 to follow https://bugs.python.org/issue43976 +but we have identified serious problems with that approach, +see https://bugzilla.redhat.com/2026979 or https://bugzilla.redhat.com/2097183 + +pypa/distutils integration: https://github.com/pypa/distutils/pull/70 + +Co-authored-by: Petr Viktorin +Co-authored-by: Miro Hrončok +Co-authored-by: Michal Cyprian +Co-authored-by: Lumír Balhar +--- + Lib/distutils/command/install.py | 8 ++++-- + Lib/site.py | 9 +++++- + Lib/sysconfig.py | 49 +++++++++++++++++++++++++++++++- + Lib/test/test_sysconfig.py | 17 +++++++++-- + 4 files changed, 77 insertions(+), 6 deletions(-) + +diff --git a/Lib/distutils/command/install.py b/Lib/distutils/command/install.py +index 01d5331a63..79f70f0de4 100644 +--- a/Lib/distutils/command/install.py ++++ b/Lib/distutils/command/install.py +@@ -159,6 +159,8 @@ class install(Command): + + negative_opt = {'no-compile' : 'compile'} + ++ # Allow Fedora to add components to the prefix ++ _prefix_addition = getattr(sysconfig, '_prefix_addition', '') + + def initialize_options(self): + """Initializes options.""" +@@ -441,8 +443,10 @@ def finalize_unix(self): + raise DistutilsOptionError( + "must not supply exec-prefix without prefix") + +- self.prefix = os.path.normpath(sys.prefix) +- self.exec_prefix = os.path.normpath(sys.exec_prefix) ++ self.prefix = ( ++ os.path.normpath(sys.prefix) + self._prefix_addition) ++ self.exec_prefix = ( ++ os.path.normpath(sys.exec_prefix) + self._prefix_addition) + + else: + if self.exec_prefix is None: +diff --git a/Lib/site.py b/Lib/site.py +index 69670d9d7f..104cb93899 100644 +--- a/Lib/site.py ++++ b/Lib/site.py +@@ -377,8 +377,15 @@ def getsitepackages(prefixes=None): + return sitepackages + + def addsitepackages(known_paths, prefixes=None): +- """Add site-packages to sys.path""" ++ """Add site-packages to sys.path ++ ++ '/usr/local' is included in PREFIXES if RPM build is not detected ++ to make packages installed into this location visible. ++ ++ """ + _trace("Processing global site-packages") ++ if ENABLE_USER_SITE and 'RPM_BUILD_ROOT' not in os.environ: ++ PREFIXES.insert(0, "/usr/local") + for sitedir in getsitepackages(prefixes): + if os.path.isdir(sitedir): + addsitedir(sitedir, known_paths) +diff --git a/Lib/sysconfig.py b/Lib/sysconfig.py +index ebe3711827..55af57b335 100644 +--- a/Lib/sysconfig.py ++++ b/Lib/sysconfig.py +@@ -103,6 +103,11 @@ + else: + _INSTALL_SCHEMES['venv'] = _INSTALL_SCHEMES['posix_venv'] + ++# For a brief period of time in the Fedora 36 life cycle, ++# this installation scheme existed and was documented in the release notes. ++# For backwards compatibility, we keep it here (at least on 3.10 and 3.11). ++_INSTALL_SCHEMES['rpm_prefix'] = _INSTALL_SCHEMES['posix_prefix'] ++ + + # NOTE: site.py has copy of this function. + # Sync it when modify this function. +@@ -162,6 +167,19 @@ def joinuser(*args): + }, + } + ++# This is used by distutils.command.install in the stdlib ++# as well as pypa/distutils (e.g. bundled in setuptools). ++# The self.prefix value is set to sys.prefix + /local/ ++# if neither RPM build nor virtual environment is ++# detected to make distutils install packages ++# into the separate location. 
++# https://fedoraproject.org/wiki/Changes/Making_sudo_pip_safe ++if (not (hasattr(sys, 'real_prefix') or ++ sys.prefix != sys.base_prefix) and ++ 'RPM_BUILD_ROOT' not in os.environ): ++ _prefix_addition = '/local' ++ ++ + _SCHEME_KEYS = ('stdlib', 'platstdlib', 'purelib', 'platlib', 'include', + 'scripts', 'data') + +@@ -258,11 +276,40 @@ def _extend_dict(target_dict, other_dict): + target_dict[key] = value + + ++_CONFIG_VARS_LOCAL = None ++ ++ ++def _config_vars_local(): ++ # This function returns the config vars with prefixes amended to /usr/local ++ # https://fedoraproject.org/wiki/Changes/Making_sudo_pip_safe ++ global _CONFIG_VARS_LOCAL ++ if _CONFIG_VARS_LOCAL is None: ++ _CONFIG_VARS_LOCAL = dict(get_config_vars()) ++ _CONFIG_VARS_LOCAL['base'] = '/usr/local' ++ _CONFIG_VARS_LOCAL['platbase'] = '/usr/local' ++ return _CONFIG_VARS_LOCAL ++ ++ + def _expand_vars(scheme, vars): + res = {} + if vars is None: + vars = {} +- _extend_dict(vars, get_config_vars()) ++ ++ # when we are not in a virtual environment or an RPM build ++ # we change '/usr' to '/usr/local' ++ # to avoid surprises, we explicitly check for the /usr/ prefix ++ # Python virtual environments have different prefixes ++ # we only do this for posix_prefix, not to mangle the venv scheme ++ # posix_prefix is used by sudo pip install ++ # we only change the defaults here, so explicit --prefix will take precedence ++ # https://fedoraproject.org/wiki/Changes/Making_sudo_pip_safe ++ if (scheme == 'posix_prefix' and ++ _PREFIX == '/usr' and ++ 'RPM_BUILD_ROOT' not in os.environ): ++ _extend_dict(vars, _config_vars_local()) ++ else: ++ _extend_dict(vars, get_config_vars()) ++ + if os.name == 'nt': + # On Windows we want to substitute 'lib' for schemes rather + # than the native value (without modifying vars, in case it +diff --git a/Lib/test/test_sysconfig.py b/Lib/test/test_sysconfig.py +index d96371d242..72b028435f 100644 +--- a/Lib/test/test_sysconfig.py ++++ b/Lib/test/test_sysconfig.py +@@ -111,8 +111,19 @@ def test_get_path(self): + for scheme in _INSTALL_SCHEMES: + for name in _INSTALL_SCHEMES[scheme]: + expected = _INSTALL_SCHEMES[scheme][name].format(**config_vars) ++ tested = get_path(name, scheme) ++ # https://fedoraproject.org/wiki/Changes/Making_sudo_pip_safe ++ if tested.startswith('/usr/local'): ++ # /usr/local should only be used in posix_prefix ++ self.assertEqual(scheme, 'posix_prefix') ++ # Fedora CI runs tests for venv and virtualenv that check for other prefixes ++ self.assertEqual(sys.prefix, '/usr') ++ # When building the RPM of Python, %check runs this with RPM_BUILD_ROOT set ++ # Fedora CI runs this with RPM_BUILD_ROOT unset ++ self.assertNotIn('RPM_BUILD_ROOT', os.environ) ++ tested = tested.replace('/usr/local', '/usr') + self.assertEqual( +- os.path.normpath(get_path(name, scheme)), ++ os.path.normpath(tested), + os.path.normpath(expected), + ) + +@@ -336,7 +347,7 @@ def test_get_config_h_filename(self): + self.assertTrue(os.path.isfile(config_h), config_h) + + def test_get_scheme_names(self): +- wanted = ['nt', 'posix_home', 'posix_prefix', 'posix_venv', 'nt_venv', 'venv'] ++ wanted = ['nt', 'posix_home', 'posix_prefix', 'posix_venv', 'nt_venv', 'venv', 'rpm_prefix'] + if HAS_USER_BASE: + wanted.extend(['nt_user', 'osx_framework_user', 'posix_user']) + self.assertEqual(get_scheme_names(), tuple(sorted(wanted))) +@@ -348,6 +359,8 @@ def test_symlink(self): # Issue 7880 + cmd = "-c", "import sysconfig; print(sysconfig.get_platform())" + self.assertEqual(py.call_real(*cmd), py.call_link(*cmd)) + ++ 
@unittest.skipIf('RPM_BUILD_ROOT' not in os.environ, ++ "Test doesn't expect Fedora's paths") + def test_user_similar(self): + # Issue #8759: make sure the posix scheme for the users + # is similar to the global posix_prefix one diff --git a/SOURCES/00328-pyc-timestamp-invalidation-mode.patch b/SOURCES/00328-pyc-timestamp-invalidation-mode.patch new file mode 100644 index 0000000..d04a267 --- /dev/null +++ b/SOURCES/00328-pyc-timestamp-invalidation-mode.patch @@ -0,0 +1,54 @@ +From 0000000000000000000000000000000000000000 Mon Sep 17 00:00:00 2001 +From: =?UTF-8?q?Miro=20Hron=C4=8Dok?= +Date: Thu, 11 Jul 2019 13:44:13 +0200 +Subject: [PATCH] 00328: Restore pyc to TIMESTAMP invalidation mode as default + in rpmbuild + +Since Fedora 31, the $SOURCE_DATE_EPOCH is set in rpmbuild to the latest +%changelog date. This makes Python default to the CHECKED_HASH pyc +invalidation mode, bringing more reproducible builds traded for an import +performance decrease. To avoid that, we don't default to CHECKED_HASH +when $RPM_BUILD_ROOT is set (i.e. when we are building RPM packages). + +See https://src.fedoraproject.org/rpms/redhat-rpm-config/pull-request/57#comment-27426 +Downstream only: only used when building RPM packages +Ideally, we should talk to upstream and explain why we don't want this +--- + Lib/py_compile.py | 3 ++- + Lib/test/test_py_compile.py | 2 ++ + 2 files changed, 4 insertions(+), 1 deletion(-) + +diff --git a/Lib/py_compile.py b/Lib/py_compile.py +index 388614e51b..db52725016 100644 +--- a/Lib/py_compile.py ++++ b/Lib/py_compile.py +@@ -70,7 +70,8 @@ class PycInvalidationMode(enum.Enum): + + + def _get_default_invalidation_mode(): +- if os.environ.get('SOURCE_DATE_EPOCH'): ++ if (os.environ.get('SOURCE_DATE_EPOCH') and not ++ os.environ.get('RPM_BUILD_ROOT')): + return PycInvalidationMode.CHECKED_HASH + else: + return PycInvalidationMode.TIMESTAMP +diff --git a/Lib/test/test_py_compile.py b/Lib/test/test_py_compile.py +index a4a52b180d..e53f5d92aa 100644 +--- a/Lib/test/test_py_compile.py ++++ b/Lib/test/test_py_compile.py +@@ -19,6 +19,7 @@ def without_source_date_epoch(fxn): + def wrapper(*args, **kwargs): + with os_helper.EnvironmentVarGuard() as env: + env.unset('SOURCE_DATE_EPOCH') ++ env.unset('RPM_BUILD_ROOT') + return fxn(*args, **kwargs) + return wrapper + +@@ -29,6 +30,7 @@ def with_source_date_epoch(fxn): + def wrapper(*args, **kwargs): + with os_helper.EnvironmentVarGuard() as env: + env['SOURCE_DATE_EPOCH'] = '123456789' ++ env.unset('RPM_BUILD_ROOT') + return fxn(*args, **kwargs) + return wrapper + diff --git a/SOURCES/00329-fips.patch b/SOURCES/00329-fips.patch new file mode 100644 index 0000000..256d214 --- /dev/null +++ b/SOURCES/00329-fips.patch @@ -0,0 +1,1189 @@ +From ecc5137120f471c22ff6dcb1bd128561c31e023c Mon Sep 17 00:00:00 2001 +From: Charalampos Stratakis +Date: Thu, 12 Dec 2019 16:58:31 +0100 +Subject: [PATCH 1/7] Expose blake2b and blake2s hashes from OpenSSL + +These aren't as powerful as Python's own implementation, but they can be +used under FIPS. +--- + Lib/test/test_hashlib.py | 6 ++ + Modules/_hashopenssl.c | 37 +++++++++++ + Modules/clinic/_hashopenssl.c.h | 106 +++++++++++++++++++++++++++++++- + 3 files changed, 148 insertions(+), 1 deletion(-) + +diff --git a/Lib/test/test_hashlib.py b/Lib/test/test_hashlib.py +index 67becdd..6607ef7 100644 +--- a/Lib/test/test_hashlib.py ++++ b/Lib/test/test_hashlib.py +@@ -363,6 +363,12 @@ class HashLibTestCase(unittest.TestCase): + # 2 is for hashlib.name(...) and hashlib.new(name, ...) 
+ self.assertGreaterEqual(len(constructors), 2) + for hash_object_constructor in constructors: ++ ++ # OpenSSL's blake2s & blake2d don't support `key` ++ _name = hash_object_constructor.__name__ ++ if 'key' in kwargs and _name.startswith('openssl_blake2'): ++ return ++ + m = hash_object_constructor(data, **kwargs) + computed = m.hexdigest() if not shake else m.hexdigest(length) + self.assertEqual( +diff --git a/Modules/_hashopenssl.c b/Modules/_hashopenssl.c +index 57d64bd..d0c3b9e 100644 +--- a/Modules/_hashopenssl.c ++++ b/Modules/_hashopenssl.c +@@ -1078,6 +1078,41 @@ _hashlib_openssl_sha512_impl(PyObject *module, PyObject *data_obj, + } + + ++/*[clinic input] ++_hashlib.openssl_blake2b ++ string as data_obj: object(py_default="b''") = NULL ++ * ++ usedforsecurity: bool = True ++Returns a blake2b hash object; optionally initialized with a string ++[clinic start generated code]*/ ++ ++static PyObject * ++_hashlib_openssl_blake2b_impl(PyObject *module, PyObject *data_obj, ++ int usedforsecurity) ++/*[clinic end generated code: output=7a838b1643cde13e input=4ad7fd54268f3689]*/ ++ ++{ ++ return py_evp_fromname(module, Py_hash_blake2b, data_obj, usedforsecurity); ++} ++ ++/*[clinic input] ++_hashlib.openssl_blake2s ++ string as data_obj: object(py_default="b''") = NULL ++ * ++ usedforsecurity: bool = True ++Returns a blake2s hash object; optionally initialized with a string ++[clinic start generated code]*/ ++ ++static PyObject * ++_hashlib_openssl_blake2s_impl(PyObject *module, PyObject *data_obj, ++ int usedforsecurity) ++/*[clinic end generated code: output=4eda6b40757471da input=1ed39481ffa4e26a]*/ ++ ++{ ++ return py_evp_fromname(module, Py_hash_blake2s, data_obj, usedforsecurity); ++} ++ ++ + #ifdef PY_OPENSSL_HAS_SHA3 + + /*[clinic input] +@@ -2066,6 +2101,8 @@ static struct PyMethodDef EVP_functions[] = { + _HASHLIB_OPENSSL_SHA256_METHODDEF + _HASHLIB_OPENSSL_SHA384_METHODDEF + _HASHLIB_OPENSSL_SHA512_METHODDEF ++ _HASHLIB_OPENSSL_BLAKE2B_METHODDEF ++ _HASHLIB_OPENSSL_BLAKE2S_METHODDEF + _HASHLIB_OPENSSL_SHA3_224_METHODDEF + _HASHLIB_OPENSSL_SHA3_256_METHODDEF + _HASHLIB_OPENSSL_SHA3_384_METHODDEF +diff --git a/Modules/clinic/_hashopenssl.c.h b/Modules/clinic/_hashopenssl.c.h +index 5d84f4a..011026a 100644 +--- a/Modules/clinic/_hashopenssl.c.h ++++ b/Modules/clinic/_hashopenssl.c.h +@@ -530,6 +530,110 @@ exit: + return return_value; + } + ++PyDoc_STRVAR(_hashlib_openssl_blake2b__doc__, ++"openssl_blake2b($module, /, string=b\'\', *, usedforsecurity=True)\n" ++"--\n" ++"\n" ++"Returns a blake2b hash object; optionally initialized with a string"); ++ ++#define _HASHLIB_OPENSSL_BLAKE2B_METHODDEF \ ++ {"openssl_blake2b", _PyCFunction_CAST(_hashlib_openssl_blake2b), METH_FASTCALL|METH_KEYWORDS, _hashlib_openssl_blake2b__doc__}, ++ ++static PyObject * ++_hashlib_openssl_blake2b_impl(PyObject *module, PyObject *data_obj, ++ int usedforsecurity); ++ ++static PyObject * ++_hashlib_openssl_blake2b(PyObject *module, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) ++{ ++ PyObject *return_value = NULL; ++ static const char * const _keywords[] = {"string", "usedforsecurity", NULL}; ++ static _PyArg_Parser _parser = {NULL, _keywords, "openssl_blake2b", 0}; ++ PyObject *argsbuf[2]; ++ Py_ssize_t noptargs = nargs + (kwnames ? 
PyTuple_GET_SIZE(kwnames) : 0) - 0; ++ PyObject *data_obj = NULL; ++ int usedforsecurity = 1; ++ ++ args = _PyArg_UnpackKeywords(args, nargs, NULL, kwnames, &_parser, 0, 1, 0, argsbuf); ++ if (!args) { ++ goto exit; ++ } ++ if (!noptargs) { ++ goto skip_optional_pos; ++ } ++ if (args[0]) { ++ data_obj = args[0]; ++ if (!--noptargs) { ++ goto skip_optional_pos; ++ } ++ } ++skip_optional_pos: ++ if (!noptargs) { ++ goto skip_optional_kwonly; ++ } ++ usedforsecurity = PyObject_IsTrue(args[1]); ++ if (usedforsecurity < 0) { ++ goto exit; ++ } ++skip_optional_kwonly: ++ return_value = _hashlib_openssl_blake2b_impl(module, data_obj, usedforsecurity); ++ ++exit: ++ return return_value; ++} ++ ++PyDoc_STRVAR(_hashlib_openssl_blake2s__doc__, ++"openssl_blake2s($module, /, string=b\'\', *, usedforsecurity=True)\n" ++"--\n" ++"\n" ++"Returns a blake2s hash object; optionally initialized with a string"); ++ ++#define _HASHLIB_OPENSSL_BLAKE2S_METHODDEF \ ++ {"openssl_blake2s", _PyCFunction_CAST(_hashlib_openssl_blake2s), METH_FASTCALL|METH_KEYWORDS, _hashlib_openssl_blake2s__doc__}, ++ ++static PyObject * ++_hashlib_openssl_blake2s_impl(PyObject *module, PyObject *data_obj, ++ int usedforsecurity); ++ ++static PyObject * ++_hashlib_openssl_blake2s(PyObject *module, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) ++{ ++ PyObject *return_value = NULL; ++ static const char * const _keywords[] = {"string", "usedforsecurity", NULL}; ++ static _PyArg_Parser _parser = {NULL, _keywords, "openssl_blake2s", 0}; ++ PyObject *argsbuf[2]; ++ Py_ssize_t noptargs = nargs + (kwnames ? PyTuple_GET_SIZE(kwnames) : 0) - 0; ++ PyObject *data_obj = NULL; ++ int usedforsecurity = 1; ++ ++ args = _PyArg_UnpackKeywords(args, nargs, NULL, kwnames, &_parser, 0, 1, 0, argsbuf); ++ if (!args) { ++ goto exit; ++ } ++ if (!noptargs) { ++ goto skip_optional_pos; ++ } ++ if (args[0]) { ++ data_obj = args[0]; ++ if (!--noptargs) { ++ goto skip_optional_pos; ++ } ++ } ++skip_optional_pos: ++ if (!noptargs) { ++ goto skip_optional_kwonly; ++ } ++ usedforsecurity = PyObject_IsTrue(args[1]); ++ if (usedforsecurity < 0) { ++ goto exit; ++ } ++skip_optional_kwonly: ++ return_value = _hashlib_openssl_blake2s_impl(module, data_obj, usedforsecurity); ++ ++exit: ++ return return_value; ++} ++ + #if defined(PY_OPENSSL_HAS_SHA3) + + PyDoc_STRVAR(_hashlib_openssl_sha3_224__doc__, +@@ -1385,4 +1489,4 @@ exit: + #ifndef _HASHLIB_SCRYPT_METHODDEF + #define _HASHLIB_SCRYPT_METHODDEF + #endif /* !defined(_HASHLIB_SCRYPT_METHODDEF) */ +-/*[clinic end generated code: output=69f2374071bff707 input=a9049054013a1b77]*/ ++/*[clinic end generated code: output=c6a9af5563972eda input=a9049054013a1b77]*/ +-- +2.43.0 + + +From 0198d467525e79cb4be4418708719af3eaee7a40 Mon Sep 17 00:00:00 2001 +From: Petr Viktorin +Date: Thu, 1 Aug 2019 17:57:05 +0200 +Subject: [PATCH 2/7] Use a stronger hash in multiprocessing handshake + +Adapted from patch by David Malcolm, +https://bugs.python.org/issue17258 +--- + Lib/multiprocessing/connection.py | 8 ++++++-- + 1 file changed, 6 insertions(+), 2 deletions(-) + +diff --git a/Lib/multiprocessing/connection.py b/Lib/multiprocessing/connection.py +index 8b81f99..69c0b7e 100644 +--- a/Lib/multiprocessing/connection.py ++++ b/Lib/multiprocessing/connection.py +@@ -43,6 +43,10 @@ BUFSIZE = 8192 + # A very generous timeout when it comes to local connections... + CONNECTION_TIMEOUT = 20. + ++# The hmac module implicitly defaults to using MD5. 
++# Support using a stronger algorithm for the challenge/response code: ++HMAC_DIGEST_NAME='sha256' ++ + _mmap_counter = itertools.count() + + default_family = 'AF_INET' +@@ -752,7 +756,7 @@ def deliver_challenge(connection, authkey): + "Authkey must be bytes, not {0!s}".format(type(authkey))) + message = os.urandom(MESSAGE_LENGTH) + connection.send_bytes(CHALLENGE + message) +- digest = hmac.new(authkey, message, 'md5').digest() ++ digest = hmac.new(authkey, message, HMAC_DIGEST_NAME).digest() + response = connection.recv_bytes(256) # reject large message + if response == digest: + connection.send_bytes(WELCOME) +@@ -768,7 +772,7 @@ def answer_challenge(connection, authkey): + message = connection.recv_bytes(256) # reject large message + assert message[:len(CHALLENGE)] == CHALLENGE, 'message = %r' % message + message = message[len(CHALLENGE):] +- digest = hmac.new(authkey, message, 'md5').digest() ++ digest = hmac.new(authkey, message, HMAC_DIGEST_NAME).digest() + connection.send_bytes(digest) + response = connection.recv_bytes(256) # reject large message + if response != WELCOME: +-- +2.43.0 + + +From a7822e2e1f21529e9730885bd8c9c6ab7c704d5b Mon Sep 17 00:00:00 2001 +From: Petr Viktorin +Date: Thu, 25 Jul 2019 17:19:06 +0200 +Subject: [PATCH 3/7] Disable Python's hash implementations in FIPS mode, + forcing OpenSSL + +--- + Lib/hashlib.py | 11 +++++++---- + Lib/test/test_hashlib.py | 17 ++++++++++++----- + Modules/_blake2/blake2b_impl.c | 4 ++++ + Modules/_blake2/blake2module.c | 3 +++ + Modules/_blake2/blake2s_impl.c | 4 ++++ + Modules/hashlib.h | 23 +++++++++++++++++++++++ + configure.ac | 3 ++- + 7 files changed, 55 insertions(+), 10 deletions(-) + +diff --git a/Lib/hashlib.py b/Lib/hashlib.py +index b546a3f..8b835a4 100644 +--- a/Lib/hashlib.py ++++ b/Lib/hashlib.py +@@ -70,14 +70,17 @@ __all__ = __always_supported + ('new', 'algorithms_guaranteed', + + __builtin_constructor_cache = {} + +-# Prefer our blake2 implementation ++# Prefer our blake2 implementation (unless in FIPS mode) + # OpenSSL 1.1.0 comes with a limited implementation of blake2b/s. The OpenSSL + # implementations neither support keyed blake2 (blake2 MAC) nor advanced + # features like salt, personalization, or tree hashing. OpenSSL hash-only + # variants are available as 'blake2b512' and 'blake2s256', though. +-__block_openssl_constructor = { +- 'blake2b', 'blake2s', +-} ++import _hashlib ++if _hashlib.get_fips_mode(): ++ __block_openssl_constructor = set() ++else: ++ __block_openssl_constructor = {'blake2b', 'blake2s'} ++ + + def __get_builtin_constructor(name): + cache = __builtin_constructor_cache +diff --git a/Lib/test/test_hashlib.py b/Lib/test/test_hashlib.py +index 6607ef7..01d12f5 100644 +--- a/Lib/test/test_hashlib.py ++++ b/Lib/test/test_hashlib.py +@@ -40,14 +40,15 @@ else: + m.strip() for m in builtin_hashes.strip('"').lower().split(",") + } + +-# hashlib with and without OpenSSL backend for PBKDF2 +-# only import builtin_hashlib when all builtin hashes are available. +-# Otherwise import prints noise on stderr ++# RHEL: `_hashlib` is always importable and `hashlib` can't be imported ++# without it. 
+ openssl_hashlib = import_fresh_module('hashlib', fresh=['_hashlib']) +-if builtin_hashes == default_builtin_hashes: ++try: + builtin_hashlib = import_fresh_module('hashlib', blocked=['_hashlib']) +-else: ++except ImportError: + builtin_hashlib = None ++else: ++ raise AssertionError('hashlib is importablee without _hashlib') + + try: + from _hashlib import HASH, HASHXOF, openssl_md_meth_names, get_fips_mode +@@ -118,6 +119,12 @@ class HashLibTestCase(unittest.TestCase): + except ModuleNotFoundError as error: + if self._warn_on_extension_import and module_name in builtin_hashes: + warnings.warn('Did a C extension fail to compile? %s' % error) ++ except ImportError: ++ if get_fips_mode() and module_name == '_blake2': ++ # blake2b & blake2s disabled under FIPS ++ return None ++ else: ++ raise + return None + + def __init__(self, *args, **kwargs): +diff --git a/Modules/_blake2/blake2b_impl.c b/Modules/_blake2/blake2b_impl.c +index c2cac98..55b1677 100644 +--- a/Modules/_blake2/blake2b_impl.c ++++ b/Modules/_blake2/blake2b_impl.c +@@ -98,6 +98,8 @@ py_blake2b_new_impl(PyTypeObject *type, PyObject *data, int digest_size, + BLAKE2bObject *self = NULL; + Py_buffer buf; + ++ FAIL_RETURN_IN_FIPS_MODE(PyExc_ValueError, "_blake2"); ++ + self = new_BLAKE2bObject(type); + if (self == NULL) { + goto error; +@@ -276,6 +278,8 @@ _blake2_blake2b_update(BLAKE2bObject *self, PyObject *data) + { + Py_buffer buf; + ++ FAIL_RETURN_IN_FIPS_MODE(PyExc_ValueError, "_blake2"); ++ + GET_BUFFER_VIEW_OR_ERROUT(data, &buf); + + if (self->lock == NULL && buf.len >= HASHLIB_GIL_MINSIZE) +diff --git a/Modules/_blake2/blake2module.c b/Modules/_blake2/blake2module.c +index 93478f5..e3a024d 100644 +--- a/Modules/_blake2/blake2module.c ++++ b/Modules/_blake2/blake2module.c +@@ -13,6 +13,7 @@ + #endif + + #include "Python.h" ++#include "../hashlib.h" + #include "blake2module.h" + + extern PyType_Spec blake2b_type_spec; +@@ -83,6 +84,7 @@ _blake2_free(void *module) + static int + blake2_exec(PyObject *m) + { ++ + Blake2State* st = blake2_get_state(m); + + st->blake2b_type = (PyTypeObject *)PyType_FromModuleAndSpec( +@@ -154,5 +156,6 @@ static struct PyModuleDef blake2_module = { + PyMODINIT_FUNC + PyInit__blake2(void) + { ++ FAIL_RETURN_IN_FIPS_MODE(PyExc_ImportError, "blake2"); + return PyModuleDef_Init(&blake2_module); + } +\ No newline at end of file +diff --git a/Modules/_blake2/blake2s_impl.c b/Modules/_blake2/blake2s_impl.c +index 1c47328..cd4a202 100644 +--- a/Modules/_blake2/blake2s_impl.c ++++ b/Modules/_blake2/blake2s_impl.c +@@ -98,6 +98,8 @@ py_blake2s_new_impl(PyTypeObject *type, PyObject *data, int digest_size, + BLAKE2sObject *self = NULL; + Py_buffer buf; + ++ FAIL_RETURN_IN_FIPS_MODE(PyExc_ValueError, "_blake2"); ++ + self = new_BLAKE2sObject(type); + if (self == NULL) { + goto error; +@@ -276,6 +278,8 @@ _blake2_blake2s_update(BLAKE2sObject *self, PyObject *data) + { + Py_buffer buf; + ++ FAIL_RETURN_IN_FIPS_MODE(PyExc_ValueError, "_blake2"); ++ + GET_BUFFER_VIEW_OR_ERROUT(data, &buf); + + if (self->lock == NULL && buf.len >= HASHLIB_GIL_MINSIZE) +diff --git a/Modules/hashlib.h b/Modules/hashlib.h +index 56ae7a5..45fb403 100644 +--- a/Modules/hashlib.h ++++ b/Modules/hashlib.h +@@ -1,5 +1,11 @@ + /* Common code for use by all hashlib related modules. 
*/ + ++// RHEL: use OpenSSL to turn off unsupported modules under FIPS mode ++// EVP_default_properties_is_fips_enabled() on OpenSSL >= 3.0.0 ++#include ++// FIPS_mode() on OpenSSL < 3.0.0 ++#include ++ + /* + * Given a PyObject* obj, fill in the Py_buffer* viewp with the result + * of PyObject_GetBuffer. Sets an exception and issues the erraction +@@ -57,3 +63,20 @@ + * to allow the user to optimize based on the platform they're using. */ + #define HASHLIB_GIL_MINSIZE 2048 + ++__attribute__((__unused__)) ++static int ++_Py_hashlib_fips_error(PyObject *exc, char *name) { ++#if OPENSSL_VERSION_NUMBER >= 0x30000000L ++ if (EVP_default_properties_is_fips_enabled(NULL)) { ++#else ++ if (FIPS_mode()) { ++#endif ++ PyErr_Format(exc, "%s is not available in FIPS mode", name); ++ return 1; ++ } ++ return 0; ++} ++ ++#define FAIL_RETURN_IN_FIPS_MODE(exc, name) do { \ ++ if (_Py_hashlib_fips_error(exc, name)) return NULL; \ ++} while (0) +diff --git a/configure.ac b/configure.ac +index 52d5c1f..56aff78 100644 +--- a/configure.ac ++++ b/configure.ac +@@ -7069,7 +7069,8 @@ PY_STDLIB_MOD([_sha512], [test "$with_builtin_sha512" = yes]) + PY_STDLIB_MOD([_sha3], [test "$with_builtin_sha3" = yes]) + PY_STDLIB_MOD([_blake2], + [test "$with_builtin_blake2" = yes], [], +- [$LIBB2_CFLAGS], [$LIBB2_LIBS]) ++ [$LIBB2_CFLAGS $OPENSSL_INCLUDES], ++ [$LIBB2_LIBS $OPENSSL_LDFLAGS $OPENSSL_LDFLAGS_RPATH $OPENSSL_LIBS]) + + PY_STDLIB_MOD([_crypt], + [], [test "$ac_cv_crypt_crypt" = yes], +-- +2.43.0 + + +From e9ce6d33544559172dbebbe0c0dfba2757c62331 Mon Sep 17 00:00:00 2001 +From: Charalampos Stratakis +Date: Fri, 29 Jan 2021 14:16:21 +0100 +Subject: [PATCH 4/7] Use python's fall back crypto implementations only if we + are not in FIPS mode + +--- + Lib/hashlib.py | 72 +++------------------------------------- + Lib/test/test_hashlib.py | 23 ++++++++++++- + 2 files changed, 27 insertions(+), 68 deletions(-) + +diff --git a/Lib/hashlib.py b/Lib/hashlib.py +index 8b835a4..d0834c2 100644 +--- a/Lib/hashlib.py ++++ b/Lib/hashlib.py +@@ -83,6 +83,8 @@ else: + + + def __get_builtin_constructor(name): ++ if _hashlib.get_fips_mode(): ++ raise ValueError('unsupported hash type ' + name + '(in FIPS mode)') + cache = __builtin_constructor_cache + constructor = cache.get(name) + if constructor is not None: +@@ -178,83 +180,19 @@ try: + except ImportError: + _hashlib = None + new = __py_new +- __get_hash = __get_builtin_constructor ++ raise # importing _hashlib should never fail on RHEL + + try: + # OpenSSL's PKCS5_PBKDF2_HMAC requires OpenSSL 1.0+ with HMAC and SHA + from _hashlib import pbkdf2_hmac + except ImportError: +- from warnings import warn as _warn +- _trans_5C = bytes((x ^ 0x5C) for x in range(256)) +- _trans_36 = bytes((x ^ 0x36) for x in range(256)) +- +- def pbkdf2_hmac(hash_name, password, salt, iterations, dklen=None): +- """Password based key derivation function 2 (PKCS #5 v2.0) +- +- This Python implementations based on the hmac module about as fast +- as OpenSSL's PKCS5_PBKDF2_HMAC for short passwords and much faster +- for long passwords. 
+- """ +- _warn( +- "Python implementation of pbkdf2_hmac() is deprecated.", +- category=DeprecationWarning, +- stacklevel=2 +- ) +- if not isinstance(hash_name, str): +- raise TypeError(hash_name) +- +- if not isinstance(password, (bytes, bytearray)): +- password = bytes(memoryview(password)) +- if not isinstance(salt, (bytes, bytearray)): +- salt = bytes(memoryview(salt)) +- +- # Fast inline HMAC implementation +- inner = new(hash_name) +- outer = new(hash_name) +- blocksize = getattr(inner, 'block_size', 64) +- if len(password) > blocksize: +- password = new(hash_name, password).digest() +- password = password + b'\x00' * (blocksize - len(password)) +- inner.update(password.translate(_trans_36)) +- outer.update(password.translate(_trans_5C)) +- +- def prf(msg, inner=inner, outer=outer): +- # PBKDF2_HMAC uses the password as key. We can re-use the same +- # digest objects and just update copies to skip initialization. +- icpy = inner.copy() +- ocpy = outer.copy() +- icpy.update(msg) +- ocpy.update(icpy.digest()) +- return ocpy.digest() +- +- if iterations < 1: +- raise ValueError(iterations) +- if dklen is None: +- dklen = outer.digest_size +- if dklen < 1: +- raise ValueError(dklen) +- +- dkey = b'' +- loop = 1 +- from_bytes = int.from_bytes +- while len(dkey) < dklen: +- prev = prf(salt + loop.to_bytes(4)) +- # endianness doesn't matter here as long to / from use the same +- rkey = from_bytes(prev) +- for i in range(iterations - 1): +- prev = prf(prev) +- # rkey = rkey ^ prev +- rkey ^= from_bytes(prev) +- loop += 1 +- dkey += rkey.to_bytes(inner.digest_size) +- +- return dkey[:dklen] ++ raise # importing _hashlib should never fail on RHEL + + try: + # OpenSSL's scrypt requires OpenSSL 1.1+ + from _hashlib import scrypt + except ImportError: +- pass ++ raise # importing _hashlib should never fail on RHEL + + + def file_digest(fileobj, digest, /, *, _bufsize=2**18): +diff --git a/Lib/test/test_hashlib.py b/Lib/test/test_hashlib.py +index 01d12f5..a7cdb07 100644 +--- a/Lib/test/test_hashlib.py ++++ b/Lib/test/test_hashlib.py +@@ -171,7 +171,13 @@ class HashLibTestCase(unittest.TestCase): + constructors.add(constructor) + + def add_builtin_constructor(name): +- constructor = getattr(hashlib, "__get_builtin_constructor")(name) ++ try: ++ constructor = getattr(hashlib, "__get_builtin_constructor")(name) ++ except ValueError: ++ if get_fips_mode(): ++ return ++ else: ++ raise + self.constructors_to_test[name].add(constructor) + + _md5 = self._conditional_import_module('_md5') +@@ -266,6 +272,20 @@ class HashLibTestCase(unittest.TestCase): + def test_new_upper_to_lower(self): + self.assertEqual(hashlib.new("SHA256").name, "sha256") + ++ @unittest.skipUnless(get_fips_mode(), "Builtin constructor only usable in FIPS mode") ++ def test_get_builtin_constructor_fips(self): ++ get_builtin_constructor = getattr(hashlib, ++ '__get_builtin_constructor') ++ with self.assertRaises(ValueError): ++ get_builtin_constructor('md5') ++ with self.assertRaises(ValueError): ++ get_builtin_constructor('sha256') ++ with self.assertRaises(ValueError): ++ get_builtin_constructor('blake2s') ++ with self.assertRaises(ValueError): ++ get_builtin_constructor('test') ++ ++ @unittest.skipIf(get_fips_mode(), "No builtin constructors in FIPS mode") + def test_get_builtin_constructor(self): + get_builtin_constructor = getattr(hashlib, + '__get_builtin_constructor') +@@ -1111,6 +1131,7 @@ class KDFTests(unittest.TestCase): + iterations=1, dklen=None) + self.assertEqual(out, self.pbkdf2_results['sha1'][0][0]) + ++ 
@unittest.skip("The python implementation of pbkdf2_hmac has been removed") + @unittest.skipIf(builtin_hashlib is None, "test requires builtin_hashlib") + def test_pbkdf2_hmac_py(self): + with warnings_helper.check_warnings(): +-- +2.43.0 + + +From 641c617775b6973ed84711a2602ba190fe064474 Mon Sep 17 00:00:00 2001 +From: Charalampos Stratakis +Date: Wed, 31 Jul 2019 15:43:43 +0200 +Subject: [PATCH 5/7] Test equivalence of hashes for the various digests with + usedforsecurity=True/False + +--- + Lib/test/test_fips.py | 24 ++++++++++++++++++++ + Lib/test/test_hashlib.py | 47 ++++++++++++++++++++++++++++++---------- + 2 files changed, 60 insertions(+), 11 deletions(-) + create mode 100644 Lib/test/test_fips.py + +diff --git a/Lib/test/test_fips.py b/Lib/test/test_fips.py +new file mode 100644 +index 0000000..1f99dd7 +--- /dev/null ++++ b/Lib/test/test_fips.py +@@ -0,0 +1,24 @@ ++import unittest ++import hashlib, _hashlib ++ ++ ++ ++class HashlibFipsTests(unittest.TestCase): ++ ++ @unittest.skipUnless(_hashlib.get_fips_mode(), "Test only when FIPS is enabled") ++ def test_fips_imports(self): ++ """blake2s and blake2b should fail to import in FIPS mode ++ """ ++ with self.assertRaises(ValueError, msg='blake2s not available in FIPS'): ++ m = hashlib.blake2s() ++ with self.assertRaises(ValueError, msg='blake2b not available in FIPS'): ++ m = hashlib.blake2b() ++ ++ @unittest.skipIf(_hashlib.get_fips_mode(), "blake2 hashes are not available under FIPS") ++ def test_blake2_hashes(self): ++ self.assertEqual(hashlib.blake2b(b'abc').hexdigest(), _hashlib.openssl_blake2b(b'abc').hexdigest()) ++ self.assertEqual(hashlib.blake2s(b'abc').hexdigest(), _hashlib.openssl_blake2s(b'abc').hexdigest()) ++ ++ ++if __name__ == "__main__": ++ unittest.main() +diff --git a/Lib/test/test_hashlib.py b/Lib/test/test_hashlib.py +index a7cdb07..c071f28 100644 +--- a/Lib/test/test_hashlib.py ++++ b/Lib/test/test_hashlib.py +@@ -25,6 +25,7 @@ from test.support import os_helper + from test.support import threading_helper + from test.support import warnings_helper + from http.client import HTTPException ++from functools import partial + + # Were we compiled --with-pydebug or with #define Py_DEBUG? + COMPILED_WITH_PYDEBUG = hasattr(sys, 'gettotalrefcount') +@@ -60,6 +61,11 @@ except ImportError: + def get_fips_mode(): + return 0 + ++if get_fips_mode(): ++ FIPS_DISABLED = {'md5'} ++else: ++ FIPS_DISABLED = set() ++ + try: + import _blake2 + except ImportError: +@@ -98,6 +104,11 @@ def read_vectors(hash_name): + parts[0] = bytes.fromhex(parts[0]) + yield parts + ++def _is_blake2_constructor(constructor): ++ if isinstance(constructor, partial): ++ constructor = constructor.func ++ return getattr(constructor, '__name__', '').startswith('openssl_blake2') ++ + + class HashLibTestCase(unittest.TestCase): + supported_hash_names = ( 'md5', 'MD5', 'sha1', 'SHA1', +@@ -142,15 +153,21 @@ class HashLibTestCase(unittest.TestCase): + continue + self.constructors_to_test[algorithm] = set() + ++ def _add_constructor(algorithm, constructor): ++ constructors.add(partial(constructor, usedforsecurity=False)) ++ if algorithm not in FIPS_DISABLED: ++ constructors.add(constructor) ++ constructors.add(partial(constructor, usedforsecurity=True)) ++ + # For each algorithm, test the direct constructor and the use + # of hashlib.new given the algorithm name. 
+ for algorithm, constructors in self.constructors_to_test.items(): +- constructors.add(getattr(hashlib, algorithm)) ++ _add_constructor(algorithm, getattr(hashlib, algorithm)) + def _test_algorithm_via_hashlib_new(data=None, _alg=algorithm, **kwargs): + if data is None: + return hashlib.new(_alg, **kwargs) + return hashlib.new(_alg, data, **kwargs) +- constructors.add(_test_algorithm_via_hashlib_new) ++ _add_constructor(algorithm, _test_algorithm_via_hashlib_new) + + _hashlib = self._conditional_import_module('_hashlib') + self._hashlib = _hashlib +@@ -162,13 +179,7 @@ class HashLibTestCase(unittest.TestCase): + for algorithm, constructors in self.constructors_to_test.items(): + constructor = getattr(_hashlib, 'openssl_'+algorithm, None) + if constructor: +- try: +- constructor() +- except ValueError: +- # default constructor blocked by crypto policy +- pass +- else: +- constructors.add(constructor) ++ _add_constructor(algorithm, constructor) + + def add_builtin_constructor(name): + try: +@@ -346,6 +357,8 @@ class HashLibTestCase(unittest.TestCase): + self.assertIn(h.name, self.supported_hash_names) + else: + self.assertNotIn(h.name, self.supported_hash_names) ++ if not h.name.startswith('blake2') and h.name not in FIPS_DISABLED: ++ self.assertEqual(h.name, hashlib.new(h.name).name) + self.assertEqual( + h.name, + hashlib.new(h.name, usedforsecurity=False).name +@@ -392,8 +405,10 @@ class HashLibTestCase(unittest.TestCase): + for hash_object_constructor in constructors: + + # OpenSSL's blake2s & blake2d don't support `key` +- _name = hash_object_constructor.__name__ +- if 'key' in kwargs and _name.startswith('openssl_blake2'): ++ if ( ++ 'key' in kwargs ++ and _is_blake2_constructor(hash_object_constructor) ++ ): + return + + m = hash_object_constructor(data, **kwargs) +@@ -1024,6 +1039,16 @@ class HashLibTestCase(unittest.TestCase): + with self.assertRaisesRegex(TypeError, "immutable type"): + hash_type.value = False + ++ @unittest.skipUnless(get_fips_mode(), 'Needs FIPS mode.') ++ def test_usedforsecurity_repeat(self): ++ """Make sure usedforsecurity flag isn't copied to other contexts""" ++ for i in range(3): ++ for cons in hashlib.md5, partial(hashlib.new, 'md5'): ++ self.assertRaises(ValueError, cons) ++ self.assertRaises(ValueError, partial(cons, usedforsecurity=True)) ++ self.assertEqual(cons(usedforsecurity=False).hexdigest(), ++ 'd41d8cd98f00b204e9800998ecf8427e') ++ + + class KDFTests(unittest.TestCase): + +-- +2.43.0 + + +From a706c8342f0f9307d44c43c203702e1476fe73b4 Mon Sep 17 00:00:00 2001 +From: Petr Viktorin +Date: Mon, 26 Aug 2019 19:39:48 +0200 +Subject: [PATCH 6/7] Guard against Python HMAC in FIPS mode + +--- + Lib/hmac.py | 13 +++++++++---- + Lib/test/test_hmac.py | 10 ++++++++++ + 2 files changed, 19 insertions(+), 4 deletions(-) + +diff --git a/Lib/hmac.py b/Lib/hmac.py +index 8b4f920..20ef96c 100644 +--- a/Lib/hmac.py ++++ b/Lib/hmac.py +@@ -16,8 +16,9 @@ else: + + import hashlib as _hashlib + +-trans_5C = bytes((x ^ 0x5C) for x in range(256)) +-trans_36 = bytes((x ^ 0x36) for x in range(256)) ++if not _hashopenssl.get_fips_mode(): ++ trans_5C = bytes((x ^ 0x5C) for x in range(256)) ++ trans_36 = bytes((x ^ 0x36) for x in range(256)) + + # The size of the digests returned by HMAC depends on the underlying + # hashing module used. Use digest_size from the instance of HMAC instead. +@@ -48,17 +49,18 @@ class HMAC: + msg argument. Passing it as a keyword argument is + recommended, though not required for legacy API reasons. 
+ """ +- + if not isinstance(key, (bytes, bytearray)): + raise TypeError("key: expected bytes or bytearray, but got %r" % type(key).__name__) + + if not digestmod: + raise TypeError("Missing required parameter 'digestmod'.") + +- if _hashopenssl and isinstance(digestmod, (str, _functype)): ++ if _hashopenssl.get_fips_mode() or (_hashopenssl and isinstance(digestmod, (str, _functype))): + try: + self._init_hmac(key, msg, digestmod) + except _hashopenssl.UnsupportedDigestmodError: ++ if _hashopenssl.get_fips_mode(): ++ raise + self._init_old(key, msg, digestmod) + else: + self._init_old(key, msg, digestmod) +@@ -69,6 +71,9 @@ class HMAC: + self.block_size = self._hmac.block_size + + def _init_old(self, key, msg, digestmod): ++ if _hashopenssl.get_fips_mode(): ++ # In FIPS mode, use OpenSSL anyway: raise the appropriate error ++ return self._init_hmac(key, msg, digestmod) + if callable(digestmod): + digest_cons = digestmod + elif isinstance(digestmod, str): +diff --git a/Lib/test/test_hmac.py b/Lib/test/test_hmac.py +index a39a2c4..0742a1c 100644 +--- a/Lib/test/test_hmac.py ++++ b/Lib/test/test_hmac.py +@@ -5,6 +5,7 @@ import hashlib + import unittest + import unittest.mock + import warnings ++from _hashlib import get_fips_mode + + from test.support import hashlib_helper, check_disallow_instantiation + +@@ -339,6 +340,7 @@ class TestVectorsTestCase(unittest.TestCase): + def test_sha512_rfc4231(self): + self._rfc4231_test_cases(hashlib.sha512, 'sha512', 64, 128) + ++ #unittest.skipIf(get_fips_mode(), 'MockCrazyHash unacceptable in FIPS mode.') + @hashlib_helper.requires_hashdigest('sha256') + def test_legacy_block_size_warnings(self): + class MockCrazyHash(object): +@@ -351,6 +353,11 @@ class TestVectorsTestCase(unittest.TestCase): + def digest(self): + return self._x.digest() + ++ if get_fips_mode(): ++ with self.assertRaises(ValueError): ++ hmac.HMAC(b'a', b'b', digestmod=MockCrazyHash) ++ return ++ + with warnings.catch_warnings(): + warnings.simplefilter('error', RuntimeWarning) + with self.assertRaises(RuntimeWarning): +@@ -453,6 +460,7 @@ class ConstructorTestCase(unittest.TestCase): + with self.assertRaisesRegex(TypeError, "immutable type"): + C_HMAC.value = None + ++ @unittest.skipIf(get_fips_mode(), "_sha256 unavailable in FIPS mode") + @unittest.skipUnless(sha256_module is not None, 'need _sha256') + def test_with_sha256_module(self): + h = hmac.HMAC(b"key", b"hash this!", digestmod=sha256_module.sha256) +@@ -481,6 +489,7 @@ class SanityTestCase(unittest.TestCase): + + class CopyTestCase(unittest.TestCase): + ++ @unittest.skipIf(get_fips_mode(), "_init_old unavailable in FIPS mode") + @hashlib_helper.requires_hashdigest('sha256') + def test_attributes_old(self): + # Testing if attributes are of same type. +@@ -492,6 +501,7 @@ class CopyTestCase(unittest.TestCase): + self.assertEqual(type(h1._outer), type(h2._outer), + "Types of outer don't match.") + ++ @unittest.skipIf(get_fips_mode(), "_init_old unavailable in FIPS mode") + @hashlib_helper.requires_hashdigest('sha256') + def test_realcopy_old(self): + # Testing if the copy method created a real copy. 
+-- +2.43.0 + + +From 03f1dedfe5d29af20fb3686d76b045384d41d8dd Mon Sep 17 00:00:00 2001 +From: Petr Viktorin +Date: Wed, 25 Aug 2021 16:44:43 +0200 +Subject: [PATCH 7/7] Disable hash-based PYCs in FIPS mode + +If FIPS mode is on, we can't use siphash-based HMAC +(_Py_KeyedHash), so: + +- Unchecked hash PYCs can be imported, but not created +- Checked hash PYCs can not be imported nor created +- The default mode is timestamp-based PYCs, even if + SOURCE_DATE_EPOCH is set. + +If FIPS mode is off, there are no changes in behavior. + +Resolves: https://bugzilla.redhat.com/show_bug.cgi?id=1835169 +--- + Lib/py_compile.py | 2 ++ + Lib/test/support/__init__.py | 14 +++++++++++++ + Lib/test/test_cmd_line_script.py | 2 ++ + Lib/test/test_compileall.py | 11 +++++++++- + Lib/test/test_imp.py | 2 ++ + .../test_importlib/source/test_file_loader.py | 6 ++++++ + Lib/test/test_py_compile.py | 11 ++++++++-- + Lib/test/test_zipimport.py | 2 ++ + Python/import.c | 20 +++++++++++++++++++ + 9 files changed, 67 insertions(+), 3 deletions(-) + +diff --git a/Lib/py_compile.py b/Lib/py_compile.py +index db52725..5fca65e 100644 +--- a/Lib/py_compile.py ++++ b/Lib/py_compile.py +@@ -70,7 +70,9 @@ class PycInvalidationMode(enum.Enum): + + + def _get_default_invalidation_mode(): ++ import _hashlib + if (os.environ.get('SOURCE_DATE_EPOCH') and not ++ _hashlib.get_fips_mode() and not + os.environ.get('RPM_BUILD_ROOT')): + return PycInvalidationMode.CHECKED_HASH + else: +diff --git a/Lib/test/support/__init__.py b/Lib/test/support/__init__.py +index dc7a6e6..646b328 100644 +--- a/Lib/test/support/__init__.py ++++ b/Lib/test/support/__init__.py +@@ -2203,6 +2203,20 @@ def sleeping_retry(timeout, err_msg=None, /, + delay = min(delay * 2, max_delay) + + ++def fails_in_fips_mode(expected_error): ++ import _hashlib ++ if _hashlib.get_fips_mode(): ++ def _decorator(func): ++ def _wrapper(self, *args, **kwargs): ++ with self.assertRaises(expected_error): ++ func(self, *args, **kwargs) ++ return _wrapper ++ else: ++ def _decorator(func): ++ return func ++ return _decorator ++ ++ + @contextlib.contextmanager + def adjust_int_max_str_digits(max_digits): + """Temporarily change the integer string conversion length limit.""" +diff --git a/Lib/test/test_cmd_line_script.py b/Lib/test/test_cmd_line_script.py +index 7fcd563..476b557 100644 +--- a/Lib/test/test_cmd_line_script.py ++++ b/Lib/test/test_cmd_line_script.py +@@ -286,6 +286,7 @@ class CmdLineTest(unittest.TestCase): + self._check_script(zip_name, run_name, zip_name, zip_name, '', + zipimport.zipimporter) + ++ @support.fails_in_fips_mode(ImportError) + def test_zipfile_compiled_checked_hash(self): + with os_helper.temp_dir() as script_dir: + script_name = _make_test_script(script_dir, '__main__') +@@ -296,6 +297,7 @@ class CmdLineTest(unittest.TestCase): + self._check_script(zip_name, run_name, zip_name, zip_name, '', + zipimport.zipimporter) + ++ @support.fails_in_fips_mode(ImportError) + def test_zipfile_compiled_unchecked_hash(self): + with os_helper.temp_dir() as script_dir: + script_name = _make_test_script(script_dir, '__main__') +diff --git a/Lib/test/test_compileall.py b/Lib/test/test_compileall.py +index 9cd92ad..4ec29a1 100644 +--- a/Lib/test/test_compileall.py ++++ b/Lib/test/test_compileall.py +@@ -806,14 +806,23 @@ class CommandLineTestsBase: + out = self.assertRunOK('badfilename') + self.assertRegex(out, b"Can't list 'badfilename'") + +- def test_pyc_invalidation_mode(self): ++ @support.fails_in_fips_mode(AssertionError) ++ def 
test_pyc_invalidation_mode_checked(self): + script_helper.make_script(self.pkgdir, 'f1', '') + pyc = importlib.util.cache_from_source( + os.path.join(self.pkgdir, 'f1.py')) ++ + self.assertRunOK('--invalidation-mode=checked-hash', self.pkgdir) + with open(pyc, 'rb') as fp: + data = fp.read() + self.assertEqual(int.from_bytes(data[4:8], 'little'), 0b11) ++ ++ @support.fails_in_fips_mode(AssertionError) ++ def test_pyc_invalidation_mode_unchecked(self): ++ script_helper.make_script(self.pkgdir, 'f1', '') ++ pyc = importlib.util.cache_from_source( ++ os.path.join(self.pkgdir, 'f1.py')) ++ + self.assertRunOK('--invalidation-mode=unchecked-hash', self.pkgdir) + with open(pyc, 'rb') as fp: + data = fp.read() +diff --git a/Lib/test/test_imp.py b/Lib/test/test_imp.py +index 4062afd..6bc276d 100644 +--- a/Lib/test/test_imp.py ++++ b/Lib/test/test_imp.py +@@ -352,6 +352,7 @@ class ImportTests(unittest.TestCase): + import _frozen_importlib + self.assertEqual(_frozen_importlib.__spec__.origin, "frozen") + ++ @support.fails_in_fips_mode(ImportError) + def test_source_hash(self): + self.assertEqual(_imp.source_hash(42, b'hi'), b'\xfb\xd9G\x05\xaf$\x9b~') + self.assertEqual(_imp.source_hash(43, b'hi'), b'\xd0/\x87C\xccC\xff\xe2') +@@ -371,6 +372,7 @@ class ImportTests(unittest.TestCase): + res = script_helper.assert_python_ok(*args) + self.assertEqual(res.out.strip().decode('utf-8'), expected) + ++ @support.fails_in_fips_mode(ImportError) + def test_find_and_load_checked_pyc(self): + # issue 34056 + with os_helper.temp_cwd(): +diff --git a/Lib/test/test_importlib/source/test_file_loader.py b/Lib/test/test_importlib/source/test_file_loader.py +index 378dcbe..7b223a1 100644 +--- a/Lib/test/test_importlib/source/test_file_loader.py ++++ b/Lib/test/test_importlib/source/test_file_loader.py +@@ -16,6 +16,7 @@ import types + import unittest + import warnings + ++from test import support + from test.support.import_helper import make_legacy_pyc, unload + + from test.test_py_compile import without_source_date_epoch +@@ -238,6 +239,7 @@ class SimpleTest(abc.LoaderTests): + loader.load_module('bad name') + + @util.writes_bytecode_files ++ @support.fails_in_fips_mode(ImportError) + def test_checked_hash_based_pyc(self): + with util.create_modules('_temp') as mapping: + source = mapping['_temp'] +@@ -269,6 +271,7 @@ class SimpleTest(abc.LoaderTests): + ) + + @util.writes_bytecode_files ++ @support.fails_in_fips_mode(ImportError) + def test_overridden_checked_hash_based_pyc(self): + with util.create_modules('_temp') as mapping, \ + unittest.mock.patch('_imp.check_hash_based_pycs', 'never'): +@@ -294,6 +297,7 @@ class SimpleTest(abc.LoaderTests): + self.assertEqual(mod.state, 'old') + + @util.writes_bytecode_files ++ @support.fails_in_fips_mode(ImportError) + def test_unchecked_hash_based_pyc(self): + with util.create_modules('_temp') as mapping: + source = mapping['_temp'] +@@ -324,6 +328,7 @@ class SimpleTest(abc.LoaderTests): + ) + + @util.writes_bytecode_files ++ @support.fails_in_fips_mode(ImportError) + def test_overridden_unchecked_hash_based_pyc(self): + with util.create_modules('_temp') as mapping, \ + unittest.mock.patch('_imp.check_hash_based_pycs', 'always'): +@@ -433,6 +438,7 @@ class BadBytecodeTest: + del_source=del_source) + test('_temp', mapping, bc_path) + ++ @support.fails_in_fips_mode(ImportError) + def _test_partial_hash(self, test, *, del_source=False): + with util.create_modules('_temp') as mapping: + bc_path = self.manipulate_bytecode( +diff --git a/Lib/test/test_py_compile.py 
b/Lib/test/test_py_compile.py +index 9b420d2..dd6460a 100644 +--- a/Lib/test/test_py_compile.py ++++ b/Lib/test/test_py_compile.py +@@ -143,13 +143,16 @@ class PyCompileTestsBase: + importlib.util.cache_from_source(bad_coding))) + + def test_source_date_epoch(self): ++ import _hashlib + py_compile.compile(self.source_path, self.pyc_path) + self.assertTrue(os.path.exists(self.pyc_path)) + self.assertFalse(os.path.exists(self.cache_path)) + with open(self.pyc_path, 'rb') as fp: + flags = importlib._bootstrap_external._classify_pyc( + fp.read(), 'test', {}) +- if os.environ.get('SOURCE_DATE_EPOCH'): ++ if _hashlib.get_fips_mode(): ++ expected_flags = 0b00 ++ elif os.environ.get('SOURCE_DATE_EPOCH'): + expected_flags = 0b11 + else: + expected_flags = 0b00 +@@ -180,7 +183,8 @@ class PyCompileTestsBase: + # Specifying optimized bytecode should lead to a path reflecting that. + self.assertIn('opt-2', py_compile.compile(self.source_path, optimize=2)) + +- def test_invalidation_mode(self): ++ @support.fails_in_fips_mode(ImportError) ++ def test_invalidation_mode_checked(self): + py_compile.compile( + self.source_path, + invalidation_mode=py_compile.PycInvalidationMode.CHECKED_HASH, +@@ -189,6 +193,9 @@ class PyCompileTestsBase: + flags = importlib._bootstrap_external._classify_pyc( + fp.read(), 'test', {}) + self.assertEqual(flags, 0b11) ++ ++ @support.fails_in_fips_mode(ImportError) ++ def test_invalidation_mode_unchecked(self): + py_compile.compile( + self.source_path, + invalidation_mode=py_compile.PycInvalidationMode.UNCHECKED_HASH, +diff --git a/Lib/test/test_zipimport.py b/Lib/test/test_zipimport.py +index 59a5200..81fadb3 100644 +--- a/Lib/test/test_zipimport.py ++++ b/Lib/test/test_zipimport.py +@@ -190,6 +190,7 @@ class UncompressedZipImportTestCase(ImportHooksBaseTestCase): + TESTMOD + pyc_ext: (NOW, test_pyc)} + self.doTest(pyc_ext, files, TESTMOD) + ++ @support.fails_in_fips_mode(ImportError) + def testUncheckedHashBasedPyc(self): + source = b"state = 'old'" + source_hash = importlib.util.source_hash(source) +@@ -204,6 +205,7 @@ class UncompressedZipImportTestCase(ImportHooksBaseTestCase): + self.assertEqual(mod.state, 'old') + self.doTest(None, files, TESTMOD, call=check) + ++ @support.fails_in_fips_mode(ImportError) + @unittest.mock.patch('_imp.check_hash_based_pycs', 'always') + def test_checked_hash_based_change_pyc(self): + source = b"state = 'old'" +diff --git a/Python/import.c b/Python/import.c +index 39144d3..b439059 100644 +--- a/Python/import.c ++++ b/Python/import.c +@@ -2449,6 +2449,26 @@ static PyObject * + _imp_source_hash_impl(PyObject *module, long key, Py_buffer *source) + /*[clinic end generated code: output=edb292448cf399ea input=9aaad1e590089789]*/ + { ++ PyObject *_hashlib = PyImport_ImportModule("_hashlib"); ++ if (_hashlib == NULL) { ++ return NULL; ++ } ++ PyObject *fips_mode_obj = PyObject_CallMethod(_hashlib, "get_fips_mode", NULL); ++ Py_DECREF(_hashlib); ++ if (fips_mode_obj == NULL) { ++ return NULL; ++ } ++ int fips_mode = PyObject_IsTrue(fips_mode_obj); ++ Py_DECREF(fips_mode_obj); ++ if (fips_mode < 0) { ++ return NULL; ++ } ++ if (fips_mode) { ++ PyErr_SetString( ++ PyExc_ImportError, ++ "hash-based PYC validation (siphash24) not available in FIPS mode"); ++ return NULL; ++ }; + union { + uint64_t x; + char data[sizeof(uint64_t)]; +-- +2.43.0 + diff --git a/SOURCES/00371-revert-bpo-1596321-fix-threading-_shutdown-for-the-main-thread-gh-28549-gh-28589.patch 
b/SOURCES/00371-revert-bpo-1596321-fix-threading-_shutdown-for-the-main-thread-gh-28549-gh-28589.patch new file mode 100644 index 0000000..1d39233 --- /dev/null +++ b/SOURCES/00371-revert-bpo-1596321-fix-threading-_shutdown-for-the-main-thread-gh-28549-gh-28589.patch @@ -0,0 +1,103 @@ +From 0000000000000000000000000000000000000000 Mon Sep 17 00:00:00 2001 +From: =?UTF-8?q?Tom=C3=A1=C5=A1=20Hrn=C4=8Diar?= +Date: Tue, 7 Dec 2021 14:41:59 +0100 +Subject: [PATCH] 00371: Revert "bpo-1596321: Fix threading._shutdown() for the + main thread (GH-28549) (GH-28589)" + +This reverts commit 38c67738c64304928c68d5c2bd78bbb01d979b94. It +introduced regression causing FreeIPA's tests to fail. + +For more info see: +https://bodhi.fedoraproject.org/updates/FEDORA-2021-e152ce5f31 +https://github.com/GrahamDumpleton/mod_wsgi/issues/730 +--- + Lib/test/test_threading.py | 33 --------------------------------- + Lib/threading.py | 25 ++++++++----------------- + 2 files changed, 8 insertions(+), 50 deletions(-) + +diff --git a/Lib/test/test_threading.py b/Lib/test/test_threading.py +index 9c6561c099..84714c03fe 100644 +--- a/Lib/test/test_threading.py ++++ b/Lib/test/test_threading.py +@@ -956,39 +956,6 @@ def test_debug_deprecation(self): + b'is deprecated and will be removed in Python 3.12') + self.assertIn(msg, err) + +- def test_import_from_another_thread(self): +- # bpo-1596321: If the threading module is first import from a thread +- # different than the main thread, threading._shutdown() must handle +- # this case without logging an error at Python exit. +- code = textwrap.dedent(''' +- import _thread +- import sys +- +- event = _thread.allocate_lock() +- event.acquire() +- +- def import_threading(): +- import threading +- event.release() +- +- if 'threading' in sys.modules: +- raise Exception('threading is already imported') +- +- _thread.start_new_thread(import_threading, ()) +- +- # wait until the threading module is imported +- event.acquire() +- event.release() +- +- if 'threading' not in sys.modules: +- raise Exception('threading is not imported') +- +- # don't wait until the thread completes +- ''') +- rc, out, err = assert_python_ok("-c", code) +- self.assertEqual(out, b'') +- self.assertEqual(err, b'') +- + + class ThreadJoinOnShutdown(BaseTestCase): + +diff --git a/Lib/threading.py b/Lib/threading.py +index 4f72938551..18c10e6489 100644 +--- a/Lib/threading.py ++++ b/Lib/threading.py +@@ -1546,29 +1546,20 @@ def _shutdown(): + + global _SHUTTING_DOWN + _SHUTTING_DOWN = True ++ # Main thread ++ tlock = _main_thread._tstate_lock ++ # The main thread isn't finished yet, so its thread state lock can't have ++ # been released. ++ assert tlock is not None ++ assert tlock.locked() ++ tlock.release() ++ _main_thread._stop() + + # Call registered threading atexit functions before threads are joined. + # Order is reversed, similar to atexit. + for atexit_call in reversed(_threading_atexits): + atexit_call() + +- # Main thread +- if _main_thread.ident == get_ident(): +- tlock = _main_thread._tstate_lock +- # The main thread isn't finished yet, so its thread state lock can't +- # have been released. +- assert tlock is not None +- assert tlock.locked() +- tlock.release() +- _main_thread._stop() +- else: +- # bpo-1596321: _shutdown() must be called in the main thread. +- # If the threading module was not imported by the main thread, +- # _main_thread is the thread which imported the threading module. 
+- # In this case, ignore _main_thread, similar behavior than for threads +- # spawned by C libraries or using _thread.start_new_thread(). +- pass +- + # Join all non-deamon threads + while True: + with _shutdown_locks_lock: diff --git a/SOURCES/00378-support-expat-2-4-5.patch b/SOURCES/00378-support-expat-2-4-5.patch new file mode 100644 index 0000000..a69e111 --- /dev/null +++ b/SOURCES/00378-support-expat-2-4-5.patch @@ -0,0 +1,47 @@ +From db083095e3bdb93e4f8170d814664c482b1e94da Mon Sep 17 00:00:00 2001 +From: rpm-build +Date: Tue, 14 Jun 2022 06:38:43 +0200 +Subject: [PATCH] Fix test suite for Expat >= 2.4.5 + +--- + Lib/test/test_minidom.py | 17 +++++------------ + 1 file changed, 5 insertions(+), 12 deletions(-) + +diff --git a/Lib/test/test_minidom.py b/Lib/test/test_minidom.py +index 9762025..5f52ed1 100644 +--- a/Lib/test/test_minidom.py ++++ b/Lib/test/test_minidom.py +@@ -1149,14 +1149,10 @@ class MinidomTest(unittest.TestCase): + + # Verify that character decoding errors raise exceptions instead + # of crashing +- if pyexpat.version_info >= (2, 4, 5): +- self.assertRaises(ExpatError, parseString, +- b'') +- self.assertRaises(ExpatError, parseString, +- b'Comment \xe7a va ? Tr\xe8s bien ?') +- else: +- self.assertRaises(UnicodeDecodeError, parseString, +- b'Comment \xe7a va ? Tr\xe8s bien ?') ++ self.assertRaises(ExpatError, parseString, ++ b'') ++ self.assertRaises(ExpatError, parseString, ++ b'Comment \xe7a va ? Tr\xe8s bien ?') + + doc.unlink() + +@@ -1617,10 +1613,7 @@ class MinidomTest(unittest.TestCase): + self.confirm(doc2.namespaceURI == xml.dom.EMPTY_NAMESPACE) + + def testExceptionOnSpacesInXMLNSValue(self): +- if pyexpat.version_info >= (2, 4, 5): +- context = self.assertRaisesRegex(ExpatError, 'syntax error') +- else: +- context = self.assertRaisesRegex(ValueError, 'Unsupported syntax') ++ context = self.assertRaisesRegex(ExpatError, 'syntax error') + + with context: + parseString('') +-- +2.35.3 + diff --git a/SOURCES/00397-tarfile-filter.patch b/SOURCES/00397-tarfile-filter.patch new file mode 100644 index 0000000..3c4ebf4 --- /dev/null +++ b/SOURCES/00397-tarfile-filter.patch @@ -0,0 +1,251 @@ +From 8b70605b594b3831331a9340ba764ff751871612 Mon Sep 17 00:00:00 2001 +From: Petr Viktorin +Date: Mon, 6 Mar 2023 17:24:24 +0100 +Subject: [PATCH] CVE-2007-4559, PEP-706: Add filters for tarfile extraction + (downstream) + +Add and test RHEL-specific ways of configuring the default behavior: environment +variable and config file. +--- + Lib/tarfile.py | 42 +++++++++++++ + Lib/test/test_shutil.py | 3 +- + Lib/test/test_tarfile.py | 128 ++++++++++++++++++++++++++++++++++++++- + 3 files changed, 169 insertions(+), 4 deletions(-) + +diff --git a/Lib/tarfile.py b/Lib/tarfile.py +index 130b5e0..3b7d8d5 100755 +--- a/Lib/tarfile.py ++++ b/Lib/tarfile.py +@@ -72,6 +72,13 @@ __all__ = ["TarFile", "TarInfo", "is_tarfile", "TarError", "ReadError", + "ENCODING", "USTAR_FORMAT", "GNU_FORMAT", "PAX_FORMAT", + "DEFAULT_FORMAT", "open"] + ++# If true, use the safer (but backwards-incompatible) 'tar' extraction filter, ++# rather than 'fully_trusted', by default. ++# The emitted warning is changed to match. 
++_RH_SAFER_DEFAULT = True ++ ++# System-wide configuration file ++_CONFIG_FILENAME = '/etc/python/tarfile.cfg' + + #--------------------------------------------------------- + # tar constants +@@ -2211,6 +2218,41 @@ class TarFile(object): + if filter is None: + filter = self.extraction_filter + if filter is None: ++ name = os.environ.get('PYTHON_TARFILE_EXTRACTION_FILTER') ++ if name is None: ++ try: ++ file = bltn_open(_CONFIG_FILENAME) ++ except FileNotFoundError: ++ pass ++ else: ++ import configparser ++ conf = configparser.ConfigParser( ++ interpolation=None, ++ comment_prefixes=('#', ), ++ ) ++ with file: ++ conf.read_file(file) ++ name = conf.get('tarfile', ++ 'PYTHON_TARFILE_EXTRACTION_FILTER', ++ fallback='') ++ if name: ++ try: ++ filter = _NAMED_FILTERS[name] ++ except KeyError: ++ raise ValueError(f"filter {filter!r} not found") from None ++ self.extraction_filter = filter ++ return filter ++ if _RH_SAFER_DEFAULT: ++ warnings.warn( ++ 'The default behavior of tarfile extraction has been ' ++ + 'changed to disallow common exploits ' ++ + '(including CVE-2007-4559). ' ++ + 'By default, absolute/parent paths are disallowed ' ++ + 'and some mode bits are cleared. ' ++ + 'See https://access.redhat.com/articles/7004769 ' ++ + 'for more details.', ++ RuntimeWarning) ++ return tar_filter + return fully_trusted_filter + if isinstance(filter, str): + raise TypeError( +diff --git a/Lib/test/test_shutil.py b/Lib/test/test_shutil.py +index 9bf4145..f247b82 100644 +--- a/Lib/test/test_shutil.py ++++ b/Lib/test/test_shutil.py +@@ -1665,7 +1665,8 @@ class TestArchives(BaseTest, unittest.TestCase): + def check_unpack_tarball(self, format): + self.check_unpack_archive(format, filter='fully_trusted') + self.check_unpack_archive(format, filter='data') +- with warnings_helper.check_no_warnings(self): ++ with warnings_helper.check_warnings( ++ ('.*CVE-2007-4559', RuntimeWarning)): + self.check_unpack_archive(format) + + def test_unpack_archive_tar(self): +diff --git a/Lib/test/test_tarfile.py b/Lib/test/test_tarfile.py +index cdea033..4724285 100644 +--- a/Lib/test/test_tarfile.py ++++ b/Lib/test/test_tarfile.py +@@ -2,7 +2,7 @@ import sys + import os + import io + from hashlib import sha256 +-from contextlib import contextmanager ++from contextlib import contextmanager, ExitStack + from random import Random + import pathlib + import shutil +@@ -2999,7 +2999,11 @@ class NoneInfoExtractTests(ReadTest): + tar = tarfile.open(tarname, mode='r', encoding="iso8859-1") + cls.control_dir = pathlib.Path(TEMPDIR) / "extractall_ctrl" + tar.errorlevel = 0 +- tar.extractall(cls.control_dir, filter=cls.extraction_filter) ++ with ExitStack() as cm: ++ if cls.extraction_filter is None: ++ cm.enter_context(warnings.catch_warnings()) ++ warnings.simplefilter(action="ignore", category=RuntimeWarning) ++ tar.extractall(cls.control_dir, filter=cls.extraction_filter) + tar.close() + cls.control_paths = set( + p.relative_to(cls.control_dir) +@@ -3674,7 +3678,8 @@ class TestExtractionFilters(unittest.TestCase): + """Ensure the default filter does not warn (like in 3.12)""" + with ArchiveMaker() as arc: + arc.add('foo') +- with warnings_helper.check_no_warnings(self): ++ with warnings_helper.check_warnings( ++ ('.*CVE-2007-4559', RuntimeWarning)): + with self.check_context(arc.open(), None): + self.expect_file('foo') + +@@ -3844,6 +3849,123 @@ class TestExtractionFilters(unittest.TestCase): + self.expect_exception(TypeError) # errorlevel is not int + + ++ @contextmanager ++ def rh_config_context(self, config_lines=None): 
++ """Set up for testing various ways of overriding the default filter ++ ++ return a triple with: ++ - temporary directory ++ - EnvironmentVarGuard() ++ - a test archive for use with check_* methods below ++ ++ If config_lines is given, write them to the config file. Otherwise ++ the config file is missing. ++ """ ++ tempdir = pathlib.Path(TEMPDIR) / 'tmp' ++ configfile = tempdir / 'tarfile.cfg' ++ with ArchiveMaker() as arc: ++ arc.add('good') ++ arc.add('ugly', symlink_to='/etc/passwd') ++ arc.add('../bad') ++ with ( ++ os_helper.temp_dir(tempdir), ++ support.swap_attr(tarfile, '_CONFIG_FILENAME', str(configfile)), ++ os_helper.EnvironmentVarGuard() as env, ++ arc.open() as tar, ++ ): ++ if config_lines is not None: ++ with configfile.open('w') as f: ++ for line in config_lines: ++ print(line, file=f) ++ yield tempdir, env, tar ++ ++ def check_rh_default_behavior(self, tar, tempdir): ++ """Check RH default: warn and refuse to extract dangerous files.""" ++ with ( ++ warnings_helper.check_warnings( ++ ('.*CVE-2007-4559', RuntimeWarning)), ++ self.assertRaises(tarfile.OutsideDestinationError), ++ ): ++ tar.extractall(tempdir / 'outdir') ++ ++ def check_trusted_default(self, tar, tempdir): ++ """Check 'fully_trusted' is configured as the default filter.""" ++ with ( ++ warnings_helper.check_no_warnings(self), ++ ): ++ tar.extractall(tempdir / 'outdir') ++ self.assertTrue((tempdir / 'outdir/good').exists()) ++ self.assertEqual((tempdir / 'outdir/ugly').readlink(), ++ pathlib.Path('/etc/passwd')) ++ self.assertTrue((tempdir / 'bad').exists()) ++ ++ def test_rh_default_no_conf(self): ++ with self.rh_config_context() as (tempdir, env, tar): ++ self.check_rh_default_behavior(tar, tempdir) ++ ++ def test_rh_default_from_file(self): ++ lines = ['[tarfile]', 'PYTHON_TARFILE_EXTRACTION_FILTER=fully_trusted'] ++ with self.rh_config_context(lines) as (tempdir, env, tar): ++ self.check_trusted_default(tar, tempdir) ++ ++ def test_rh_empty_config_file(self): ++ """Empty config file -> default behavior""" ++ lines = [] ++ with self.rh_config_context(lines) as (tempdir, env, tar): ++ self.check_rh_default_behavior(tar, tempdir) ++ ++ def test_empty_config_section(self): ++ """Empty section in config file -> default behavior""" ++ lines = ['[tarfile]'] ++ with self.rh_config_context(lines) as (tempdir, env, tar): ++ self.check_rh_default_behavior(tar, tempdir) ++ ++ def test_rh_default_empty_config_option(self): ++ """Empty option value in config file -> default behavior""" ++ lines = ['[tarfile]', 'PYTHON_TARFILE_EXTRACTION_FILTER='] ++ with self.rh_config_context(lines) as (tempdir, env, tar): ++ self.check_rh_default_behavior(tar, tempdir) ++ ++ def test_bad_config_option(self): ++ """Bad option value in config file -> ValueError""" ++ lines = ['[tarfile]', 'PYTHON_TARFILE_EXTRACTION_FILTER=unknown!'] ++ with self.rh_config_context(lines) as (tempdir, env, tar): ++ with self.assertRaises(ValueError): ++ tar.extractall(tempdir / 'outdir') ++ ++ def test_default_from_envvar(self): ++ with self.rh_config_context() as (tempdir, env, tar): ++ env['PYTHON_TARFILE_EXTRACTION_FILTER'] = 'fully_trusted' ++ self.check_trusted_default(tar, tempdir) ++ ++ def test_empty_envvar(self): ++ """Empty env variable -> default behavior""" ++ with self.rh_config_context() as (tempdir, env, tar): ++ env['PYTHON_TARFILE_EXTRACTION_FILTER'] = '' ++ self.check_rh_default_behavior(tar, tempdir) ++ ++ def test_bad_envvar(self): ++ with self.rh_config_context() as (tempdir, env, tar): ++ env['PYTHON_TARFILE_EXTRACTION_FILTER'] = 
'unknown!' ++ with self.assertRaises(ValueError): ++ tar.extractall(tempdir / 'outdir') ++ ++ def test_envvar_overrides_file(self): ++ lines = ['[tarfile]', 'PYTHON_TARFILE_EXTRACTION_FILTER=data'] ++ with self.rh_config_context(lines) as (tempdir, env, tar): ++ env['PYTHON_TARFILE_EXTRACTION_FILTER'] = 'fully_trusted' ++ self.check_trusted_default(tar, tempdir) ++ ++ def test_monkeypatch_overrides_envvar(self): ++ with self.rh_config_context(None) as (tempdir, env, tar): ++ env['PYTHON_TARFILE_EXTRACTION_FILTER'] = 'data' ++ with support.swap_attr( ++ tarfile.TarFile, 'extraction_filter', ++ staticmethod(tarfile.fully_trusted_filter) ++ ): ++ self.check_trusted_default(tar, tempdir) ++ ++ + def setUpModule(): + os_helper.unlink(TEMPDIR) + os.makedirs(TEMPDIR) +-- +2.41.0 + diff --git a/SOURCES/00415-cve-2023-27043-gh-102988-reject-malformed-addresses-in-email-parseaddr-111116.patch b/SOURCES/00415-cve-2023-27043-gh-102988-reject-malformed-addresses-in-email-parseaddr-111116.patch new file mode 100644 index 0000000..73bf9b7 --- /dev/null +++ b/SOURCES/00415-cve-2023-27043-gh-102988-reject-malformed-addresses-in-email-parseaddr-111116.patch @@ -0,0 +1,755 @@ +From d8b0fafb202bf884135a3f7f0ce0b086217a2da2 Mon Sep 17 00:00:00 2001 +From: Victor Stinner +Date: Fri, 15 Dec 2023 16:10:40 +0100 +Subject: [PATCH 1/2] 00415: [CVE-2023-27043] gh-102988: Reject malformed + addresses in email.parseaddr() (#111116) + +Detect email address parsing errors and return empty tuple to +indicate the parsing error (old API). Add an optional 'strict' +parameter to getaddresses() and parseaddr() functions. Patch by +Thomas Dwyer. + +Co-Authored-By: Thomas Dwyer +--- + Doc/library/email.utils.rst | 19 +- + Lib/email/utils.py | 151 ++++++++++++- + Lib/test/test_email/test_email.py | 204 +++++++++++++++++- + ...-10-20-15-28-08.gh-issue-102988.dStNO7.rst | 8 + + 4 files changed, 361 insertions(+), 21 deletions(-) + create mode 100644 Misc/NEWS.d/next/Library/2023-10-20-15-28-08.gh-issue-102988.dStNO7.rst + +diff --git a/Doc/library/email.utils.rst b/Doc/library/email.utils.rst +index 0e266b6..6723dc4 100644 +--- a/Doc/library/email.utils.rst ++++ b/Doc/library/email.utils.rst +@@ -60,13 +60,18 @@ of the new API. + begins with angle brackets, they are stripped off. + + +-.. function:: parseaddr(address) ++.. function:: parseaddr(address, *, strict=True) + + Parse address -- which should be the value of some address-containing field such + as :mailheader:`To` or :mailheader:`Cc` -- into its constituent *realname* and + *email address* parts. Returns a tuple of that information, unless the parse + fails, in which case a 2-tuple of ``('', '')`` is returned. + ++ If *strict* is true, use a strict parser which rejects malformed inputs. ++ ++ .. versionchanged:: 3.13 ++ Add *strict* optional parameter and reject malformed inputs by default. ++ + + .. function:: formataddr(pair, charset='utf-8') + +@@ -84,12 +89,15 @@ of the new API. + Added the *charset* option. + + +-.. function:: getaddresses(fieldvalues) ++.. function:: getaddresses(fieldvalues, *, strict=True) + + This method returns a list of 2-tuples of the form returned by ``parseaddr()``. + *fieldvalues* is a sequence of header field values as might be returned by +- :meth:`Message.get_all `. Here's a simple +- example that gets all the recipients of a message:: ++ :meth:`Message.get_all `. ++ ++ If *strict* is true, use a strict parser which rejects malformed inputs. 
++ ++ Here's a simple example that gets all the recipients of a message:: + + from email.utils import getaddresses + +@@ -99,6 +107,9 @@ of the new API. + resent_ccs = msg.get_all('resent-cc', []) + all_recipients = getaddresses(tos + ccs + resent_tos + resent_ccs) + ++ .. versionchanged:: 3.13 ++ Add *strict* optional parameter and reject malformed inputs by default. ++ + + .. function:: parsedate(date) + +diff --git a/Lib/email/utils.py b/Lib/email/utils.py +index cfdfeb3..9522341 100644 +--- a/Lib/email/utils.py ++++ b/Lib/email/utils.py +@@ -48,6 +48,7 @@ TICK = "'" + specialsre = re.compile(r'[][\\()<>@,:;".]') + escapesre = re.compile(r'[\\"]') + ++ + def _has_surrogates(s): + """Return True if s contains surrogate-escaped binary data.""" + # This check is based on the fact that unless there are surrogates, utf8 +@@ -106,12 +107,127 @@ def formataddr(pair, charset='utf-8'): + return address + + ++def _iter_escaped_chars(addr): ++ pos = 0 ++ escape = False ++ for pos, ch in enumerate(addr): ++ if escape: ++ yield (pos, '\\' + ch) ++ escape = False ++ elif ch == '\\': ++ escape = True ++ else: ++ yield (pos, ch) ++ if escape: ++ yield (pos, '\\') ++ ++ ++def _strip_quoted_realnames(addr): ++ """Strip real names between quotes.""" ++ if '"' not in addr: ++ # Fast path ++ return addr ++ ++ start = 0 ++ open_pos = None ++ result = [] ++ for pos, ch in _iter_escaped_chars(addr): ++ if ch == '"': ++ if open_pos is None: ++ open_pos = pos ++ else: ++ if start != open_pos: ++ result.append(addr[start:open_pos]) ++ start = pos + 1 ++ open_pos = None ++ ++ if start < len(addr): ++ result.append(addr[start:]) ++ ++ return ''.join(result) + +-def getaddresses(fieldvalues): +- """Return a list of (REALNAME, EMAIL) for each fieldvalue.""" +- all = COMMASPACE.join(str(v) for v in fieldvalues) +- a = _AddressList(all) +- return a.addresslist ++ ++supports_strict_parsing = True ++ ++def getaddresses(fieldvalues, *, strict=True): ++ """Return a list of (REALNAME, EMAIL) or ('','') for each fieldvalue. ++ ++ When parsing fails for a fieldvalue, a 2-tuple of ('', '') is returned in ++ its place. ++ ++ If strict is true, use a strict parser which rejects malformed inputs. ++ """ ++ ++ # If strict is true, if the resulting list of parsed addresses is greater ++ # than the number of fieldvalues in the input list, a parsing error has ++ # occurred and consequently a list containing a single empty 2-tuple [('', ++ # '')] is returned in its place. This is done to avoid invalid output. ++ # ++ # Malformed input: getaddresses(['alice@example.com ']) ++ # Invalid output: [('', 'alice@example.com'), ('', 'bob@example.com')] ++ # Safe output: [('', '')] ++ ++ if not strict: ++ all = COMMASPACE.join(str(v) for v in fieldvalues) ++ a = _AddressList(all) ++ return a.addresslist ++ ++ fieldvalues = [str(v) for v in fieldvalues] ++ fieldvalues = _pre_parse_validation(fieldvalues) ++ addr = COMMASPACE.join(fieldvalues) ++ a = _AddressList(addr) ++ result = _post_parse_validation(a.addresslist) ++ ++ # Treat output as invalid if the number of addresses is not equal to the ++ # expected number of addresses. ++ n = 0 ++ for v in fieldvalues: ++ # When a comma is used in the Real Name part it is not a deliminator. ++ # So strip those out before counting the commas. ++ v = _strip_quoted_realnames(v) ++ # Expected number of addresses: 1 + number of commas ++ n += 1 + v.count(',') ++ if len(result) != n: ++ return [('', '')] ++ ++ return result ++ ++ ++def _check_parenthesis(addr): ++ # Ignore parenthesis in quoted real names. 
++ addr = _strip_quoted_realnames(addr) ++ ++ opens = 0 ++ for pos, ch in _iter_escaped_chars(addr): ++ if ch == '(': ++ opens += 1 ++ elif ch == ')': ++ opens -= 1 ++ if opens < 0: ++ return False ++ return (opens == 0) ++ ++ ++def _pre_parse_validation(email_header_fields): ++ accepted_values = [] ++ for v in email_header_fields: ++ if not _check_parenthesis(v): ++ v = "('', '')" ++ accepted_values.append(v) ++ ++ return accepted_values ++ ++ ++def _post_parse_validation(parsed_email_header_tuples): ++ accepted_values = [] ++ # The parser would have parsed a correctly formatted domain-literal ++ # The existence of an [ after parsing indicates a parsing failure ++ for v in parsed_email_header_tuples: ++ if '[' in v[1]: ++ v = ('', '') ++ accepted_values.append(v) ++ ++ return accepted_values + + + def _format_timetuple_and_zone(timetuple, zone): +@@ -205,16 +321,33 @@ def parsedate_to_datetime(data): + tzinfo=datetime.timezone(datetime.timedelta(seconds=tz))) + + +-def parseaddr(addr): ++def parseaddr(addr, *, strict=True): + """ + Parse addr into its constituent realname and email address parts. + + Return a tuple of realname and email address, unless the parse fails, in + which case return a 2-tuple of ('', ''). ++ ++ If strict is True, use a strict parser which rejects malformed inputs. + """ +- addrs = _AddressList(addr).addresslist +- if not addrs: +- return '', '' ++ if not strict: ++ addrs = _AddressList(addr).addresslist ++ if not addrs: ++ return ('', '') ++ return addrs[0] ++ ++ if isinstance(addr, list): ++ addr = addr[0] ++ ++ if not isinstance(addr, str): ++ return ('', '') ++ ++ addr = _pre_parse_validation([addr])[0] ++ addrs = _post_parse_validation(_AddressList(addr).addresslist) ++ ++ if not addrs or len(addrs) > 1: ++ return ('', '') ++ + return addrs[0] + + +diff --git a/Lib/test/test_email/test_email.py b/Lib/test/test_email/test_email.py +index 677f209..20b6779 100644 +--- a/Lib/test/test_email/test_email.py ++++ b/Lib/test/test_email/test_email.py +@@ -17,6 +17,7 @@ from unittest.mock import patch + + import email + import email.policy ++import email.utils + + from email.charset import Charset + from email.generator import Generator, DecodedGenerator, BytesGenerator +@@ -3321,15 +3322,154 @@ Foo + [('Al Person', 'aperson@dom.ain'), + ('Bud Person', 'bperson@dom.ain')]) + ++ def test_getaddresses_comma_in_name(self): ++ """GH-106669 regression test.""" ++ self.assertEqual( ++ utils.getaddresses( ++ [ ++ '"Bud, Person" ', ++ 'aperson@dom.ain (Al Person)', ++ '"Mariusz Felisiak" ', ++ ] ++ ), ++ [ ++ ('Bud, Person', 'bperson@dom.ain'), ++ ('Al Person', 'aperson@dom.ain'), ++ ('Mariusz Felisiak', 'to@example.com'), ++ ], ++ ) ++ ++ def test_parsing_errors(self): ++ """Test for parsing errors from CVE-2023-27043 and CVE-2019-16056""" ++ alice = 'alice@example.org' ++ bob = 'bob@example.com' ++ empty = ('', '') ++ ++ # Test utils.getaddresses() and utils.parseaddr() on malformed email ++ # addresses: default behavior (strict=True) rejects malformed address, ++ # and strict=False which tolerates malformed address. 
++ for invalid_separator, expected_non_strict in ( ++ ('(', [(f'<{bob}>', alice)]), ++ (')', [('', alice), empty, ('', bob)]), ++ ('<', [('', alice), empty, ('', bob), empty]), ++ ('>', [('', alice), empty, ('', bob)]), ++ ('[', [('', f'{alice}[<{bob}>]')]), ++ (']', [('', alice), empty, ('', bob)]), ++ ('@', [empty, empty, ('', bob)]), ++ (';', [('', alice), empty, ('', bob)]), ++ (':', [('', alice), ('', bob)]), ++ ('.', [('', alice + '.'), ('', bob)]), ++ ('"', [('', alice), ('', f'<{bob}>')]), ++ ): ++ address = f'{alice}{invalid_separator}<{bob}>' ++ with self.subTest(address=address): ++ self.assertEqual(utils.getaddresses([address]), ++ [empty]) ++ self.assertEqual(utils.getaddresses([address], strict=False), ++ expected_non_strict) ++ ++ self.assertEqual(utils.parseaddr([address]), ++ empty) ++ self.assertEqual(utils.parseaddr([address], strict=False), ++ ('', address)) ++ ++ # Comma (',') is treated differently depending on strict parameter. ++ # Comma without quotes. ++ address = f'{alice},<{bob}>' ++ self.assertEqual(utils.getaddresses([address]), ++ [('', alice), ('', bob)]) ++ self.assertEqual(utils.getaddresses([address], strict=False), ++ [('', alice), ('', bob)]) ++ self.assertEqual(utils.parseaddr([address]), ++ empty) ++ self.assertEqual(utils.parseaddr([address], strict=False), ++ ('', address)) ++ ++ # Real name between quotes containing comma. ++ address = '"Alice, alice@example.org" ' ++ expected_strict = ('Alice, alice@example.org', 'bob@example.com') ++ self.assertEqual(utils.getaddresses([address]), [expected_strict]) ++ self.assertEqual(utils.getaddresses([address], strict=False), [expected_strict]) ++ self.assertEqual(utils.parseaddr([address]), expected_strict) ++ self.assertEqual(utils.parseaddr([address], strict=False), ++ ('', address)) ++ ++ # Valid parenthesis in comments. ++ address = 'alice@example.org (Alice)' ++ expected_strict = ('Alice', 'alice@example.org') ++ self.assertEqual(utils.getaddresses([address]), [expected_strict]) ++ self.assertEqual(utils.getaddresses([address], strict=False), [expected_strict]) ++ self.assertEqual(utils.parseaddr([address]), expected_strict) ++ self.assertEqual(utils.parseaddr([address], strict=False), ++ ('', address)) ++ ++ # Invalid parenthesis in comments. ++ address = 'alice@example.org )Alice(' ++ self.assertEqual(utils.getaddresses([address]), [empty]) ++ self.assertEqual(utils.getaddresses([address], strict=False), ++ [('', 'alice@example.org'), ('', ''), ('', 'Alice')]) ++ self.assertEqual(utils.parseaddr([address]), empty) ++ self.assertEqual(utils.parseaddr([address], strict=False), ++ ('', address)) ++ ++ # Two addresses with quotes separated by comma. ++ address = '"Jane Doe" , "John Doe" ' ++ self.assertEqual(utils.getaddresses([address]), ++ [('Jane Doe', 'jane@example.net'), ++ ('John Doe', 'john@example.net')]) ++ self.assertEqual(utils.getaddresses([address], strict=False), ++ [('Jane Doe', 'jane@example.net'), ++ ('John Doe', 'john@example.net')]) ++ self.assertEqual(utils.parseaddr([address]), empty) ++ self.assertEqual(utils.parseaddr([address], strict=False), ++ ('', address)) ++ ++ # Test email.utils.supports_strict_parsing attribute ++ self.assertEqual(email.utils.supports_strict_parsing, True) ++ + def test_getaddresses_nasty(self): +- eq = self.assertEqual +- eq(utils.getaddresses(['foo: ;']), [('', '')]) +- eq(utils.getaddresses( +- ['[]*-- =~$']), +- [('', ''), ('', ''), ('', '*--')]) +- eq(utils.getaddresses( +- ['foo: ;', '"Jason R. Mastaler" ']), +- [('', ''), ('Jason R. 
Mastaler', 'jason@dom.ain')]) ++ for addresses, expected in ( ++ (['"Sürname, Firstname" '], ++ [('Sürname, Firstname', 'to@example.com')]), ++ ++ (['foo: ;'], ++ [('', '')]), ++ ++ (['foo: ;', '"Jason R. Mastaler" '], ++ [('', ''), ('Jason R. Mastaler', 'jason@dom.ain')]), ++ ++ ([r'Pete(A nice \) chap) '], ++ [('Pete (A nice ) chap his account his host)', 'pete@silly.test')]), ++ ++ (['(Empty list)(start)Undisclosed recipients :(nobody(I know))'], ++ [('', '')]), ++ ++ (['Mary <@machine.tld:mary@example.net>, , jdoe@test . example'], ++ [('Mary', 'mary@example.net'), ('', ''), ('', 'jdoe@test.example')]), ++ ++ (['John Doe '], ++ [('John Doe (comment)', 'jdoe@machine.example')]), ++ ++ (['"Mary Smith: Personal Account" '], ++ [('Mary Smith: Personal Account', 'smith@home.example')]), ++ ++ (['Undisclosed recipients:;'], ++ [('', '')]), ++ ++ ([r', "Giant; \"Big\" Box" '], ++ [('', 'boss@nil.test'), ('Giant; "Big" Box', 'bob@example.net')]), ++ ): ++ with self.subTest(addresses=addresses): ++ self.assertEqual(utils.getaddresses(addresses), ++ expected) ++ self.assertEqual(utils.getaddresses(addresses, strict=False), ++ expected) ++ ++ addresses = ['[]*-- =~$'] ++ self.assertEqual(utils.getaddresses(addresses), ++ [('', '')]) ++ self.assertEqual(utils.getaddresses(addresses, strict=False), ++ [('', ''), ('', ''), ('', '*--')]) + + def test_getaddresses_embedded_comment(self): + """Test proper handling of a nested comment""" +@@ -3520,6 +3660,54 @@ multipart/report + m = cls(*constructor, policy=email.policy.default) + self.assertIs(m.policy, email.policy.default) + ++ def test_iter_escaped_chars(self): ++ self.assertEqual(list(utils._iter_escaped_chars(r'a\\b\"c\\"d')), ++ [(0, 'a'), ++ (2, '\\\\'), ++ (3, 'b'), ++ (5, '\\"'), ++ (6, 'c'), ++ (8, '\\\\'), ++ (9, '"'), ++ (10, 'd')]) ++ self.assertEqual(list(utils._iter_escaped_chars('a\\')), ++ [(0, 'a'), (1, '\\')]) ++ ++ def test_strip_quoted_realnames(self): ++ def check(addr, expected): ++ self.assertEqual(utils._strip_quoted_realnames(addr), expected) ++ ++ check('"Jane Doe" , "John Doe" ', ++ ' , ') ++ check(r'"Jane \"Doe\"." 
', ++ ' ') ++ ++ # special cases ++ check(r'before"name"after', 'beforeafter') ++ check(r'before"name"', 'before') ++ check(r'b"name"', 'b') # single char ++ check(r'"name"after', 'after') ++ check(r'"name"a', 'a') # single char ++ check(r'"name"', '') ++ ++ # no change ++ for addr in ( ++ 'Jane Doe , John Doe ', ++ 'lone " quote', ++ ): ++ self.assertEqual(utils._strip_quoted_realnames(addr), addr) ++ ++ ++ def test_check_parenthesis(self): ++ addr = 'alice@example.net' ++ self.assertTrue(utils._check_parenthesis(f'{addr} (Alice)')) ++ self.assertFalse(utils._check_parenthesis(f'{addr} )Alice(')) ++ self.assertFalse(utils._check_parenthesis(f'{addr} (Alice))')) ++ self.assertFalse(utils._check_parenthesis(f'{addr} ((Alice)')) ++ ++ # Ignore real name between quotes ++ self.assertTrue(utils._check_parenthesis(f'")Alice((" {addr}')) ++ + + # Test the iterator/generators + class TestIterators(TestEmailBase): +diff --git a/Misc/NEWS.d/next/Library/2023-10-20-15-28-08.gh-issue-102988.dStNO7.rst b/Misc/NEWS.d/next/Library/2023-10-20-15-28-08.gh-issue-102988.dStNO7.rst +new file mode 100644 +index 0000000..3d0e9e4 +--- /dev/null ++++ b/Misc/NEWS.d/next/Library/2023-10-20-15-28-08.gh-issue-102988.dStNO7.rst +@@ -0,0 +1,8 @@ ++:func:`email.utils.getaddresses` and :func:`email.utils.parseaddr` now ++return ``('', '')`` 2-tuples in more situations where invalid email ++addresses are encountered instead of potentially inaccurate values. Add ++optional *strict* parameter to these two functions: use ``strict=False`` to ++get the old behavior, accept malformed inputs. ++``getattr(email.utils, 'supports_strict_parsing', False)`` can be use to check ++if the *strict* paramater is available. Patch by Thomas Dwyer and Victor ++Stinner to improve the CVE-2023-27043 fix. +-- +2.43.0 + + +From 6c34f5b95da90bd494e29776c0e807af44689fae Mon Sep 17 00:00:00 2001 +From: Lumir Balhar +Date: Wed, 10 Jan 2024 08:53:53 +0100 +Subject: [PATCH 2/2] Make it possible to disable strict parsing in email + module + +--- + Doc/library/email.utils.rst | 26 +++++++++++ + Lib/email/utils.py | 54 +++++++++++++++++++++- + Lib/test/test_email/test_email.py | 74 ++++++++++++++++++++++++++++++- + 3 files changed, 150 insertions(+), 4 deletions(-) + +diff --git a/Doc/library/email.utils.rst b/Doc/library/email.utils.rst +index 6723dc4..c89602d 100644 +--- a/Doc/library/email.utils.rst ++++ b/Doc/library/email.utils.rst +@@ -69,6 +69,19 @@ of the new API. + + If *strict* is true, use a strict parser which rejects malformed inputs. + ++ The default setting for *strict* is set to ``True``, but you can override ++ it by setting the environment variable ``PYTHON_EMAIL_DISABLE_STRICT_ADDR_PARSING`` ++ to non-empty string. ++ ++ Additionally, you can permanently set the default value for *strict* to ++ ``False`` by creating the configuration file ``/etc/python/email.cfg`` ++ with the following content: ++ ++ .. code-block:: ini ++ ++ [email_addr_parsing] ++ PYTHON_EMAIL_DISABLE_STRICT_ADDR_PARSING = true ++ + .. versionchanged:: 3.13 + Add *strict* optional parameter and reject malformed inputs by default. + +@@ -97,6 +110,19 @@ of the new API. + + If *strict* is true, use a strict parser which rejects malformed inputs. + ++ The default setting for *strict* is set to ``True``, but you can override ++ it by setting the environment variable ``PYTHON_EMAIL_DISABLE_STRICT_ADDR_PARSING`` ++ to non-empty string. 
++ ++ Additionally, you can permanently set the default value for *strict* to ++ ``False`` by creating the configuration file ``/etc/python/email.cfg`` ++ with the following content: ++ ++ .. code-block:: ini ++ ++ [email_addr_parsing] ++ PYTHON_EMAIL_DISABLE_STRICT_ADDR_PARSING = true ++ + Here's a simple example that gets all the recipients of a message:: + + from email.utils import getaddresses +diff --git a/Lib/email/utils.py b/Lib/email/utils.py +index 9522341..2e30e09 100644 +--- a/Lib/email/utils.py ++++ b/Lib/email/utils.py +@@ -48,6 +48,46 @@ TICK = "'" + specialsre = re.compile(r'[][\\()<>@,:;".]') + escapesre = re.compile(r'[\\"]') + ++_EMAIL_CONFIG_FILE = "/etc/python/email.cfg" ++_cached_strict_addr_parsing = None ++ ++ ++def _use_strict_email_parsing(): ++ """"Cache implementation for _cached_strict_addr_parsing""" ++ global _cached_strict_addr_parsing ++ if _cached_strict_addr_parsing is None: ++ _cached_strict_addr_parsing = _use_strict_email_parsing_impl() ++ return _cached_strict_addr_parsing ++ ++ ++def _use_strict_email_parsing_impl(): ++ """Returns True if strict email parsing is not disabled by ++ config file or env variable. ++ """ ++ disabled = bool(os.environ.get("PYTHON_EMAIL_DISABLE_STRICT_ADDR_PARSING")) ++ if disabled: ++ return False ++ ++ try: ++ file = open(_EMAIL_CONFIG_FILE) ++ except FileNotFoundError: ++ pass ++ else: ++ with file: ++ import configparser ++ config = configparser.ConfigParser( ++ interpolation=None, ++ comment_prefixes=('#', ), ++ ++ ) ++ config.read_file(file) ++ disabled = config.getboolean('email_addr_parsing', "PYTHON_EMAIL_DISABLE_STRICT_ADDR_PARSING", fallback=None) ++ ++ if disabled: ++ return False ++ ++ return True ++ + + def _has_surrogates(s): + """Return True if s contains surrogate-escaped binary data.""" +@@ -149,7 +189,7 @@ def _strip_quoted_realnames(addr): + + supports_strict_parsing = True + +-def getaddresses(fieldvalues, *, strict=True): ++def getaddresses(fieldvalues, *, strict=None): + """Return a list of (REALNAME, EMAIL) or ('','') for each fieldvalue. + + When parsing fails for a fieldvalue, a 2-tuple of ('', '') is returned in +@@ -158,6 +198,11 @@ def getaddresses(fieldvalues, *, strict=True): + If strict is true, use a strict parser which rejects malformed inputs. + """ + ++ # If default is used, it's True unless disabled ++ # by env variable or config file. ++ if strict == None: ++ strict = _use_strict_email_parsing() ++ + # If strict is true, if the resulting list of parsed addresses is greater + # than the number of fieldvalues in the input list, a parsing error has + # occurred and consequently a list containing a single empty 2-tuple [('', +@@ -321,7 +366,7 @@ def parsedate_to_datetime(data): + tzinfo=datetime.timezone(datetime.timedelta(seconds=tz))) + + +-def parseaddr(addr, *, strict=True): ++def parseaddr(addr, *, strict=None): + """ + Parse addr into its constituent realname and email address parts. + +@@ -330,6 +375,11 @@ def parseaddr(addr, *, strict=True): + + If strict is True, use a strict parser which rejects malformed inputs. + """ ++ # If default is used, it's True unless disabled ++ # by env variable or config file. 
++ if strict == None: ++ strict = _use_strict_email_parsing() ++ + if not strict: + addrs = _AddressList(addr).addresslist + if not addrs: +diff --git a/Lib/test/test_email/test_email.py b/Lib/test/test_email/test_email.py +index 20b6779..d7d99f0 100644 +--- a/Lib/test/test_email/test_email.py ++++ b/Lib/test/test_email/test_email.py +@@ -8,6 +8,9 @@ import base64 + import unittest + import textwrap + import warnings ++import contextlib ++import tempfile ++import os + + from io import StringIO, BytesIO + from itertools import chain +@@ -41,8 +44,8 @@ from email import quoprimime + from email import utils + + from test import support +-from test.support import threading_helper +-from test.support.os_helper import unlink ++from test.support import threading_helper, swap_attr ++from test.support.os_helper import unlink, EnvironmentVarGuard + from test.test_email import openfile, TestEmailBase + + # These imports are documented to work, but we are testing them using a +@@ -3427,6 +3430,73 @@ Foo + # Test email.utils.supports_strict_parsing attribute + self.assertEqual(email.utils.supports_strict_parsing, True) + ++ def test_parsing_errors_strict_set_via_env_var(self): ++ address = 'alice@example.org )Alice(' ++ empty = ('', '') ++ ++ # Reset cached default value to make the function ++ # reload the config file provided below. ++ utils._cached_strict_addr_parsing = None ++ ++ # Strict disabled via env variable, old behavior expected ++ with EnvironmentVarGuard() as environ: ++ environ["PYTHON_EMAIL_DISABLE_STRICT_ADDR_PARSING"] = "1" ++ ++ self.assertEqual(utils.getaddresses([address]), ++ [('', 'alice@example.org'), ('', ''), ('', 'Alice')]) ++ self.assertEqual(utils.parseaddr([address]), ('', address)) ++ ++ # Clear cache again ++ utils._cached_strict_addr_parsing = None ++ ++ # Default strict=True, empty result expected ++ self.assertEqual(utils.getaddresses([address]), [empty]) ++ self.assertEqual(utils.parseaddr([address]), empty) ++ ++ # Clear cache again ++ utils._cached_strict_addr_parsing = None ++ ++ # Empty string in env variable = strict parsing enabled (default) ++ with EnvironmentVarGuard() as environ: ++ environ["PYTHON_EMAIL_DISABLE_STRICT_ADDR_PARSING"] = "" ++ ++ # Default strict=True, empty result expected ++ self.assertEqual(utils.getaddresses([address]), [empty]) ++ self.assertEqual(utils.parseaddr([address]), empty) ++ ++ @contextlib.contextmanager ++ def _email_strict_parsing_conf(self): ++ """Context for the given email strict parsing configured in config file""" ++ with tempfile.TemporaryDirectory() as tmpdirname: ++ filename = os.path.join(tmpdirname, 'conf.cfg') ++ with swap_attr(utils, "_EMAIL_CONFIG_FILE", filename): ++ with open(filename, 'w') as file: ++ file.write('[email_addr_parsing]\n') ++ file.write('PYTHON_EMAIL_DISABLE_STRICT_ADDR_PARSING = true') ++ utils._EMAIL_CONFIG_FILE = filename ++ yield ++ ++ def test_parsing_errors_strict_disabled_via_config_file(self): ++ address = 'alice@example.org )Alice(' ++ empty = ('', '') ++ ++ # Reset cached default value to make the function ++ # reload the config file provided below. 
++ utils._cached_strict_addr_parsing = None ++ ++ # Strict disabled via config file, old results expected ++ with self._email_strict_parsing_conf(): ++ self.assertEqual(utils.getaddresses([address]), ++ [('', 'alice@example.org'), ('', ''), ('', 'Alice')]) ++ self.assertEqual(utils.parseaddr([address]), ('', address)) ++ ++ # Clear cache again ++ utils._cached_strict_addr_parsing = None ++ ++ # Default strict=True, empty result expected ++ self.assertEqual(utils.getaddresses([address]), [empty]) ++ self.assertEqual(utils.parseaddr([address]), empty) ++ + def test_getaddresses_nasty(self): + for addresses, expected in ( + (['"Sürname, Firstname" '], +-- +2.43.0 + diff --git a/SOURCES/Python-3.11.7.tar.xz.asc b/SOURCES/Python-3.11.7.tar.xz.asc new file mode 100644 index 0000000..5d3dcbe --- /dev/null +++ b/SOURCES/Python-3.11.7.tar.xz.asc @@ -0,0 +1,16 @@ +-----BEGIN PGP SIGNATURE----- + +iQIzBAABCAAdFiEEz9yiRbEEPPKl+Xhl/+h0BBaL2EcFAmVuFigACgkQ/+h0BBaL +2EeHPg/+LU5xs2ZDrQogDcH+A1v8RyursiggypdM5hXTrsFsTCIk4iekcI9xkhG1 +ltNX4UuCe5PUEbTgtaWP0ncXARrUnPCoQaQ1sHVDTYoHegancsk+sXZc1JM7qr0p +Y4Ig6mKjuHFMXCInQSI2GaH4t5r4Z1jGk/PGrecIHOPJgqfA/6Z3TBF5N+y3jEvS +2QazMB298q4RDhh9m3REe8LwFPHDlfw9eRohv0MB8xygg9KtxhLZrN7gLBQZvKGD +ihNw6EgJj5OZ0dvwKCCXnlZuwknuJW7vAOPHhYeenPdVdYCGoRSyN7JdD07L+5AG +O14l2rqZrz5Eu28by+kAUrcPYAfAXekw1PmtT3HSd9U/nqnUiTkkJcjyGG/e3cjJ +sUDKMNCSBq0G7j5DB3bB6VHkZjVuz+T+iR5QdfJ4kI2pYSuE/rUj1rhkUXApYsHl +7Wff0QbOW6QT1wCtQcMpJSzkTDVJVYxiqrko/ihlOhphDHYLdOIGOrxWAUwc06x/ +BhJD6tM1kEVZvifoJp1OsNwDzZ/Ku6CUs05E1vWxdeNVeANyKAgCZ5hOVmhnv866 +11zfgo/znRsMzMIyJuy0bhO0C6omVLzzfhipAbZM2jDorn37xxV0v/I0pceNtLrp +YR7Tjs7+Ihe6/oItjW53j9T7ANdgQ1RVDg98lKlPFNL+hxfctwY= +=0Pkd +-----END PGP SIGNATURE----- diff --git a/SOURCES/check-pyc-timestamps.py b/SOURCES/check-pyc-timestamps.py new file mode 100644 index 0000000..d619dae --- /dev/null +++ b/SOURCES/check-pyc-timestamps.py @@ -0,0 +1,56 @@ +"""Checks if all *.pyc files have later mtime than their *.py files.""" + +import os +import sys +from importlib.util import cache_from_source +from pathlib import Path + + +RPM_BUILD_ROOT = os.environ.get('RPM_BUILD_ROOT', '') + +# ...cpython-3X.pyc +# ...cpython-3X.opt-1.pyc +# ...cpython-3X.opt-2.pyc +LEVELS = (None, 1, 2) + +# list of globs of test and other files that we expect not to have bytecode +not_compiled = [ + '/usr/bin/*', + '/usr/lib/rpm/redhat/*', + '*/test/*/bad_coding.py', + '*/test/*/bad_coding2.py', + '*/test/*/badsyntax_*.py', + '*/lib2to3/tests/data/bom.py', + '*/lib2to3/tests/data/crlf.py', + '*/lib2to3/tests/data/different_encoding.py', + '*/lib2to3/tests/data/false_encoding.py', + '*/lib2to3/tests/data/py2_test_grammar.py', + '*.debug-gdb.py', +] + + +def bytecode_expected(path): + path = Path(path[len(RPM_BUILD_ROOT):]) + for glob in not_compiled: + if path.match(glob): + return False + return True + + +failed = 0 +compiled = (path for path in sys.argv[1:] if bytecode_expected(path)) +for path in compiled: + to_check = (cache_from_source(path, optimization=opt) for opt in LEVELS) + f_mtime = os.path.getmtime(path) + for pyc in to_check: + c_mtime = os.path.getmtime(pyc) + if c_mtime < f_mtime: + print('Failed bytecompilation timestamps check: ' + f'Bytecode file {pyc} is older than source file {path}', + file=sys.stderr) + failed += 1 + +if failed: + print(f'\n{failed} files failed bytecompilation timestamps check.', + file=sys.stderr) + sys.exit(1) diff --git a/SOURCES/idle3.appdata.xml b/SOURCES/idle3.appdata.xml new file mode 100644 index 0000000..94f87a2 --- /dev/null +++ 
b/SOURCES/idle3.appdata.xml @@ -0,0 +1,35 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!-- Copyright 2017 Zbigniew Jędrzejewski-Szmek -->
+<application>
+  <id type="desktop">idle3.desktop</id>
+  <name>IDLE3</name>
+  <metadata_license>CC0</metadata_license>
+  <project_license>Python-2.0</project_license>
+  <summary>Python 3 Integrated Development and Learning Environment</summary>
+  <description>
+    <p>
+      IDLE is Python’s Integrated Development and Learning Environment.
+      The GUI is uniform between Windows, Unix, and Mac OS X.
+      IDLE provides an easy way to start writing, running, and debugging
+      Python code.
+    </p>
+    <p>
+      IDLE is written in pure Python, and uses the tkinter GUI toolkit.
+      It provides:
+    </p>
+    <ul>
+      <li>a Python shell window (interactive interpreter) with colorizing of code input, output, and error messages,</li>
+      <li>a multi-window text editor with multiple undo, Python colorizing, smart indent, call tips, auto completion, and other features,</li>
+      <li>search within any window, replace within editor windows, and search through multiple files (grep),</li>
+      <li>a debugger with persistent breakpoints, stepping, and viewing of global and local namespaces.</li>
+    </ul>
+  </description>
+  <url type="homepage">https://docs.python.org/3/library/idle.html</url>
+  <screenshots>
+    <screenshot type="default">http://in.waw.pl/~zbyszek/fedora/idle3-appdata/idle3-main-window.png</screenshot>
+    <screenshot>http://in.waw.pl/~zbyszek/fedora/idle3-appdata/idle3-class-browser.png</screenshot>
+    <screenshot>http://in.waw.pl/~zbyszek/fedora/idle3-appdata/idle3-code-viewer.png</screenshot>
+  </screenshots>
+  <update_contact>zbyszek@in.waw.pl</update_contact>
+</application>
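Note: the AppStream metadata above and the desktop entry added just below are the kind of files that are normally sanity-checked during the RPM build; the spec later in this import lists desktop-file-utils and libappstream-glib among its BuildRequires, which ship the usual validators for these formats. A minimal sketch of such a check follows; the helper name and file paths are illustrative assumptions, not taken from the spec.

# Hypothetical sketch only, not part of the spec. It mirrors the kind of
# validation that desktop-file-utils (desktop-file-validate) and
# libappstream-glib (appstream-util) are typically used for at build time.
import subprocess

def validate_idle_metadata(desktop_file="SOURCES/idle3.desktop",
                           appdata_file="SOURCES/idle3.appdata.xml"):
    # Raises CalledProcessError if the .desktop file violates the spec.
    subprocess.run(["desktop-file-validate", desktop_file], check=True)
    # validate-relax accepts older AppData styles; --nonet skips screenshot fetching.
    subprocess.run(["appstream-util", "validate-relax", "--nonet", appdata_file],
                   check=True)

if __name__ == "__main__":
    validate_idle_metadata()

In the real packaging workflow this is typically a plain desktop-file-validate / appstream-util invocation in the build scriptlets rather than a Python wrapper.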
diff --git a/SOURCES/idle3.desktop b/SOURCES/idle3.desktop new file mode 100644 index 0000000..dc1d3c3 --- /dev/null +++ b/SOURCES/idle3.desktop @@ -0,0 +1,11 @@ +[Desktop Entry] +Version=1.0 +Name=IDLE 3 +Comment=Python 3 Integrated Development and Learning Environment +Exec=idle3 %F +TryExec=idle3 +Terminal=false +Type=Application +Icon=idle3 +Categories=Development;IDE; +MimeType=text/x-python; \ No newline at end of file diff --git a/SOURCES/import_all_modules_py3_11.py b/SOURCES/import_all_modules_py3_11.py new file mode 100644 index 0000000..3930236 --- /dev/null +++ b/SOURCES/import_all_modules_py3_11.py @@ -0,0 +1,171 @@ +'''Script to perform import of each module given to %%py_check_import +''' +import argparse +import importlib +import fnmatch +import os +import re +import site +import sys + +from contextlib import contextmanager +from pathlib import Path + + +def read_modules_files(file_paths): + '''Read module names from the files (modules must be newline separated). + + Return the module names list or, if no files were provided, an empty list. + ''' + + if not file_paths: + return [] + + modules = [] + for file in file_paths: + file_contents = file.read_text() + modules.extend(file_contents.split()) + return modules + + +def read_modules_from_cli(argv): + '''Read module names from command-line arguments (space or comma separated). + + Return the module names list. + ''' + + if not argv: + return [] + + # %%py3_check_import allows to separate module list with comma or whitespace, + # we need to unify the output to a list of particular elements + modules_as_str = ' '.join(argv) + modules = re.split(r'[\s,]+', modules_as_str) + # Because of shell expansion in some less typical cases it may happen + # that a trailing space will occur at the end of the list. + # Remove the empty items from the list before passing it further + modules = [m for m in modules if m] + return modules + + +def filter_top_level_modules_only(modules): + '''Filter out entries with nested modules (containing dot) ie. 'foo.bar'. + + Return the list of top-level modules. + ''' + + return [module for module in modules if '.' not in module] + + +def any_match(text, globs): + '''Return True if any of given globs fnmatchcase's the given text.''' + + return any(fnmatch.fnmatchcase(text, g) for g in globs) + + +def exclude_unwanted_module_globs(globs, modules): + '''Filter out entries which match the either of the globs given as argv. + + Return the list of filtered modules. + ''' + + return [m for m in modules if not any_match(m, globs)] + + +def read_modules_from_all_args(args): + '''Return a joined list of modules from all given command-line arguments. + ''' + + modules = read_modules_files(args.filename) + modules.extend(read_modules_from_cli(args.modules)) + if args.exclude: + modules = exclude_unwanted_module_globs(args.exclude, modules) + + if args.top_level: + modules = filter_top_level_modules_only(modules) + + # Error when someone accidentally managed to filter out everything + if len(modules) == 0: + raise ValueError('No modules to check were left') + + return modules + + +def import_modules(modules): + '''Procedure to perform import check for each module name from the given list of modules. + ''' + + for module in modules: + print('Check import:', module, file=sys.stderr) + importlib.import_module(module) + + +def argparser(): + parser = argparse.ArgumentParser( + description='Generate list of all importable modules for import check.' 
+ ) + parser.add_argument( + 'modules', nargs='*', + help=('Add modules to check the import (space or comma separated).'), + ) + parser.add_argument( + '-f', '--filename', action='append', type=Path, + help='Add importable module names list from file.', + ) + parser.add_argument( + '-t', '--top-level', action='store_true', + help='Check only top-level modules.', + ) + parser.add_argument( + '-e', '--exclude', action='append', + help='Provide modules globs to be excluded from the check.', + ) + return parser + + +@contextmanager +def remove_unwanteds_from_sys_path(): + '''Remove cwd and this script's parent from sys.path for the import test. + Bring the original contents back after import is done (or failed) + ''' + + cwd_absolute = Path.cwd().absolute() + this_file_parent = Path(__file__).parent.absolute() + old_sys_path = list(sys.path) + for path in old_sys_path: + if Path(path).absolute() in (cwd_absolute, this_file_parent): + sys.path.remove(path) + try: + yield + finally: + sys.path = old_sys_path + + +def addsitedirs_from_environ(): + '''Load directories from the _PYTHONSITE environment variable (separated by :) + and load the ones already present in sys.path via site.addsitedir() + to handle .pth files in them. + + This is needed to properly import old-style namespace packages with nspkg.pth files. + See https://bugzilla.redhat.com/2018551 for a more detailed rationale.''' + for path in os.getenv('_PYTHONSITE', '').split(':'): + if path in sys.path: + site.addsitedir(path) + + +def main(argv=None): + + cli_args = argparser().parse_args(argv) + + if not cli_args.modules and not cli_args.filename: + raise ValueError('No modules to check were provided') + + modules = read_modules_from_all_args(cli_args) + + with remove_unwanteds_from_sys_path(): + addsitedirs_from_environ() + import_modules(modules) + + +if __name__ == '__main__': + main() diff --git a/SOURCES/macros.python3.11 b/SOURCES/macros.python3.11 new file mode 100644 index 0000000..adf5837 --- /dev/null +++ b/SOURCES/macros.python3.11 @@ -0,0 +1,77 @@ +%__python3 /usr/bin/python3.11 +%python3_pkgversion 3.11 + +# The following are macros from macros.python3 in Fedora that are newer/different than those in the python3-rpm-macros package in RHEL. +# These macros overwrite/supercede some of the macros in the python3-rpm-macros package in RHEL. 
+ +# nb: $RPM_BUILD_ROOT is not set when the macros are expanded (at spec parse time) +# so we set it manually (to empty string), making our Python prefer the correct install scheme location +# platbase/base is explicitly set to %%{_prefix} to support custom values, such as /app for flatpaks +%python3_sitelib %(RPM_BUILD_ROOT= %{__python3} -Ic "import sysconfig; print(sysconfig.get_path('purelib', vars={'platbase': '%{_prefix}', 'base': '%{_prefix}'}))") +%python3_sitearch %(RPM_BUILD_ROOT= %{__python3} -Ic "import sysconfig; print(sysconfig.get_path('platlib', vars={'platbase': '%{_prefix}', 'base': '%{_prefix}'}))") +%python3_version %(RPM_BUILD_ROOT= %{__python3} -Ic "import sys; sys.stdout.write('{0.major}.{0.minor}'.format(sys.version_info))") +%python3_version_nodots %(RPM_BUILD_ROOT= %{__python3} -Ic "import sys; sys.stdout.write('{0.major}{0.minor}'.format(sys.version_info))") +%python3_platform %(RPM_BUILD_ROOT= %{__python3} -Ic "import sysconfig; print(sysconfig.get_platform())") +%python3_platform_triplet %(RPM_BUILD_ROOT= %{__python3} -Ic "import sysconfig; print(sysconfig.get_config_var('MULTIARCH'))") +%python3_ext_suffix %(RPM_BUILD_ROOT= %{__python3} -Ic "import sysconfig; print(sysconfig.get_config_var('EXT_SUFFIX'))") +%python3_cache_tag %(RPM_BUILD_ROOT= %{__python3} -Ic "import sys; print(sys.implementation.cache_tag)") + +%_py3_shebang_s s +%_py3_shebang_P %(RPM_BUILD_ROOT= %{__python3} -Ic "import sys; print('P' if hasattr(sys.flags, 'safe_path') else '')") +%py3_shbang_opts -%{?_py3_shebang_s}%{?_py3_shebang_P} + +%py3_install() %{expand:\\\ + CFLAGS="${CFLAGS:-${RPM_OPT_FLAGS}}" LDFLAGS="${LDFLAGS:-${RPM_LD_FLAGS}}"\\\ + %{__python3} %{py_setup} %{?py_setup_args} install -O1 --skip-build --root %{buildroot} --prefix %{_prefix} %{?*} + rm -rfv %{buildroot}%{_bindir}/__pycache__ +} + +%py3_install_egg() %{expand:\\\ + mkdir -p %{buildroot}%{python3_sitelib} + %{__python3} -m easy_install -m --prefix %{buildroot}%{_prefix} -Z dist/*-py%{python3_version}.egg %{?*} + rm -rfv %{buildroot}%{_bindir}/__pycache__ +} + +%py3_install_wheel() %{expand:\\\ + %{__python3} -m pip install -I dist/%{1} --root %{buildroot} --prefix %{_prefix} --no-deps --no-index --no-warn-script-location + rm -rfv %{buildroot}%{_bindir}/__pycache__ + for distinfo in %{buildroot}%{python3_sitelib}/*.dist-info %{buildroot}%{python3_sitearch}/*.dist-info; do + if [ -f ${distinfo}/direct_url.json ]; then + rm -fv ${distinfo}/direct_url.json + sed -i '/direct_url.json/d' ${distinfo}/RECORD + fi + done +} + +# With $PATH and $PYTHONPATH set to the %%buildroot, +# try to import the Python 3 module(s) given as command-line args or read from file (-f). +# Respect the custom values of %%py3_shebang_flags or set nothing if it's undefined. +# Filter and check import on only top-level modules using -t flag. +# Exclude unwanted modules by passing their globs to -e option. +# Useful as a smoke test in %%check when running tests is not feasible. +# Use spaces or commas as separators if providing list directly. +# Use newlines as separators if providing list in a file. +%py3_check_import(e:tf:) %{expand:\\\ + PATH="%{buildroot}%{_bindir}:$PATH"\\\ + PYTHONPATH="${PYTHONPATH:-%{buildroot}%{python3_sitearch}:%{buildroot}%{python3_sitelib}}"\\\ + _PYTHONSITE="%{buildroot}%{python3_sitearch}:%{buildroot}%{python3_sitelib}"\\\ + PYTHONDONTWRITEBYTECODE=1\\\ + %{lua: + local command = "%{__python3} " + if rpm.expand("%{?py3_shebang_flags}") ~= "" then + command = command .. 
"-%{py3_shebang_flags}" + end + command = command .. " %{_rpmconfigdir}/redhat/import_all_modules_py3_11.py " + -- handle multiline arguments correctly, see https://bugzilla.redhat.com/2018809 + local args=rpm.expand('%{?**}'):gsub("[%s\\\\]*%s+", " ") + print(command .. args) + } +} + +%pytest %{expand:\\\ + CFLAGS="${CFLAGS:-${RPM_OPT_FLAGS}}" LDFLAGS="${LDFLAGS:-${RPM_LD_FLAGS}}"\\\ + PATH="%{buildroot}%{_bindir}:$PATH"\\\ + PYTHONPATH="${PYTHONPATH:-%{buildroot}%{python3_sitearch}:%{buildroot}%{python3_sitelib}}"\\\ + PYTHONDONTWRITEBYTECODE=1\\\ + %{?__pytest_addopts:PYTEST_ADDOPTS="${PYTEST_ADDOPTS:-} %{__pytest_addopts}"}\\\ + %__pytest} diff --git a/SOURCES/pgp_keys.asc b/SOURCES/pgp_keys.asc new file mode 100644 index 0000000..11dccb8 --- /dev/null +++ b/SOURCES/pgp_keys.asc @@ -0,0 +1,109 @@ +-----BEGIN PGP PUBLIC KEY BLOCK----- + +mQINBFq+ToQBEADRYvIVtbK6owynD3j3nxwpW2KEk/p+aDvtXmc2SR2dBcZ8sFW2 +R5vEsG8d3/D3wgv5pcL3KfNNXQYUnXVbobrFUUWQYc79qIsE3MgiPf5NVOtwKPUR +i5g9YJgKvpBxkQfqp3LYGm9ZBtwo3DVLA3yn7KsazCmAgTNFJYw7ku1XxgmIzY6K +5J30DfbJiqDqj4f9GslCCCCH3qiPnuLG/HUyVLHMpbWlaiy9NI0GcaLxjJewHj9w +W2D2lydkxe5JGo7egUkV3ILcuLVSVKA35SKY27dYqfuyqp9tAzaRbjDYjsYdHA6G +BqrNrKBn/GwlFDPrVdcvN3ZSY2wMLTxWE3Axc/FweuHxFnou/80FwX7F3JD+oEQ6 +rofmcxOBCC7J98I7HZAhP9jBn88XIS2hztbLq8d6rZJZRtcz0k61VR0ddO+TrFmf +9rMYCPgCckRtVxeFIVIabrN1IzKynLFeo040h8hSGswd6YKDOVwjJY6Oa6EmVefZ +a8QSt4+M65RSzH6SEPY008F3nJUAK6MEkzTak+tFltZNrVWu8p2xd1j9nmxAwEhZ +/lgbxLqzYgaUWmfyHeZ8yVA0MhHzdiAL8nVUEdG3KecIq0RWCJLGLWWIjd6KAJl1 +yAmhRYKK/sjPDsL3elHsFACfZbyx3o5GGQNlas1FYoPLWbaNGaJtgFTF2QARAQAB +tCtQYWJsbyBHYWxpbmRvIFNhbGdhZG8gPHBhYmxvZ3NhbEBnbWFpbC5jb20+iQJO +BBMBCgA4FiEEoDXIwZIZuoIezqhrZOYo+NaEaW0FAlq+ToQCGwMFCwkIBwMFFQoJ +CAsFFgIDAQACHgECF4AACgkQZOYo+NaEaW2bmA/+PXIap2udLoUVOHxnsIBdqYwp +sv1Aj5lfIJmNhmxPbHShwp1Jg+w4urxe+2Dj5ofKVlIo1i83bQkvnKJMDXDVuc/K +P6zqhBJ3rT4Q3qx2mzX8bIfQoJ2JHuH4lkP+I7doDcHHRyeNASyk72VdQmU4twNw +Ibn8nSNV6ThKHdoPYzVnO2rZUFcGIqH5HNsvR+B7cc1MBCHsgURYwSVhSePIFGlZ +iasdBD6QQkDSe4QWi7AcJFWFElw4kbOKJWxAWsrEk+tMXJVGRjnmL289EmPCx/vx +BqKy7Mse0yWCSRR3vB+O6TB1S5SgEyEgqlYsfGNv1qf/rfRD4KkyCbNU3LhY1Aim +vJP4pDW+KFxTk2Ks8vrx8gOSd2aFqPeO/pFDrpsF7PD62XwsfoXu4xc5V0Giw7r1 +Nai0nax7kOrldNF8TbbtRjW0jmoC7wLIDujAkwDIOroZ0CXA3N4HVHdSbrHm/urX +nyxJXupXAQNwGx64JCBcbF2fp3Kvu1VAXBEFnd01KaopthHcbG5pA50Kl2Vhe+98 +OdezUX42fHkQpQkB7HgtXfm6W1bw6YRBamrNvs1OoHBYmUjlECpe566IIu25Hc8s +x3qA+6eca7iqizyLG+WyMT8ZIYTWGAS59jxwR4esqGczbbZPSAPHFwLbGv7Wr0Rd +TPu5B0FcKpDkTd4IxQW5Ag0EWr5O2gEQAMjLe4CtbSfofmJrz5wfNkMVsZ81Gbqe +MoYd3dtkJnQYERUj8flzBj3ucaxGJ+Cuf7ybh3naPopKvEI1q0vkcgCDqrEgXK// +jKJbP28uPSMGhOG28q4PbamG55gy5FtM3ezzAxPWWKe9qBpV65GMmFy7eBQx2iJs +yiDIOOQQ4kraS+cTqNFimEXAGLCOQRNLcwIZzwAAHoW7HEpNUfVwaBD9kMlbo1ND +I60IKcNrNcmcmRxhJqfxjj8YBMwcKHO6GBE3AVpaE/+UO9zyr4TH+0YuQUgxKlPW +Dkg5XlkDo0S1GyLY5e9ckIDIlkTdDa2pOkoE2yB5MQCEga3YiHrKUVTTWaxn9XVJ +6x5ZjUF6bgSWGkrG5dUqSYoO1iDMuNVjtiujNyf/rvfj5cNxS7/lgxchhQKZHZXL +WVqxlneeVJ6s0P4+ROVG9ga2Sve7aUJ6wXIewZwulBcV2sE/W/DgxHgLBi53CUQt +vEzFzKvo48GnDqL5VYjA7l0HMYHd4GksCLi8E8U6Cgj+imXiM8voL7pHRZfs8mY8 +udR+UT4e1Scl2MYP2qBJ9/17B/X52B3s1EZdqI/r+hfOyqrhPs+dbAN0mtMPn68+ +nrvY1+nscvrSYEP6ZBlc9Hp2mgJdb6IcTvINXBEeLRjgc3pjViva443pkiFp9Axm +ecOckMKP3uSlABEBAAGJBGwEGAEKACAWIQSgNcjBkhm6gh7OqGtk5ij41oRpbQUC +Wr5O2gIbAgJACRBk5ij41oRpbcF0IAQZAQoAHRYhBM/cokWxBDzypfl4Zf/odAQW +i9hHBQJavk7aAAoJEP/odAQWi9hHr7YP/RCLre1CmOoWYpAtoa1yVCeYMDV6eQgL +B488/BEZHQE1zbrYy16XkhORob3JF/kUMjmJW7XaFF8FrWvRcdj/xaUGbOOEulKg +v+8zWfswYQRiZ4/JlwER4vRLi6fTE89MVER6Fkj2ASD4D2cifY+EztD4flV3sq3s 
+vIogGFaN9IvdrdeptOVGXs1RmAyoTsiS2mKQ6xsGh8B9ZAm55W8fBOGiSzLX21Xk +Ofdw53BrFQxn3cu/JgIKpdeZxgukcvEAI62B6X+YL6Na4j0eqEGLzsNtU1+xeJlo +WtVvmRwnRHGSxF6fzIZ3mk/p/aFiXAEq/xITCTY6tDv7x7pFE/RpdlJZyNJ+R5Y4 +SQiuDsylxNCa/4G5EB6q+7iVYtbEQ9MnZg2phowEE42tlj0rz8/rvDK3LH3xibot +KHIodCWKlWByxH99u2PuHUQ0c1oCVBUE1KkruMpvI236DpU/dvdq4JLSg/fWrys/ +VIjqLZgsIE5g/KO9XqngWHkLcBLh4CNAmHJ8Iia+s+/rfgsejQWB5uJb6eYg2JjB +4WP1EI0rULM6fdrCNB+MJ36wE2Lnb4bfT0phOMgjjH5/Ki7ZCbkxkOsBs4SRjiS+ +weCsmpAtMqodWY/Cnw9pWSA/qLSRD5/mKeb9SO6OZ/OPfAatwnGHsvZ2sAueC6rR +04W5BfXZWrnJUXQP/id/EKE1Ksp5fKoxSCbkKTCig+Sf5Afwe36yFN+niZBqzn5b +BgL/HIKaZM97oDHersPPANeEgS+JVlBf95iKIYnQbZP43FLVbvOuaINhBIVtFO54 +2Y7EYwl41kP7ILDElVy36KAmdQyBAfrjnZiRA70xShOxApLug1L0lxhR3YfmLwNi +RJ0V6KnYDKf0pfdhO9VFyFFWUojX1usn2SmSsXNizsNtvRqHXzPnX0rbJzZ9+N4O +9k1nxygYFG/2R/jGonVmTjRzcAHrAkNJETMWXMA7/8wRMDwluz8j+cCldey9x8Vk +JwgLGnZSbQtVpcFAnm5r/36Gt+9wc1VWMyrUrVr6Z679aqAbG7PMaeR5h5ygMj1k +VqRTYAUPSk1f8bZKRssQkQwEbp9dVIjm9SsR8VT7/tB+UuB85dABxgHfv3psJRT+ +tL8g9V7kSZqQfcLNGmvEVvr2Zl9NtxwXtsFM2OBprxCenwb+e9Ppm1LjfJG/NE72 +mAnOERfDaiLt4bqNo36Ei5sGCJ4Fx61phzNBXzkdRNM47i8J5UZRKFkE91c99BVM +HKUaY61NRK24fR0zP98ftDU82YFw0VRFJpTeBrO5ivN1MlQxUPzUWxKxMxO+20wa +UOXroEw11Tb4SRLGOla1pCl6lCUPJRy9IzadPDgTr/OTMkob/snt/XLdnV5/uQIN +BFq+TvoBEAC8Oy1g6pPWBbrCMhIq7VWY2fjylJ1fwg5BPXkOKVK1dsGYO4QD7oW9 +L0aSqcFSNFGF9Cl0Ri4TFXZC3hnG4HeSXUWApuKdBLn21H3jba36Ay1oGcGfdm0v +Zght4c6BlMVBpGCw2wIkJbUNEy6InMM+O8CCbbaH3iJkJ4141P7pODHignx5AmZI +conMui4YOhC+IXQXynVEv1Juk7erB1Nh1RcRvsA4lb44HWx49lIwe85ejOmoZ0O3 +6f9NJRer6bV0+rHWmg4IV5Q9h/Gn4IhEDZxA0DZl1RQI7dMgaMbIFbXGq7Kgzstz +EUnOoy29hXodxVmwIsMrAiQUYtwJ9hW+ESsw47+W2iPHVgviGWl7r/SgcgMYmf6m +5kiTBtwU7BQPS9G3zwwP2Rm3AA/6g39Q+tQKjOwi1I8+GZsY2On44Zly7BreBNg5 +4gJgdAGcMOYU9etr050clH3UpTYcAEtX++ahtOKhJgLIPNcIAQNlnifqvU0VYpgw +R4YpZ7hgg+AVDzC73PIM0lFI0XiDuqChbxE+K1jmLXWe5iJF0dzgVTwP+PmsifNZ +Wg3+YxSsS+hDMPQ2xPiQN49gT4JJDHcDuyhHyCGYgyMiVJCsku9KrkubbfVRivyN +ZF2Zfo3f+nbrRxsftz0yjAq8byCvb0V0XOpt4pJ/ddlug9ytRxALNwARAQABiQI2 +BBgBCgAgFiEEoDXIwZIZuoIezqhrZOYo+NaEaW0FAlq+TvoCGwwACgkQZOYo+NaE +aW3urA//UQ/cKQ7HvWjcLphzQOZc+6m5YL0wxvZkSjemU7mqjZdpacteIvRAoers +EqXHc208liIBtNfRzoreXdcXNzie65xXkrRnWoHVH/fTWy4lOnHr2CMXLeHjUgg/ +M6PYi8+sARm05YFB8nsYhlhx3IdLhcfeVVbJedQKO0yL3CK1okT30DUVq5Lq6X/K +DC6AxuJR3D6UMSoT0WLaoX8qbhAp88qLynInfBVL18d97h916WPLTPeP0eHwhwND +bYtKDCMDuKQ9XX5+QsNH0RmbxlX274LHrUMMvkLKxcfCBvP+iuqrBeIuoeVzXYJZ +j7ZJtEH79bW44eecl/CY/STFYgSQ2XGTp2BI2q60wAmtKlNhwxY5ena0FgyFl6Tm +5OBHW/Pwo+ndQJGfbrCyWkTgRay9c8er3gl3GQYIBH6X0kCiG7h/Epj0b5CHOPU5 +hCw0kEB8MB4poTIjeiY+Q01472/lQ68CL3DX158hR5d3XaPSIxAN+qFsfB1o316p +yjxhfK1MD/IfrOgjlggPPnc/KmLkCzpgdwKcZwLCdZq9hYBvF1Zs34HbaVMYbWTK +uxLowtXGU43vatCXXqmPOvl4/g4tZD6rysJDgOrHQnEHzT+Napn07s0BRC0IbbNn +FynUrkr5KMSuRz7Hg7xMApENOrb0nqdHSUJ914ZpuMIS6RhJgGu5Ag0EWr5PIAEQ +ALfh9vPD2B+miHDTMADI8aRZ7g9tnzynZYkk3+2sCiiusetsQQ+HIPJ/ASEJB7On +ane9dyT/LTRhrK9qaxgVMimk2COXB/xyh7Mnw7nJgFU0aRSbtX0vbvQz2suSzrQ6 +9mPKzan28JGoClqB0bw1vwf3VjjxHV2dgD57CmqFPv7kAC/2a56dE+etzXattZAL ++2JWTpmfQ0ePRRadtBm0VahQhnU8x0+jvAVrEawqpVW83ozYFyW/0WInM2J7jHgQ +16OosY4lj5L/DxpVxaArhRFoRfWPXfC37iE8Mou/I95isvPQIhp1wTo4jG0KM02B +oIVbp/QRNBQ6WtpOzvJs1gqQiJJTfqbKJXQ3NDEY9crpVS83HJ+Zv99PNsyNkFjG +QpU84U3ZhsI4ygjdY45mpZueqI1RVcRQdu8Hgvoo/78Q/Sir6gMGop3mVdVo2guI +kFcJrXh0Xk3ech4aVqrmKx/mPXGwOAQU0DAul4RW3fKg1QxQE7Tlw3+95Ee/+q5j +HARL0uDbCJpRO8Sl8NDEuL32n/2Ot6kQeCSHrU7KJRYAkTxkKvr8zNow7hFhHFPE +SnHvTnskI6noh0VY6NwMhmLvhm0wKkRxZPzUNc3sgLvbK1NymIZ9aKCZamzhZrmG +vnblEz/OSLwGUua465H3hM1vvBQiartj7+6ZqWIkSmBPABEBAAGJAjYEGAEKACAW 
+IQSgNcjBkhm6gh7OqGtk5ij41oRpbQUCWr5PIAIbIAAKCRBk5ij41oRpbWmeEACG ++axtDC8UoNp9ORiYwEWLzZWDuugE+ah7DYYGD4Vs633FXVZW3SgM/bFtJ/0Lg8CF +74jI4LMHyIjDzEjcoItwnhBLix+kUoJTvrY58GPydwekLuw1p4KXLqtRs4fsZbNQ +YTknl4jYtRWoxO98x7tun7Gq2gqmJkIB2uj630fKz5cBk6p6oDFKjzyrHe+V7BiK +3okQPaD4x7hq8OnTy7lOy92ZZAqztS4tNEb4DkYW1MpuwsJ7hbBZitc1siI+FVVb +GjVVGZz6ssXoW67Tz8+VxdWJxNLXlv27eMcj4sme5S0th/YYNA5fRRv6zuzqZAru +YNGLpYYU7JLvZJ+3lCwa5j5ycOGBF0GvsGs6gj6h+CHkjR/BgzAgWC+GgUgslt6q +aH04rWtV6rVz+Y91LcrX5P6OM4anmXD3Gp3kl35AypXb4KyASF19+11RUziD4Z7q +wQEWfbwOltNyZv2lD8s2jPr7P02axWRQUbZAEhxRmvOQev/FZPyCF6gqUo/HxRbQ +y3bzmnipyHSv1DlXNfCFCHvN8kGyZnRWARqIKRg+j9ediJgOUqlLhg6KmrTVxd5v +3Dfv52PW2UODDTM20s3cQGuX/UswzMRwPI/+P44iCMwEKdm7duM/5oisZT9Vhy7g +P15MreFZLcZvUVgjqgy0u57cstyGK1Bo9e2sFcK2fA== +=6Zb4 +-----END PGP PUBLIC KEY BLOCK----- diff --git a/SPECS/python3.11.spec b/SPECS/python3.11.spec new file mode 100644 index 0000000..eb79a67 --- /dev/null +++ b/SPECS/python3.11.spec @@ -0,0 +1,1935 @@ +%global __python3 /usr/bin/python3.11 +%global python3_pkgversion 3.11 + +# ================== +# Top-level metadata +# ================== + +%global pybasever 3.11 + +# pybasever without the dot: +%global pyshortver 311 + +Name: python%{pybasever} +Summary: Version %{pybasever} of the Python interpreter +URL: https://www.python.org/ + +# WARNING When rebasing to a new Python version, +# remember to update the python3-docs package as well +%global general_version %{pybasever}.7 +#global prerel ... +%global upstream_version %{general_version}%{?prerel} +Version: %{general_version}%{?prerel:~%{prerel}} +Release: 1%{?dist} +License: Python + + +# ================================== +# Conditionals controlling the build +# ================================== + +# Note that the bcond macros are named for the CLI option they create. +# "%%bcond_without" means "ENABLE by default and create a --without option" + +# RHEL: Disabled by default +%bcond_with main_python + +# If this is *not* Main Python, should it contain `Provides: python(abi) ...`? +# In Fedora no package shall depend on an alternative Python via this tag, so we do not provide it. +# In ELN/RHEL/CentOS we want to allow building against alternative stacks, so the Provide is enabled. +%if 0%{?fedora} +%bcond_with python_abi_provides_for_alt_pythons +%else +%bcond_without python_abi_provides_for_alt_pythons +%endif + +# When bootstrapping python3, we need to build setuptools. +# but setuptools BR python3-devel and that brings in python3-rpm-generators; +# python3-rpm-generators needs python3-setuptools, so we cannot have it yet. +# +# We also use the previous build of Python in "make regen-all" +# and in "distutils.tests.test_bdist_rpm". +# +# Procedure: https://fedoraproject.org/wiki/SIGs/Python/UpgradingPython +# +# IMPORTANT: When bootstrapping, it's very likely the wheels for pip and +# setuptools are not available. Turn off the rpmwheels bcond until +# the two packages are built with wheels to get around the issue. +%bcond_with bootstrap + +# Whether to use RPM build wheels from the python-{pip,setuptools}-wheel package +# Uses upstream bundled prebuilt wheels otherwise +%bcond_without rpmwheels +# If the rpmwheels condition is disabled, we use the bundled wheel packages +# from Python with the versions below. +# This needs to be manually updated when we update Python. 
+%global pip_version 23.2.1 +%global setuptools_version 65.5.0 + +# Expensive optimizations (mainly, profile-guided optimizations) +%bcond_without optimizations + +# Run the test suite in %%check +%bcond_without tests + +# Extra build for debugging the interpreter or C-API extensions +# (the -debug subpackages) +%bcond_without debug_build + +# Support for the GDB debugger +%bcond_without gdb_hooks + +# The dbm.gnu module (key-value database) +%bcond_without gdbm + +# Main interpreter loop optimization +%bcond_without computed_gotos + +# Support for the Valgrind debugger/profiler +%ifarch %{valgrind_arches} +%bcond_without valgrind +%else +%bcond_with valgrind +%endif + +# ===================== +# General global macros +# ===================== +%if %{with main_python} +%global pkgname python3 +%global exename python3 +%else +%global pkgname python%{pybasever} +%global exename python%{pybasever} +%endif + +%global pylibdir %{_libdir}/python%{pybasever} +%global dynload_dir %{pylibdir}/lib-dynload + +# ABIFLAGS, LDVERSION and SOABI are in the upstream configure.ac +# See PEP 3149 for some background: http://www.python.org/dev/peps/pep-3149/ +%global ABIFLAGS_optimized %{nil} +%global ABIFLAGS_debug d + +%global LDVERSION_optimized %{pybasever}%{ABIFLAGS_optimized} +%global LDVERSION_debug %{pybasever}%{ABIFLAGS_debug} + +# We use the upstream arch triplets, we convert them from %%{_arch}-linux%%{_gnu} +%global platform_triplet %{expand:%(echo %{_arch}-linux%{_gnu} | sed -E \\ + -e 's/^arm(eb)?-linux-gnueabi$/arm\\1-linux-gnueabihf/' \\ + -e 's/^mips64(el)?-linux-gnu$/mips64\\1-linux-gnuabi64/' \\ + -e 's/^ppc(64)?(le)?-linux-gnu$/powerpc\\1\\2-linux-gnu/')} + +%global SOABI_optimized cpython-%{pyshortver}%{ABIFLAGS_optimized}-%{platform_triplet} +%global SOABI_debug cpython-%{pyshortver}%{ABIFLAGS_debug}-%{platform_triplet} + +# All bytecode files are in a __pycache__ subdirectory, with a name +# reflecting the version of the bytecode. +# See PEP 3147: http://www.python.org/dev/peps/pep-3147/ +# For example, +# foo/bar.py +# has bytecode at: +# foo/__pycache__/bar.cpython-%%{pyshortver}.pyc +# foo/__pycache__/bar.cpython-%%{pyshortver}.opt-1.pyc +# foo/__pycache__/bar.cpython-%%{pyshortver}.opt-2.pyc +%global bytecode_suffixes .cpython-%{pyshortver}*.pyc + +# Python's configure script defines SOVERSION, and this is used in the Makefile +# to determine INSTSONAME, the name of the libpython DSO: +# LDLIBRARY='libpython$(VERSION).so' +# INSTSONAME="$LDLIBRARY".$SOVERSION +# We mirror this here in order to make it easier to add the -gdb.py hooks. +# (if these get out of sync, the payload of the libs subpackage will fail +# and halt the build) +%global py_SOVERSION 1.0 +%global py_INSTSONAME_optimized libpython%{LDVERSION_optimized}.so.%{py_SOVERSION} +%global py_INSTSONAME_debug libpython%{LDVERSION_debug}.so.%{py_SOVERSION} + +# Disable automatic bytecompilation. The python3 binary is not yet be +# available in /usr/bin when Python is built. Also, the bytecompilation fails +# on files that test invalid syntax. +%undefine py_auto_byte_compile + +%if %{with main_python} +# To keep the upgrade path clean, we Obsolete python3.X from the python3 +# package and python3.X-foo from individual subpackages. +# Note that using Obsoletes without package version is not standard practice. +# Here we assert that *any* version of the system's default interpreter is +# preferable to an "extra" interpreter. For example, python3-3.6.1 will +# replace python3.6-3.6.2. 
+%define unversioned_obsoletes_of_python3_X_if_main() %{expand:\ +Obsoletes: python%{pybasever}%{?1:-%{1}}\ +} +%else +%define unversioned_obsoletes_of_python3_X_if_main() %{nil} +%endif + +# ======================= +# Build-time requirements +# ======================= + +# (keep this list alphabetized) + +BuildRequires: autoconf +BuildRequires: bluez-libs-devel +BuildRequires: bzip2 +BuildRequires: bzip2-devel +BuildRequires: desktop-file-utils +BuildRequires: expat-devel + +BuildRequires: findutils +BuildRequires: gcc-c++ +%if %{with gdbm} +BuildRequires: gdbm-devel +%endif +BuildRequires: git-core +BuildRequires: glibc-all-langpacks +BuildRequires: glibc-devel +BuildRequires: gmp-devel +BuildRequires: gnupg2 +BuildRequires: libappstream-glib +# libb2 is not included in RHEL, we utilize +# the internal implementation of blake2 +#BuildRequires: libb2-devel +BuildRequires: libffi-devel +BuildRequires: libnsl2-devel +BuildRequires: libtirpc-devel +BuildRequires: libGL-devel +BuildRequires: libuuid-devel +BuildRequires: libX11-devel +BuildRequires: make +BuildRequires: mpdecimal-devel +BuildRequires: ncurses-devel + +BuildRequires: openssl-devel +BuildRequires: pkgconfig +BuildRequires: readline-devel +BuildRequires: redhat-rpm-config +BuildRequires: sqlite-devel +BuildRequires: gdb + +BuildRequires: tar +BuildRequires: tcl-devel +BuildRequires: tix-devel +BuildRequires: tk-devel +BuildRequires: tzdata + +%if %{with valgrind} +BuildRequires: valgrind-devel +%endif + +BuildRequires: xz-devel +BuildRequires: zlib-devel + +BuildRequires: /usr/bin/dtrace + +# workaround http://bugs.python.org/issue19804 (test_uuid requires ifconfig) +BuildRequires: /usr/sbin/ifconfig + +%if %{with rpmwheels} +BuildRequires: %{python_wheel_pkg_prefix}-setuptools-wheel +BuildRequires: %{python_wheel_pkg_prefix}-pip-wheel +%endif + +%if %{without bootstrap} +# for make regen-all and distutils.tests.test_bdist_rpm +# Note that we're not using the %%{pkgname} macro here on purpose, because when +# upgrading the main python3 to a new Python version, this would pull in the +# old version instead. 
+BuildRequires: python%{pybasever} +%endif + +%if %{without bootstrap} || %{without main_python} +# Generators run on the main Python 3 so we can take this dependency out of the bootstrap loop +BuildRequires: python3-rpm-generators +%endif + +# ======================= +# Source code and patches +# ======================= + +Source0: %{url}ftp/python/%{general_version}/Python-%{upstream_version}.tar.xz +Source1: %{url}ftp/python/%{general_version}/Python-%{upstream_version}.tar.xz.asc +# The release manager for Python 3.11 is pablogsal +Source2: https://keybase.io/pablogsal/pgp_keys.asc + +# Sources for the python3.11-rpm-macros +Source3: macros.python3.11 +Source4: import_all_modules_py3_11.py + +# A simple script to check timestamps of bytecode files +# Run in check section with Python that is currently being built +# Originally written by bkabrda +Source8: check-pyc-timestamps.py + +# Desktop menu entry for idle3 +Source10: idle3.desktop + +# AppData file for idle3 +Source11: idle3.appdata.xml + +# (Patches taken from github.com/fedora-python/cpython) + +# 00001 # d06a8853cf4bae9e115f45e1d531d2dc152c5cc8 +# Fixup distutils/unixccompiler.py to remove standard library path from rpath +# Was Patch0 in ivazquez' python3000 specfile +Patch1: 00001-rpath.patch + +# 00251 # af0f1ba72e01cb93371ff21fb7ca889daa43fa7a +# Change user install location +# +# Set values of base and platbase in sysconfig from /usr +# to /usr/local when RPM build is not detected +# to make pip and similar tools install into separate location. +# +# Set values of prefix and exec_prefix in distutils install command +# to /usr/local if executable is /usr/bin/python* and RPM build +# is not detected to make distutils and pypa/distutils install into separate location. +# +# Fedora Change: https://fedoraproject.org/wiki/Changes/Making_sudo_pip_safe +# Downstream only. +# +# We've tried to rework in Fedora 36/Python 3.10 to follow https://bugs.python.org/issue43976 +# but we have identified serious problems with that approach, +# see https://bugzilla.redhat.com/2026979 or https://bugzilla.redhat.com/2097183 +# +# pypa/distutils integration: https://github.com/pypa/distutils/pull/70 +Patch251: 00251-change-user-install-location.patch + +# 00328 # 318e500c98f5e59eb1f23e0fcd32db69b9bd17e1 +# Restore pyc to TIMESTAMP invalidation mode as default in rpmbuild +# +# Since Fedora 31, the $SOURCE_DATE_EPOCH is set in rpmbuild to the latest +# %%changelog date. This makes Python default to the CHECKED_HASH pyc +# invalidation mode, bringing more reproducible builds traded for an import +# performance decrease. To avoid that, we don't default to CHECKED_HASH +# when $RPM_BUILD_ROOT is set (i.e. when we are building RPM packages). 
+#
+# See https://src.fedoraproject.org/rpms/redhat-rpm-config/pull-request/57#comment-27426
+# Downstream only: only used when building RPM packages
+# Ideally, we should talk to upstream and explain why we don't want this
+Patch328: 00328-pyc-timestamp-invalidation-mode.patch
+
+# 00329 #
+# Support OpenSSL FIPS mode
+# - In FIPS mode, OpenSSL wrappers are always used in hashlib
+# - The "usedforsecurity" keyword argument can be passed to the various digest
+# algorithms in hashlib so that you can whitelist a callsite with
+# "usedforsecurity=False"
+# - OpenSSL wrappers for the hashes blake2{b512,s256},
+# - In FIPS mode, the blake2 hashes use OpenSSL wrappers
+# and do not offer extended functionality (keys, tree hashing, custom digest size)
+#
+# - In FIPS mode, hmac.HMAC can only be instantiated with an OpenSSL wrapper
+# or a string with OpenSSL hash name as the "digestmod" argument.
+# The argument must be specified (instead of defaulting to ‘md5’).
+Patch329: 00329-fips.patch
+
+# 00371 # c1754d9c2750f89cb702e1b63a99201f5f7cff00
+# Revert "bpo-1596321: Fix threading._shutdown() for the main thread (GH-28549) (GH-28589)"
+#
+# This reverts commit 38c67738c64304928c68d5c2bd78bbb01d979b94. It
+# introduced a regression causing FreeIPA's tests to fail.
+#
+# For more info see:
+# https://bodhi.fedoraproject.org/updates/FEDORA-2021-e152ce5f31
+# https://github.com/GrahamDumpleton/mod_wsgi/issues/730
+Patch371: 00371-revert-bpo-1596321-fix-threading-_shutdown-for-the-main-thread-gh-28549-gh-28589.patch
+
+# 00378 #
+# Support expat 2.4.5
+#
+# Curly brackets were never allowed in namespace URIs
+# according to RFC 3986, and so-called namespace-validating
+# XML parsers have the right to reject them as invalid URIs.
+#
+# libexpat >=2.4.5 has become stricter in that regard due to
+# related security issues; with ET.XML instantiating a
+# namespace-aware parser under the hood, this test has no
+# future in CPython.
+#
+# References:
+# - https://datatracker.ietf.org/doc/html/rfc3986
+# - https://www.w3.org/TR/xml-names/
+#
+# Also, test_minidom.py: Support Expat >=2.4.5
+#
+# The patch has diverged from upstream as the python test
+# suite was relying on checking the expat version, whereas
+# in RHEL fixes get backported instead of rebasing packages.
+#
+# Upstream: https://bugs.python.org/issue46811
+Patch378: 00378-support-expat-2-4-5.patch
+
+# 00397 #
+# Filters for tarfile extraction (CVE-2007-4559, PEP-706)
+# The first patch fixes determination of symlink targets, which were treated
+# as relative to the root of the archive,
+# rather than the directory containing the symlink.
+# Not yet upstream as of this writing.
+# The second patch is Red Hat configuration, see KB for documentation:
+# - https://access.redhat.com/articles/7004769
+Patch397: 00397-tarfile-filter.patch
+
+# 00415 #
+# [CVE-2023-27043] gh-102988: Reject malformed addresses in email.parseaddr() (#111116)
+#
+# Detect email address parsing errors and return empty tuple to
+# indicate the parsing error (old API). Add an optional 'strict'
+# parameter to getaddresses() and parseaddr() functions. Patch by
+# Thomas Dwyer.
+#
+# Upstream PR: https://github.com/python/cpython/pull/111116
+#
+# The second patch implements the possibility to restore the old behavior via
+# a config file or an environment variable.
+Patch415: 00415-cve-2023-27043-gh-102988-reject-malformed-addresses-in-email-parseaddr-111116.patch + +# (New patches go here ^^^) +# +# When adding new patches to "python" and "python3" in Fedora, EL, etc., +# please try to keep the patch numbers in-sync between all specfiles. +# +# More information, and a patch number catalog, is at: +# +# https://fedoraproject.org/wiki/SIGs/Python/PythonPatches +# +# The patches are stored and rebased at: +# +# https://github.com/fedora-python/cpython + + +# ========================================== +# Descriptions, and metadata for subpackages +# ========================================== + +# Require alternatives version that implements the --keep-foreign flag and fixes rhbz#2203820 +Requires: alternatives >= 1.19.2-1 +Requires(post): alternatives >= 1.19.2-1 +Requires(postun): alternatives >= 1.19.2-1 + +# When the user tries to `yum install python`, yum will list this package among +# the possible alternatives +Provides: alternative-for(python) + +%if %{with main_python} +# Description for the python3X SRPM only: +%description +Python %{pybasever} is an accessible, high-level, dynamically typed, interpreted +programming language, designed with an emphasis on code readability. +It includes an extensive standard library, and has a vast ecosystem of +third-party libraries. + +%package -n %{pkgname} +Summary: Python %{pybasever} interpreter + +# In order to support multiple Python interpreters for development purposes, +# packages with fully versioned naming scheme (e.g. python3.9*) exist for +# non-default versions of Python 3. +# For consistency, we provide python3.X from python3 as well. +Provides: python%{pybasever} = %{version}-%{release} +Provides: python%{pybasever}%{?_isa} = %{version}-%{release} + +%unversioned_obsoletes_of_python3_X_if_main + +# https://fedoraproject.org/wiki/Changes/Move_usr_bin_python_into_separate_package +# https://fedoraproject.org/wiki/Changes/Python_means_Python3 +# We recommend /usr/bin/python so users get it by default +# Versioned recommends are problematic, and we know that the package requires +# python3 back with fixed version, so we just use the path here: +Recommends: %{_bindir}/python +%endif + +# Python interpreter packages used to be named (or provide) name pythonXY (e.g. +# python39). However, to align it with the executable names and to prepare for +# Python 3.10, they were renamed to pythonX.Y (e.g. python3.9, python3.10). We +# provide the previous names. +Provides: python%{pyshortver} = %{version}-%{release} + +%if %{with main_python} || %{with python_abi_provides_for_alt_pythons} +# Packages with Python modules in standard locations automatically +# depend on python(abi). Provide that here. +Provides: python(abi) = %{pybasever} +%else +# We exclude the `python(abi)` Provides +%global __requires_exclude ^python\\(abi\\) = 3\\..+ +%global __provides_exclude ^python\\(abi\\) = 3\\..+ +%endif + +Requires: %{pkgname}-libs%{?_isa} = %{version}-%{release} + +# This prevents ALL subpackages built from this spec to require +# /usr/bin/python3* or python(abi). Granularity per subpackage is impossible. +# It's intended for the libs package not to drag in the interpreter, see +# https://bugzilla.redhat.com/show_bug.cgi?id=1547131 +# https://bugzilla.redhat.com/show_bug.cgi?id=1862082 +# All other packages require %%{pkgname} explicitly. 
+%global __requires_exclude ^(/usr/bin/python3|python\\(abi\\)) + +%description -n %{pkgname} +Python %{pybasever} is an accessible, high-level, dynamically typed, interpreted +programming language, designed with an emphasis on code readability. +It includes an extensive standard library, and has a vast ecosystem of +third-party libraries. + +The %{pkgname} package provides the "%{exename}" executable: the reference +interpreter for the Python language, version 3. +The majority of its standard library is provided in the %{pkgname}-libs package, +which should be installed automatically along with %{pkgname}. +The remaining parts of the Python standard library are broken out into the +%{pkgname}-tkinter and %{pkgname}-test packages, which may need to be installed +separately. + +Documentation for Python is provided in the %{pkgname}-docs package. + +Packages containing additional libraries for Python are generally named with +the "%{pkgname}-" prefix. + +For the unversioned "python" executable, see manual page "unversioned-python". + +%if %{with main_python} +# https://fedoraproject.org/wiki/Changes/Move_usr_bin_python_into_separate_package +# https://fedoraproject.org/wiki/Changes/Python_means_Python3 +%package -n python-unversioned-command +Summary: The "python" command that runs Python 3 +BuildArch: noarch + +# In theory this could require any python3 version +Requires: python3 == %{version}-%{release} +# But since we want to provide versioned python, we require exact version +Provides: python = %{version}-%{release} +# This also save us an explicit conflict for older python3 builds + +# Also provide the name of the Ubuntu package with the same function, +# to be nice to people who temporarily forgot which distro they're on. +# C.f. https://packages.ubuntu.com/hirsute/all/python-is-python3/filelist +Provides: python-is-python3 = %{version}-%{release} + +%description -n python-unversioned-command +This package contains /usr/bin/python - the "python" command that runs Python 3. + +%endif # with main_python + + +%package -n %{pkgname}-libs +Summary: Python runtime libraries + +%if %{with rpmwheels} +Requires: %{python_wheel_pkg_prefix}-setuptools-wheel +Requires: %{python_wheel_pkg_prefix}-pip-wheel +%else +Provides: bundled(python%{python3_pkgversion}dist(pip)) = %{pip_version} +Provides: bundled(python%{python3_pkgversion}dist(setuptools)) = %{setuptools_version} +%endif + +%unversioned_obsoletes_of_python3_X_if_main libs + +# There are files in the standard library that have python shebang. +# We've filtered the automatic requirement out so libs are installable without +# the main package. This however makes it pulled in by default. +# See https://bugzilla.redhat.com/show_bug.cgi?id=1547131 +Recommends: %{pkgname}%{?_isa} = %{version}-%{release} + +# tkinter is part of the standard library, +# but it is torn out to save an unwanted dependency on tk and X11. 
+# we recommend it when tk is already installed (for better UX) +Recommends: (%{pkgname}-tkinter%{?_isa} = %{version}-%{release} if tk%{?_isa}) + +# The zoneinfo module needs tzdata +Requires: tzdata + +%description -n %{pkgname}-libs +This package contains runtime libraries for use by Python: +- the majority of the Python standard library +- a dynamically linked library for use by applications that embed Python as + a scripting language, and by the main "%{exename}" executable + + +%package -n %{pkgname}-devel +Summary: Libraries and header files needed for Python development +Requires: %{pkgname} = %{version}-%{release} +Requires: %{pkgname}-libs%{?_isa} = %{version}-%{release} +# The RPM related dependencies bring nothing to a non-RPM Python developer +# But we want them when packages BuildRequire python3-devel +Requires: (python-rpm-macros if rpm-build) +Requires: (python3-rpm-macros if rpm-build) + +# Require alternatives version that implements the --keep-foreign flag and fixes rhbz#2203820 +Requires(postun): alternatives >= 1.19.2-1 + +# python3.11 installs the alternatives master symlink to which we attach a slave +Requires(post): %{pkgname} +Requires(postun): %{pkgname} + +%unversioned_obsoletes_of_python3_X_if_main devel + +%if %{with main_python} +# Python developers are very likely to need pip +Recommends: %{pkgname}-pip +%endif + +# tox users are likely to need the devel subpackage +Supplements: tox + +%if %{without bootstrap} || %{without main_python} +# Generators run on the main Python 3 so we can take this dependency out of the bootstrap loop +Requires: (python3-rpm-generators if rpm-build) +%endif + +Provides: %{pkgname}-2to3 = %{version}-%{release} + +%if %{with main_python} +Provides: 2to3 = %{version}-%{release} +%endif + +Conflicts: %{pkgname} < %{version}-%{release} + +%description -n %{pkgname}-devel +This package contains the header files and configuration needed to compile +Python extension modules (typically written in C or C++), to embed Python +into other programs, and to make binary distributions for Python libraries. + +It also contains the necessary macros to build RPM packages with Python modules +and 2to3 tool, an automatic source converter from Python 2.X. + + +%package -n %{pkgname}-idle +Summary: A basic graphical development environment for Python +Requires: %{pkgname} = %{version}-%{release} +Requires: %{pkgname}-tkinter = %{version}-%{release} + +%unversioned_obsoletes_of_python3_X_if_main idle + +%if %{with main_python} +Provides: idle3 = %{version}-%{release} +Provides: idle = %{version}-%{release} +%endif + +Provides: %{pkgname}-tools = %{version}-%{release} +Provides: %{pkgname}-tools%{?_isa} = %{version}-%{release} + +# Require alternatives version that implements the --keep-foreign flag and fixes rhbz#2203820 +Requires(postun): alternatives >= 1.19.2-1 + +# python3.11 installs the alternatives master symlink to which we attach a slave +Requires(post): %{pkgname} +Requires(postun): %{pkgname} + +%description -n %{pkgname}-idle +IDLE is Python’s Integrated Development and Learning Environment. 
+
+IDLE has the following features: Python shell window (interactive
+interpreter) with colorizing of code input, output, and error messages;
+multi-window text editor with multiple undo, Python colorizing,
+smart indent, call tips, auto completion, and other features;
+search within any window, replace within editor windows, and
+search through multiple files (grep); debugger with persistent
+breakpoints, stepping, and viewing of global and local namespaces;
+configuration, browsers, and other dialogs.
+
+
+%package -n %{pkgname}-tkinter
+Summary: A GUI toolkit for Python
+Requires: %{pkgname} = %{version}-%{release}
+
+%unversioned_obsoletes_of_python3_X_if_main tkinter
+
+# The importable module "turtle" is here, so provide python3-turtle.
+# (We don't provide python3-turtledemo; that's not too useful when imported.)
+%py_provides %{pkgname}-turtle
+
+%description -n %{pkgname}-tkinter
+The Tkinter (Tk interface) library is a graphical user interface toolkit for
+the Python programming language.
+
+
+%package -n %{pkgname}-test
+Summary: The self-test suite for the main python3 package
+Requires: %{pkgname} = %{version}-%{release}
+Requires: %{pkgname}-libs%{?_isa} = %{version}-%{release}
+
+%unversioned_obsoletes_of_python3_X_if_main test
+
+%description -n %{pkgname}-test
+The self-test suite for the Python interpreter.
+
+This is only useful to test Python itself. For testing general Python code,
+you should use the unittest module from %{pkgname}-libs, or a library such as
+%{pkgname}-pytest.
+
+
+%if %{with debug_build}
+%package -n %{pkgname}-debug
+Summary: Debug version of the Python runtime
+
+# The debug build is an all-in-one package version of the regular build, and
+# shares the same .py/.pyc files and directories as the regular build. Hence
+# we depend on all of the subpackages of the regular build:
+Requires: %{pkgname}%{?_isa} = %{version}-%{release}
+Requires: %{pkgname}-libs%{?_isa} = %{version}-%{release}
+Requires: %{pkgname}-devel%{?_isa} = %{version}-%{release}
+Requires: %{pkgname}-test%{?_isa} = %{version}-%{release}
+Requires: %{pkgname}-tkinter%{?_isa} = %{version}-%{release}
+Requires: %{pkgname}-idle%{?_isa} = %{version}-%{release}
+
+%unversioned_obsoletes_of_python3_X_if_main debug
+
+# Require alternatives version that implements the --keep-foreign flag and fixes rhbz#2203820
+Requires(postun): alternatives >= 1.19.2-1
+
+# python3.11 installs the alternatives master symlink to which we attach a slave
+Requires(post): %{pkgname}
+Requires(postun): %{pkgname}
+
+%description -n %{pkgname}-debug
+python3-debug provides a version of the Python runtime with numerous debugging
+features enabled, aimed at advanced Python users such as developers of Python
+extension modules.
+
+This version uses more memory and will be slower than the regular Python build,
+but is useful for tracking down reference-counting issues and other bugs.
+
+The debug build shares installation directories with the standard Python
+runtime. Python modules -- source (.py), bytecode (.pyc), and C-API extensions
+(.cpython*.so) -- are compatible between this and the standard version
+of Python.
+
+The debug runtime additionally supports debug builds of C-API extensions
+(with the "d" ABI flag) for debugging issues in those extensions.
+%endif # with debug_build
+
+# We package the python3.11-rpm-macros in RHEL8 so as to properly set the
+# %%__python3 and %%python3_pkgversion macros, as well as to provide modern
+# versions of the current base macros.
+%package -n %{pkgname}-rpm-macros +Summary: RPM macros for building RPMs with Python %{pybasever} +License: MIT +Provides: python-modular-rpm-macros == %{pybasever} +Conflicts: python-modular-rpm-macros > %{pybasever} +Requires: python3-rpm-macros +BuildArch: noarch + +%description -n %{pkgname}-rpm-macros +RPM macros for building RPMs with Python %{pybasever} from the python%{pyshortver} module. +If you want to build an RPM against the python%{pyshortver} module, you need to add: + + BuildRequire: %{pkgname}-rpm-macros. + +# ====================================================== +# The prep phase of the build: +# ====================================================== + +%prep +%gpgverify -k2 -s1 -d0 +%autosetup -S git_am -n Python-%{upstream_version} + +%if %{with rpmwheels} +rm Lib/ensurepip/_bundled/pip-%{pip_version}-py3-none-any.whl +rm Lib/ensurepip/_bundled/setuptools-%{setuptools_version}-py3-none-any.whl +%endif + +# Remove all exe files to ensure we are not shipping prebuilt binaries +# note that those are only used to create Microsoft Windows installers +# and that functionality is broken on Linux anyway +find -name '*.exe' -print -delete + +# Remove bundled libraries to ensure that we're using the system copy. +rm -r Modules/expat +rm -r Modules/_decimal/libmpdec + +# Remove files that should be generated by the build +# (This is after patching, so that we can use patches directly from upstream) +rm configure pyconfig.h.in + + +# ====================================================== +# Configuring and building the code: +# ====================================================== + +%build + +# The build process embeds version info extracted from the Git repository +# into the Py_GetBuildInfo and sys.version strings. +# Our Git repository is artificial, so we don't want that. +# Tell configure to not use git. +export HAS_GIT=not-found + +# Regenerate the configure script and pyconfig.h.in +autoconf +autoheader + +# Remember the current directory (which has sources and the configure script), +# so we can refer to it after we "cd" elsewhere. +topdir=$(pwd) + +# Get proper option names from bconds +%if %{with computed_gotos} +%global computed_gotos_flag yes +%else +%global computed_gotos_flag no +%endif + +%if %{with optimizations} +%global optimizations_flag "--enable-optimizations" +%else +%global optimizations_flag "--disable-optimizations" +%endif + +# Set common compiler/linker flags +# We utilize the %%extension_...flags macros here so users building C/C++ +# extensions with our python won't get all the compiler/linker flags used +# in Fedora RPMs. +# Standard library built here will still use the %%build_...flags, +# Fedora packages utilizing %%py3_build will use them as well +# https://fedoraproject.org/wiki/Changes/Python_Extension_Flags +export CFLAGS="%{extension_cflags} -D_GNU_SOURCE -fPIC -fwrapv" +export CFLAGS_NODIST="%{build_cflags} -D_GNU_SOURCE -fPIC -fwrapv" +export CXXFLAGS="%{extension_cxxflags} -D_GNU_SOURCE -fPIC -fwrapv" +export CPPFLAGS="$(pkg-config --cflags-only-I libffi)" +export OPT="%{extension_cflags} -D_GNU_SOURCE -fPIC -fwrapv" +export LINKCC="gcc" +export CFLAGS="$CFLAGS $(pkg-config --cflags openssl)" +export LDFLAGS="%{extension_ldflags} -g $(pkg-config --libs-only-L openssl)" +export LDFLAGS_NODIST="%{build_ldflags} -g $(pkg-config --libs-only-L openssl)" + +# We can build several different configurations of Python: regular and debug. 
+# Define a common function that does one build: +BuildPython() { + ConfName=$1 + ExtraConfigArgs=$2 + MoreCFlags=$3 + + # Each build is done in its own directory + ConfDir=build/$ConfName + echo STARTING: BUILD OF PYTHON FOR CONFIGURATION: $ConfName + mkdir -p $ConfDir + pushd $ConfDir + + # Normally, %%configure looks for the "configure" script in the current + # directory. + # Since we changed directories, we need to tell %%configure where to look. + %global _configure $topdir/configure + + # A workaround for https://bugs.python.org/issue39761 + export DFLAGS=" " + +%configure \ + --with-platlibdir=%{_lib} \ + --enable-ipv6 \ + --enable-shared \ + --with-computed-gotos=%{computed_gotos_flag} \ + --with-dbmliborder=gdbm:ndbm:bdb \ + --with-system-expat \ + --with-system-ffi \ + --with-system-libmpdec \ + --enable-loadable-sqlite-extensions \ + --with-dtrace \ + --with-lto \ + --with-ssl-default-suites=openssl \ + --with-builtin-hashlib-hashes=blake2 \ + --without-static-libpython \ +%if %{with rpmwheels} + --with-wheel-pkg-dir=%{python_wheel_dir} \ +%endif +%if %{with valgrind} + --with-valgrind \ +%endif + $ExtraConfigArgs \ + %{nil} + +%global flags_override EXTRA_CFLAGS="$MoreCFlags" CFLAGS_NODIST="$CFLAGS_NODIST $MoreCFlags" + +%if %{without bootstrap} + # Regenerate generated files (needs python3) + %make_build %{flags_override} regen-all PYTHON_FOR_REGEN="python%{pybasever}" +%endif + + # Invoke the build + %make_build %{flags_override} + + popd + echo FINISHED: BUILD OF PYTHON FOR CONFIGURATION: $ConfName +} + +# Call the above to build each configuration. + +%if %{with debug_build} +# The debug build is compiled with the lowest level of optimizations as to not optimize +# out frames. We also suppress the warnings as the default distro value of the FORTIFY_SOURCE +# option produces too many warnings when compiling at the O0 optimization level. +# See also: https://bugzilla.redhat.com/show_bug.cgi?id=1818857 +BuildPython debug \ + "--without-ensurepip --with-pydebug" \ + "-O0 -Wno-cpp" +%endif # with debug_build + +BuildPython optimized \ + "--without-ensurepip %{optimizations_flag}" \ + "" + +# ====================================================== +# Installing the built code: +# ====================================================== + +%install + +# As in %%build, remember the current directory +topdir=$(pwd) + +# We install a collection of hooks for gdb that make it easier to debug +# executables linked against libpython3* (such as /usr/bin/python3 itself) +# +# These hooks are implemented in Python itself (though they are for the version +# of python that gdb is linked with) +# +# gdb-archer looks for them in the same path as the ELF file or its .debug +# file, with a -gdb.py suffix. +# We put them next to the debug file, because ldconfig would complain if +# it found non-library files directly in /usr/lib/ +# (see https://bugzilla.redhat.com/show_bug.cgi?id=562980) +# +# We'll put these files in the debuginfo package by installing them to e.g.: +# /usr/lib/debug/usr/lib/libpython3.2.so.1.0.debug-gdb.py +# (note that the debug path is /usr/lib/debug for both 32/64 bit) +# +# See https://fedoraproject.org/wiki/Features/EasierPythonDebugging for more +# information + +%if %{with gdb_hooks} +DirHoldingGdbPy=%{_usr}/lib/debug/%{_libdir} +mkdir -p %{buildroot}$DirHoldingGdbPy +%endif # with gdb_hooks + +# Multilib support for pyconfig.h +# 32- and 64-bit versions of pyconfig.h are different. 
For multilib support
+# (making it possible to install 32- and 64-bit versions simultaneously),
+# we need to install them under different filenames, and to make the common
+# "pyconfig.h" include the right file based on architecture.
+# See https://bugzilla.redhat.com/show_bug.cgi?id=192747
+# Filenames are defined here:
+%global _pyconfig32_h pyconfig-32.h
+%global _pyconfig64_h pyconfig-64.h
+%global _pyconfig_h pyconfig-%{__isa_bits}.h
+
+# Use a common function to do an install for all our configurations:
+InstallPython() {
+
+ ConfName=$1
+ PyInstSoName=$2
+ MoreCFlags=$3
+ LDVersion=$4
+
+ # Switch to the directory with this configuration's built files
+ ConfDir=build/$ConfName
+ echo STARTING: INSTALL OF PYTHON FOR CONFIGURATION: $ConfName
+ mkdir -p $ConfDir
+ pushd $ConfDir
+
+ %make_install EXTRA_CFLAGS="$MoreCFlags"
+
+ popd
+
+%if %{with gdb_hooks}
+ # See comment on $DirHoldingGdbPy above
+ PathOfGdbPy=$DirHoldingGdbPy/$PyInstSoName-%{version}-%{release}.%{_arch}.debug-gdb.py
+ cp Tools/gdb/libpython.py %{buildroot}$PathOfGdbPy
+%endif # with gdb_hooks
+
+ # Rename the -devel script that differs on different arches to an arch-specific name
+ mv %{buildroot}%{_bindir}/python${LDVersion}-{,`uname -m`-}config
+ echo -e '#!/bin/sh\nexec %{_bindir}/python'${LDVersion}'-`uname -m`-config "$@"' > \
+ %{buildroot}%{_bindir}/python${LDVersion}-config
+ chmod +x %{buildroot}%{_bindir}/python${LDVersion}-config
+
+ # Make python3-devel multilib-ready
+ mv %{buildroot}%{_includedir}/python${LDVersion}/pyconfig.h \
+ %{buildroot}%{_includedir}/python${LDVersion}/%{_pyconfig_h}
+ cat > %{buildroot}%{_includedir}/python${LDVersion}/pyconfig.h << EOF
+#include <bits/wordsize.h>
+
+#if __WORDSIZE == 32
+#include "%{_pyconfig32_h}"
+#elif __WORDSIZE == 64
+#include "%{_pyconfig64_h}"
+#else
+#error "Unknown word size"
+#endif
+EOF
+
+ echo FINISHED: INSTALL OF PYTHON FOR CONFIGURATION: $ConfName
+}
+
+# Install the "debug" build first; any common files will be overridden with
+# later builds
+%if %{with debug_build}
+InstallPython debug \
+ %{py_INSTSONAME_debug} \
+ -O0 \
+ %{LDVERSION_debug}
+%endif # with debug_build
+
+# Now the optimized build:
+InstallPython optimized \
+ %{py_INSTSONAME_optimized} \
+ "" \
+ %{LDVERSION_optimized}
+
+# Install directories for additional packages
+install -d -m 0755 %{buildroot}%{pylibdir}/site-packages/__pycache__
+%if "%{_lib}" == "lib64"
+# The 64-bit version needs to create "site-packages" in /usr/lib/ (for
+# pure-Python modules) as well as in /usr/lib64/ (for packages with extension
+# modules).
+# Note that rpmlint will complain about hardcoded library path;
+# this is intentional.
+install -d -m 0755 %{buildroot}%{_prefix}/lib/python%{pybasever}/site-packages/__pycache__ +%endif + +%if %{with main_python} +# add idle3 to menu +install -D -m 0644 Lib/idlelib/Icons/idle_16.png %{buildroot}%{_datadir}/icons/hicolor/16x16/apps/idle3.png +install -D -m 0644 Lib/idlelib/Icons/idle_32.png %{buildroot}%{_datadir}/icons/hicolor/32x32/apps/idle3.png +install -D -m 0644 Lib/idlelib/Icons/idle_48.png %{buildroot}%{_datadir}/icons/hicolor/48x48/apps/idle3.png +install -D -m 0644 Lib/idlelib/Icons/idle_256.png %{buildroot}%{_datadir}/icons/hicolor/256x256/apps/idle3.png +desktop-file-install --dir=%{buildroot}%{_datadir}/applications %{SOURCE10} + +# Install and validate appdata file +mkdir -p %{buildroot}%{_metainfodir} +cp -a %{SOURCE11} %{buildroot}%{_metainfodir} +appstream-util validate-relax --nonet %{buildroot}%{_metainfodir}/idle3.appdata.xml +%endif + +# Make sure distutils looks at the right pyconfig.h file +# See https://bugzilla.redhat.com/show_bug.cgi?id=201434 +# Similar for sysconfig: sysconfig.get_config_h_filename tries to locate +# pyconfig.h so it can be parsed, and needs to do this at runtime in site.py +# when python starts up (see https://bugzilla.redhat.com/show_bug.cgi?id=653058) +# +# Split this out so it goes directly to the pyconfig-32.h/pyconfig-64.h +# variants: +sed -i -e "s/'pyconfig.h'/'%{_pyconfig_h}'/" \ + %{buildroot}%{pylibdir}/distutils/sysconfig.py \ + %{buildroot}%{pylibdir}/sysconfig.py + +# Install pathfix.py to bindir +# See https://github.com/fedora-python/python-rpm-porting/issues/24 +cp -p Tools/scripts/pathfix.py %{buildroot}%{_bindir}/pathfix%{pybasever}.py + +# Install i18n tools to bindir +# They are also in python2, so we version them +# https://bugzilla.redhat.com/show_bug.cgi?id=1571474 +for tool in pygettext msgfmt; do + cp -p Tools/i18n/${tool}.py %{buildroot}%{_bindir}/${tool}%{pybasever}.py + ln -s ${tool}%{pybasever}.py %{buildroot}%{_bindir}/${tool}3.py +done + +# Install missing test data +# Fixed upstream: https://github.com/python/cpython/pull/112784 +cp -rp Lib/test/regrtestdata/ %{buildroot}%{pylibdir}/test/ + +# Switch all shebangs to refer to the specific Python version. +# This currently only covers files matching ^[a-zA-Z0-9_]+\.py$, +# so handle files named using other naming scheme separately. +LD_LIBRARY_PATH=./build/optimized ./build/optimized/python \ + Tools/scripts/pathfix.py \ + -i "%{_bindir}/python%{pybasever}" -pn \ + %{buildroot} \ + %{buildroot}%{_bindir}/*%{pybasever}.py \ + %{?with_gdb_hooks:%{buildroot}$DirHoldingGdbPy/*.py} + +# Remove shebang lines from .py files that aren't executable, and +# remove executability from .py files that don't have a shebang line: +find %{buildroot} -name \*.py \ + \( \( \! -perm /u+x,g+x,o+x -exec sed -e '/^#!/Q 0' -e 'Q 1' {} \; \ + -print -exec sed -i '1d' {} \; \) -o \( \ + -perm /u+x,g+x,o+x ! -exec grep -m 1 -q '^#!' {} \; \ + -exec chmod a-x {} \; \) \) + +# Get rid of DOS batch files: +find %{buildroot} -name \*.bat -exec rm {} \; + +# Get rid of backup files: +find %{buildroot}/ -name "*~" -exec rm -f {} \; +find . -name "*~" -exec rm -f {} \; + +# Do bytecompilation with the newly installed interpreter. 
+# This is similar to the script in macros.pybytecompile +# compile *.pyc +# Python CMD line options: +# -s - don't add user site directory to sys.path +# -B - don't write .pyc files on import +# compileall CMD line options: +# -f - force rebuild even if timestamps are up to date +# -o - optimization levels to run compilation with +# -s - part of path to left-strip from path to source file (buildroot) +# -p - path to add as prefix to path to source file (/ to make it absolute) +# --hardlink-dupes - hardlink different optimization level pycs together if identical (saves space) +LD_LIBRARY_PATH="%{buildroot}%{dynload_dir}/:%{buildroot}%{_libdir}" \ +%{buildroot}%{_bindir}/python%{pybasever} -s -B -m compileall \ +-f %{_smp_mflags} -o 0 -o 1 -o 2 -s %{buildroot} -p / %{buildroot} --hardlink-dupes || : + +# Turn this BRP off, it is done by compileall2 --hardlink-dupes above +%global __brp_python_hardlink %{nil} + +# Since we have pathfix.py in bindir, this is created, but we don't want it +rm -rf %{buildroot}%{_bindir}/__pycache__ + +# Fixup permissions for shared libraries from non-standard 555 to standard 755: +find %{buildroot} -perm 555 -exec chmod 755 {} \; + +# Create "/usr/bin/python3-debug", a symlink to the python3 debug binary, to +# avoid the user having to know the precise version and ABI flags. +# See e.g. https://bugzilla.redhat.com/show_bug.cgi?id=676748 +%if %{with debug_build} && %{with main_python} +ln -s \ + %{_bindir}/python%{LDVERSION_debug} \ + %{buildroot}%{_bindir}/python3-debug +%endif + +%if %{without main_python} +# Remove stuff that would conflict with python3 package +rm %{buildroot}%{_bindir}/python3 +rm %{buildroot}%{_bindir}/pydoc3 +rm %{buildroot}%{_bindir}/pygettext3.py +rm %{buildroot}%{_bindir}/msgfmt3.py +rm %{buildroot}%{_bindir}/idle3 +rm %{buildroot}%{_bindir}/python3-* +rm %{buildroot}%{_bindir}/2to3 +rm %{buildroot}%{_libdir}/libpython3.so +rm %{buildroot}%{_mandir}/man1/python3.1 +rm %{buildroot}%{_libdir}/pkgconfig/python3.pc +rm %{buildroot}%{_libdir}/pkgconfig/python3-embed.pc +%else +# Link the unversioned stuff +# https://fedoraproject.org/wiki/Changes/Python_means_Python3 +ln -s ./python3 %{buildroot}%{_bindir}/python +ln -s ./pydoc3 %{buildroot}%{_bindir}/pydoc +ln -s ./pygettext3.py %{buildroot}%{_bindir}/pygettext.py +ln -s ./msgfmt3.py %{buildroot}%{_bindir}/msgfmt.py +ln -s ./idle3 %{buildroot}%{_bindir}/idle +ln -s ./python3-config %{buildroot}%{_bindir}/python-config +ln -s ./python3.1 %{buildroot}%{_mandir}/man1/python.1 +ln -s ./python3.pc %{buildroot}%{_libdir}/pkgconfig/python.pc +ln -s ./pathfix%{pybasever}.py %{buildroot}%{_bindir}/pathfix.py +%if %{with debug_build} +ln -s ./python3-debug %{buildroot}%{_bindir}/python-debug +%endif +%endif + +# Remove large, autogenerated sources and keep only the non-optimized pycache +for file in %{buildroot}%{pylibdir}/pydoc_data/topics.py $(grep --include='*.py' -lr %{buildroot}%{pylibdir}/encodings -e 'Python Character Mapping Codec .* from .* with gencodec.py'); do + directory=$(dirname ${file}) + module=$(basename ${file%%.py}) + mv ${directory}/{__pycache__/${module}.cpython-%{pyshortver}.pyc,${module}.pyc} + rm ${directory}/{__pycache__/${module}.cpython-%{pyshortver}.opt-?.pyc,${module}.py} +done + +# Python RPM macros +mkdir -p %{buildroot}%{rpmmacrodir}/ +install -m 644 %{SOURCE3} \ + %{buildroot}/%{rpmmacrodir}/ + +# Add a script that is being used by python3.11-rpm-macros +mkdir -p %{buildroot}%{_rpmconfigdir}/redhat +install -m 644 %{SOURCE4} %{buildroot}%{_rpmconfigdir}/redhat/ + +# 
All ghost files controlled by alternatives need to exist for the files +# section check to succeed +# - Don't list /usr/bin/python as a ghost file so `yum install /usr/bin/python` +# doesn't install this package +touch %{buildroot}%{_bindir}/unversioned-python +touch %{buildroot}%{_mandir}/man1/python.1.gz +touch %{buildroot}%{_bindir}/python3 +touch %{buildroot}%{_mandir}/man1/python3.1.gz +touch %{buildroot}%{_bindir}/pydoc3 +touch %{buildroot}%{_bindir}/pydoc-3 +touch %{buildroot}%{_bindir}/idle3 +touch %{buildroot}%{_bindir}/python3-config + +# ====================================================== +# Checks for packaging issues +# ====================================================== + +%check + +# first of all, check timestamps of bytecode files +find %{buildroot} -type f -a -name "*.py" -print0 | \ + LD_LIBRARY_PATH="%{buildroot}%{dynload_dir}/:%{buildroot}%{_libdir}" \ + PYTHONPATH="%{buildroot}%{_libdir}/python%{pybasever} %{buildroot}%{_libdir}/python%{pybasever}/site-packages" \ + xargs -0 %{buildroot}%{_bindir}/python%{pybasever} %{SOURCE8} + +# Ensure that the curses module was linked against libncursesw.so, rather than +# libncurses.so +# See https://bugzilla.redhat.com/show_bug.cgi?id=539917 +ldd %{buildroot}/%{dynload_dir}/_curses*.so \ + | grep curses \ + | grep libncurses.so && (echo "_curses.so linked against libncurses.so" ; exit 1) + +# Ensure that the debug modules are linked against the debug libpython, and +# likewise for the optimized modules and libpython: +for Module in %{buildroot}/%{dynload_dir}/*.so ; do + case $Module in + *.%{SOABI_debug}) + ldd $Module | grep %{py_INSTSONAME_optimized} && + (echo Debug module $Module linked against optimized %{py_INSTSONAME_optimized} ; exit 1) + + ;; + *.%{SOABI_optimized}) + ldd $Module | grep %{py_INSTSONAME_debug} && + (echo Optimized module $Module linked against debug %{py_INSTSONAME_debug} ; exit 1) + ;; + esac +done + + +# ====================================================== +# Running the upstream test suite +# ====================================================== + +topdir=$(pwd) +CheckPython() { + ConfName=$1 + ConfDir=$(pwd)/build/$ConfName + + echo STARTING: CHECKING OF PYTHON FOR CONFIGURATION: $ConfName + + # Note that we're running the tests using the version of the code in the + # builddir, not in the buildroot. + + # Show some info, helpful for debugging test failures + LD_LIBRARY_PATH=$ConfDir $ConfDir/python -m test.pythoninfo + + # Run the upstream test suite + # --timeout=1800: kill test running for longer than 30 minutes + # test_distutils + # distutils.tests.test_bdist_rpm tests fail when bootstraping the Python + # package: rpmbuild requires /usr/bin/pythonX.Y to be installed + # test_freeze_simple_script is skipped, because it fails when bundled wheels + # are removed in Fedora. + # upstream report: https://bugs.python.org/issue45783 + # test_check_probes is failing since it was introduced in 3.11.5, + # the test is skipped until it is fixed in upstream. 
+ # see: https://github.com/python/cpython/issues/104280#issuecomment-1669249980 + + LD_LIBRARY_PATH=$ConfDir $ConfDir/python -m test.regrtest \ + -wW --slowest -j0 --timeout=1800 \ + -i test_freeze_simple_script \ + -i test_check_probes \ + %if %{with bootstrap} + -x test_distutils \ + %endif + %ifarch %{mips64} + -x test_ctypes \ + %endif + + echo FINISHED: CHECKING OF PYTHON FOR CONFIGURATION: $ConfName + +} + +%if %{with tests} + +# Check each of the configurations: +%if %{with debug_build} +CheckPython debug +%endif # with debug_build +CheckPython optimized + +%endif # with tests + + +# ====================================================== +# Scriptlets for alternatives on rhel8 +# ====================================================== +%post +# Alternative for /usr/bin/python -> /usr/bin/python3 + man page +alternatives --install %{_bindir}/unversioned-python \ + python \ + %{_bindir}/python3 \ + 300 \ + --slave %{_bindir}/python \ + unversioned-python \ + %{_bindir}/python3 \ + --slave %{_mandir}/man1/python.1.gz \ + unversioned-python-man \ + %{_mandir}/man1/python3.1.gz + +# Alternative for /usr/bin/python -> /usr/bin/python3.11 + man page +alternatives --install %{_bindir}/unversioned-python \ + python \ + %{_bindir}/python3.11 \ + 211 \ + --slave %{_bindir}/python \ + unversioned-python \ + %{_bindir}/python3.11 \ + --slave %{_mandir}/man1/python.1.gz \ + unversioned-python-man \ + %{_mandir}/man1/python3.11.1.gz + +# Alternative for /usr/bin/python3 -> /usr/bin/python3.11 + related files +# Create only if it doesn't exist already +EXISTS=`alternatives --display python3 | \ + grep -c "^/usr/bin/python3.11 - priority [0-9]*"` + +if [ $EXISTS -eq 0 ]; then + alternatives --install %{_bindir}/python3 \ + python3 \ + %{_bindir}/python3.11 \ + 31100 \ + --slave %{_mandir}/man1/python3.1.gz \ + python3-man \ + %{_mandir}/man1/python3.11.1.gz \ + --slave %{_bindir}/pydoc3 \ + pydoc3 \ + %{_bindir}/pydoc3.11 \ + --slave %{_bindir}/pydoc-3 \ + pydoc-3 \ + %{_bindir}/pydoc3.11 +fi + +%postun +# Do this only during uninstall process (not during update) +if [ $1 -eq 0 ]; then + alternatives --keep-foreign --remove python \ + %{_bindir}/python3.11 + + alternatives --keep-foreign --remove python3 \ + %{_bindir}/python3.11 + + # Remove link python → python3 if no other python3.* exists + if ! 
alternatives --display python3 > /dev/null; then + alternatives --keep-foreign --remove python \ + %{_bindir}/python3 + fi +fi + + +%post devel +alternatives --add-slave python3 %{_bindir}/python3.11 \ + %{_bindir}/python3-config \ + python3-config \ + %{_bindir}/python3.11-config + +%postun devel +# Do this only during uninstall process (not during update) +if [ $1 -eq 0 ]; then + alternatives --keep-foreign --remove-slave python3 %{_bindir}/python3.11 \ + python3-config +fi + +%post idle +alternatives --add-slave python3 %{_bindir}/python3.11 \ + %{_bindir}/idle3 \ + idle3 \ + %{_bindir}/idle3.11 + +%postun idle +# Do this only during uninstall process (not during update) +if [ $1 -eq 0 ]; then + alternatives --keep-foreign --remove-slave python3 %{_bindir}/python3.11 \ + idle3 +fi + +# ====================================================== +# Files for each RPM (sub)package +# ====================================================== + +%files -n %{pkgname}-rpm-macros +%{rpmmacrodir}/macros.python%{pybasever} +%{_rpmconfigdir}/redhat/import_all_modules_py3_11.py + +%files -n %{pkgname} +%doc README.rst + +# Alternatives +%ghost %{_bindir}/unversioned-python +%ghost %{_mandir}/man1/python.1.gz +%ghost %{_bindir}/python3 +%ghost %{_mandir}/man1/python3.1.gz +%ghost %{_bindir}/pydoc3 +%ghost %{_bindir}/pydoc-3 + +%if %{with main_python} +%{_bindir}/pydoc* +%{_bindir}/python3 +%else +%{_bindir}/pydoc%{pybasever} +%endif + +%{_bindir}/python%{pybasever} +%{_bindir}/python%{LDVERSION_optimized} +%{_mandir}/*/*3* + + +%if %{with main_python} +%files -n python-unversioned-command +%{_bindir}/python +%{_mandir}/*/python.1* +%endif + +%files -n %{pkgname}-libs +%doc README.rst + +%dir %{pylibdir} +%dir %{dynload_dir} + +%license %{pylibdir}/LICENSE.txt + +%{pylibdir}/lib2to3 +%exclude %{pylibdir}/lib2to3/tests + +%dir %{pylibdir}/unittest/ +%dir %{pylibdir}/unittest/__pycache__/ +%{pylibdir}/unittest/*.py +%{pylibdir}/unittest/__pycache__/*%{bytecode_suffixes} + +%dir %{pylibdir}/asyncio/ +%dir %{pylibdir}/asyncio/__pycache__/ +%{pylibdir}/asyncio/*.py +%{pylibdir}/asyncio/__pycache__/*%{bytecode_suffixes} + +%dir %{pylibdir}/venv/ +%dir %{pylibdir}/venv/__pycache__/ +%{pylibdir}/venv/*.py +%{pylibdir}/venv/__pycache__/*%{bytecode_suffixes} +%{pylibdir}/venv/scripts + +%{pylibdir}/wsgiref +%{pylibdir}/xmlrpc + +%dir %{pylibdir}/ensurepip/ +%dir %{pylibdir}/ensurepip/__pycache__/ +%{pylibdir}/ensurepip/*.py +%{pylibdir}/ensurepip/__pycache__/*%{bytecode_suffixes} + +%if %{with rpmwheels} +%exclude %{pylibdir}/ensurepip/_bundled +%else +%dir %{pylibdir}/ensurepip/_bundled +%{pylibdir}/ensurepip/_bundled/pip-%{pip_version}-py3-none-any.whl +%{pylibdir}/ensurepip/_bundled/setuptools-%{setuptools_version}-py3-none-any.whl +%endif + +%dir %{pylibdir}/concurrent/ +%dir %{pylibdir}/concurrent/__pycache__/ +%{pylibdir}/concurrent/*.py +%{pylibdir}/concurrent/__pycache__/*%{bytecode_suffixes} + +%dir %{pylibdir}/concurrent/futures/ +%dir %{pylibdir}/concurrent/futures/__pycache__/ +%{pylibdir}/concurrent/futures/*.py +%{pylibdir}/concurrent/futures/__pycache__/*%{bytecode_suffixes} + +%{pylibdir}/pydoc_data + +%{dynload_dir}/_blake2.%{SOABI_optimized}.so + +%{dynload_dir}/_asyncio.%{SOABI_optimized}.so +%{dynload_dir}/_bisect.%{SOABI_optimized}.so +%{dynload_dir}/_bz2.%{SOABI_optimized}.so +%{dynload_dir}/_codecs_cn.%{SOABI_optimized}.so +%{dynload_dir}/_codecs_hk.%{SOABI_optimized}.so +%{dynload_dir}/_codecs_iso2022.%{SOABI_optimized}.so +%{dynload_dir}/_codecs_jp.%{SOABI_optimized}.so 
+%{dynload_dir}/_codecs_kr.%{SOABI_optimized}.so +%{dynload_dir}/_codecs_tw.%{SOABI_optimized}.so +%{dynload_dir}/_contextvars.%{SOABI_optimized}.so +%{dynload_dir}/_crypt.%{SOABI_optimized}.so +%{dynload_dir}/_csv.%{SOABI_optimized}.so +%{dynload_dir}/_ctypes.%{SOABI_optimized}.so +%{dynload_dir}/_curses.%{SOABI_optimized}.so +%{dynload_dir}/_curses_panel.%{SOABI_optimized}.so +%{dynload_dir}/_dbm.%{SOABI_optimized}.so +%{dynload_dir}/_decimal.%{SOABI_optimized}.so +%{dynload_dir}/_elementtree.%{SOABI_optimized}.so +%if %{with gdbm} +%{dynload_dir}/_gdbm.%{SOABI_optimized}.so +%endif +%{dynload_dir}/_hashlib.%{SOABI_optimized}.so +%{dynload_dir}/_heapq.%{SOABI_optimized}.so +%{dynload_dir}/_json.%{SOABI_optimized}.so +%{dynload_dir}/_lsprof.%{SOABI_optimized}.so +%{dynload_dir}/_lzma.%{SOABI_optimized}.so +%{dynload_dir}/_multibytecodec.%{SOABI_optimized}.so +%{dynload_dir}/_multiprocessing.%{SOABI_optimized}.so +%{dynload_dir}/_opcode.%{SOABI_optimized}.so +%{dynload_dir}/_pickle.%{SOABI_optimized}.so +%{dynload_dir}/_posixsubprocess.%{SOABI_optimized}.so +%{dynload_dir}/_queue.%{SOABI_optimized}.so +%{dynload_dir}/_random.%{SOABI_optimized}.so +%{dynload_dir}/_socket.%{SOABI_optimized}.so +%{dynload_dir}/_sqlite3.%{SOABI_optimized}.so +%{dynload_dir}/_ssl.%{SOABI_optimized}.so +%{dynload_dir}/_statistics.%{SOABI_optimized}.so +%{dynload_dir}/_struct.%{SOABI_optimized}.so +%{dynload_dir}/_typing.%{SOABI_optimized}.so +%{dynload_dir}/array.%{SOABI_optimized}.so +%{dynload_dir}/audioop.%{SOABI_optimized}.so +%{dynload_dir}/binascii.%{SOABI_optimized}.so +%{dynload_dir}/cmath.%{SOABI_optimized}.so +%{dynload_dir}/_datetime.%{SOABI_optimized}.so +%{dynload_dir}/fcntl.%{SOABI_optimized}.so +%{dynload_dir}/grp.%{SOABI_optimized}.so +%{dynload_dir}/math.%{SOABI_optimized}.so +%{dynload_dir}/mmap.%{SOABI_optimized}.so +%{dynload_dir}/nis.%{SOABI_optimized}.so +%{dynload_dir}/ossaudiodev.%{SOABI_optimized}.so +%{dynload_dir}/_posixshmem.%{SOABI_optimized}.so +%{dynload_dir}/pyexpat.%{SOABI_optimized}.so +%{dynload_dir}/readline.%{SOABI_optimized}.so +%{dynload_dir}/resource.%{SOABI_optimized}.so +%{dynload_dir}/select.%{SOABI_optimized}.so +%{dynload_dir}/spwd.%{SOABI_optimized}.so +%{dynload_dir}/syslog.%{SOABI_optimized}.so +%{dynload_dir}/termios.%{SOABI_optimized}.so +%{dynload_dir}/unicodedata.%{SOABI_optimized}.so +%{dynload_dir}/_uuid.%{SOABI_optimized}.so +%{dynload_dir}/xxlimited.%{SOABI_optimized}.so +%{dynload_dir}/xxlimited_35.%{SOABI_optimized}.so +%{dynload_dir}/_xxsubinterpreters.%{SOABI_optimized}.so +%{dynload_dir}/zlib.%{SOABI_optimized}.so +%{dynload_dir}/_zoneinfo.%{SOABI_optimized}.so + +%dir %{pylibdir}/site-packages/ +%dir %{pylibdir}/site-packages/__pycache__/ +%{pylibdir}/site-packages/README.txt + +%if %{with debug_build} +%exclude %{pylibdir}/_sysconfigdata_%{ABIFLAGS_debug}_linux_%{platform_triplet}.py +%exclude %{pylibdir}/__pycache__/_sysconfigdata_%{ABIFLAGS_debug}_linux_%{platform_triplet}%{bytecode_suffixes} +%endif + +%{pylibdir}/*.py +%dir %{pylibdir}/__pycache__/ +%{pylibdir}/__pycache__/*%{bytecode_suffixes} + +%dir %{pylibdir}/collections/ +%dir %{pylibdir}/collections/__pycache__/ +%{pylibdir}/collections/*.py +%{pylibdir}/collections/__pycache__/*%{bytecode_suffixes} + +%dir %{pylibdir}/ctypes/ +%dir %{pylibdir}/ctypes/__pycache__/ +%{pylibdir}/ctypes/*.py +%{pylibdir}/ctypes/__pycache__/*%{bytecode_suffixes} +%{pylibdir}/ctypes/macholib + +%{pylibdir}/curses + +%dir %{pylibdir}/dbm/ +%dir %{pylibdir}/dbm/__pycache__/ +%{pylibdir}/dbm/*.py 
+%{pylibdir}/dbm/__pycache__/*%{bytecode_suffixes} + +%dir %{pylibdir}/distutils/ +%dir %{pylibdir}/distutils/__pycache__/ +%{pylibdir}/distutils/*.py +%{pylibdir}/distutils/__pycache__/*%{bytecode_suffixes} +%{pylibdir}/distutils/README +%{pylibdir}/distutils/command + +%dir %{pylibdir}/email/ +%dir %{pylibdir}/email/__pycache__/ +%{pylibdir}/email/*.py +%{pylibdir}/email/__pycache__/*%{bytecode_suffixes} +%{pylibdir}/email/mime +%doc %{pylibdir}/email/architecture.rst + +%{pylibdir}/encodings + +%{pylibdir}/html +%{pylibdir}/http + +%dir %{pylibdir}/importlib/ +%dir %{pylibdir}/importlib/__pycache__/ +%{pylibdir}/importlib/*.py +%{pylibdir}/importlib/__pycache__/*%{bytecode_suffixes} + +%dir %{pylibdir}/importlib/metadata/ +%dir %{pylibdir}/importlib/metadata/__pycache__/ +%{pylibdir}/importlib/metadata/*.py +%{pylibdir}/importlib/metadata/__pycache__/*%{bytecode_suffixes} + +%dir %{pylibdir}/importlib/resources/ +%dir %{pylibdir}/importlib/resources/__pycache__/ +%{pylibdir}/importlib/resources/*.py +%{pylibdir}/importlib/resources/__pycache__/*%{bytecode_suffixes} + +%dir %{pylibdir}/json/ +%dir %{pylibdir}/json/__pycache__/ +%{pylibdir}/json/*.py +%{pylibdir}/json/__pycache__/*%{bytecode_suffixes} + +%{pylibdir}/logging +%{pylibdir}/multiprocessing + +%dir %{pylibdir}/re/ +%{pylibdir}/re/*.py +%{pylibdir}/re/__pycache__/*%{bytecode_suffixes} + +%dir %{pylibdir}/sqlite3/ +%dir %{pylibdir}/sqlite3/__pycache__/ +%{pylibdir}/sqlite3/*.py +%{pylibdir}/sqlite3/__pycache__/*%{bytecode_suffixes} + +%dir %{pylibdir}/tomllib/ +%{pylibdir}/tomllib/*.py +%{pylibdir}/tomllib/__pycache__/*%{bytecode_suffixes} +%exclude %{pylibdir}/turtle.py +%exclude %{pylibdir}/__pycache__/turtle*%{bytecode_suffixes} + +%{pylibdir}/urllib +%{pylibdir}/xml +%{pylibdir}/zoneinfo + +%dir %{pylibdir}/__phello__ +%{pylibdir}/__phello__/__init__.py +%{pylibdir}/__phello__/spam.py +%{pylibdir}/__phello__/__pycache__/*%{bytecode_suffixes} + +%if "%{_lib}" == "lib64" +%attr(0755,root,root) %dir %{_prefix}/lib/python%{pybasever} +%attr(0755,root,root) %dir %{_prefix}/lib/python%{pybasever}/site-packages +%attr(0755,root,root) %dir %{_prefix}/lib/python%{pybasever}/site-packages/__pycache__/ +%endif + +# "Makefile" and the config-32/64.h file are needed by +# distutils/sysconfig.py:_init_posix(), so we include them in the core +# package, along with their parent directories (bug 531901): +%dir %{pylibdir}/config-%{LDVERSION_optimized}-%{platform_triplet}/ +%{pylibdir}/config-%{LDVERSION_optimized}-%{platform_triplet}/Makefile +%dir %{_includedir}/python%{LDVERSION_optimized}/ +%{_includedir}/python%{LDVERSION_optimized}/%{_pyconfig_h} + +%{_libdir}/%{py_INSTSONAME_optimized} +%if %{with main_python} +%{_libdir}/libpython3.so +%endif + + +%files -n %{pkgname}-devel +%{pylibdir}/config-%{LDVERSION_optimized}-%{platform_triplet}/* +%exclude %{pylibdir}/config-%{LDVERSION_optimized}-%{platform_triplet}/Makefile +%exclude %{_includedir}/python%{LDVERSION_optimized}/%{_pyconfig_h} +%{_includedir}/python%{LDVERSION_optimized}/*.h +%{_includedir}/python%{LDVERSION_optimized}/internal/ +%{_includedir}/python%{LDVERSION_optimized}/cpython/ +%doc Misc/README.valgrind Misc/valgrind-python.supp Misc/gdbinit + +%if %{with main_python} +%{_bindir}/2to3 +%{_bindir}/python3-config +%{_bindir}/python-config +%{_libdir}/pkgconfig/python3.pc +%{_libdir}/pkgconfig/python.pc +%{_libdir}/pkgconfig/python3-embed.pc +%{_bindir}/pathfix.py +%{_bindir}/pygettext3.py +%{_bindir}/pygettext.py +%{_bindir}/msgfmt3.py +%{_bindir}/msgfmt.py +%endif + 
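+# Versioned tools and config scripts, installed regardless of the main_python bcond: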
+%{_bindir}/2to3-%{pybasever} +%{_bindir}/pathfix%{pybasever}.py +%{_bindir}/pygettext%{pybasever}.py +%{_bindir}/msgfmt%{pybasever}.py + +%{_bindir}/python%{pybasever}-config +%{_bindir}/python%{LDVERSION_optimized}-config +%{_bindir}/python%{LDVERSION_optimized}-*-config +# Alternatives +%ghost %{_bindir}/python3-config + +%{_libdir}/libpython%{LDVERSION_optimized}.so +%{_libdir}/pkgconfig/python-%{LDVERSION_optimized}.pc +%{_libdir}/pkgconfig/python-%{LDVERSION_optimized}-embed.pc +%{_libdir}/pkgconfig/python-%{pybasever}.pc +%{_libdir}/pkgconfig/python-%{pybasever}-embed.pc + + +%files -n %{pkgname}-idle +%if %{with main_python} +%{_bindir}/idle* +%else +%{_bindir}/idle%{pybasever} +# Alternatives +%ghost %{_bindir}/idle3 +%endif + +%{pylibdir}/idlelib + +%if %{with main_python} +%{_metainfodir}/idle3.appdata.xml +%{_datadir}/applications/idle3.desktop +%{_datadir}/icons/hicolor/*/apps/idle3.* +%endif + +%files -n %{pkgname}-tkinter +%{pylibdir}/tkinter +%exclude %{pylibdir}/tkinter/test +%{dynload_dir}/_tkinter.%{SOABI_optimized}.so +%{pylibdir}/turtle.py +%{pylibdir}/__pycache__/turtle*%{bytecode_suffixes} +%dir %{pylibdir}/turtledemo +%{pylibdir}/turtledemo/*.py +%{pylibdir}/turtledemo/*.cfg +%dir %{pylibdir}/turtledemo/__pycache__/ +%{pylibdir}/turtledemo/__pycache__/*%{bytecode_suffixes} + + +%files -n %{pkgname}-test +%{pylibdir}/ctypes/test +%{pylibdir}/distutils/tests +%{pylibdir}/test +%{dynload_dir}/_ctypes_test.%{SOABI_optimized}.so +%{dynload_dir}/_testbuffer.%{SOABI_optimized}.so +%{dynload_dir}/_testcapi.%{SOABI_optimized}.so +%{dynload_dir}/_testclinic.%{SOABI_optimized}.so +%{dynload_dir}/_testimportmultiple.%{SOABI_optimized}.so +%{dynload_dir}/_testinternalcapi.%{SOABI_optimized}.so +%{dynload_dir}/_testmultiphase.%{SOABI_optimized}.so +%{dynload_dir}/_xxtestfuzz.%{SOABI_optimized}.so +%{pylibdir}/lib2to3/tests +%{pylibdir}/tkinter/test +%{pylibdir}/unittest/test + +# We don't bother splitting the debug build out into further subpackages: +# if you need it, you're probably a developer. 
+ +# Hence the manifest is the combination of analogous files in the manifests of +# all of the other subpackages + +%if %{with debug_build} +%files -n %{pkgname}-debug +%if %{with main_python} +%{_bindir}/python3-debug +%{_bindir}/python-debug +%endif + +# Analog of the core subpackage's files: +%{_bindir}/python%{LDVERSION_debug} + +# Analog of the -libs subpackage's files: +# ...with debug builds of the built-in "extension" modules: + +%{dynload_dir}/_blake2.%{SOABI_debug}.so + +%{dynload_dir}/_asyncio.%{SOABI_debug}.so +%{dynload_dir}/_bisect.%{SOABI_debug}.so +%{dynload_dir}/_bz2.%{SOABI_debug}.so +%{dynload_dir}/_codecs_cn.%{SOABI_debug}.so +%{dynload_dir}/_codecs_hk.%{SOABI_debug}.so +%{dynload_dir}/_codecs_iso2022.%{SOABI_debug}.so +%{dynload_dir}/_codecs_jp.%{SOABI_debug}.so +%{dynload_dir}/_codecs_kr.%{SOABI_debug}.so +%{dynload_dir}/_codecs_tw.%{SOABI_debug}.so +%{dynload_dir}/_contextvars.%{SOABI_debug}.so +%{dynload_dir}/_crypt.%{SOABI_debug}.so +%{dynload_dir}/_csv.%{SOABI_debug}.so +%{dynload_dir}/_ctypes.%{SOABI_debug}.so +%{dynload_dir}/_curses.%{SOABI_debug}.so +%{dynload_dir}/_curses_panel.%{SOABI_debug}.so +%{dynload_dir}/_dbm.%{SOABI_debug}.so +%{dynload_dir}/_decimal.%{SOABI_debug}.so +%{dynload_dir}/_elementtree.%{SOABI_debug}.so +%if %{with gdbm} +%{dynload_dir}/_gdbm.%{SOABI_debug}.so +%endif +%{dynload_dir}/_hashlib.%{SOABI_debug}.so +%{dynload_dir}/_heapq.%{SOABI_debug}.so +%{dynload_dir}/_json.%{SOABI_debug}.so +%{dynload_dir}/_lsprof.%{SOABI_debug}.so +%{dynload_dir}/_lzma.%{SOABI_debug}.so +%{dynload_dir}/_multibytecodec.%{SOABI_debug}.so +%{dynload_dir}/_multiprocessing.%{SOABI_debug}.so +%{dynload_dir}/_opcode.%{SOABI_debug}.so +%{dynload_dir}/_pickle.%{SOABI_debug}.so +%{dynload_dir}/_posixsubprocess.%{SOABI_debug}.so +%{dynload_dir}/_queue.%{SOABI_debug}.so +%{dynload_dir}/_random.%{SOABI_debug}.so +%{dynload_dir}/_socket.%{SOABI_debug}.so +%{dynload_dir}/_sqlite3.%{SOABI_debug}.so +%{dynload_dir}/_ssl.%{SOABI_debug}.so +%{dynload_dir}/_statistics.%{SOABI_debug}.so +%{dynload_dir}/_struct.%{SOABI_debug}.so +%{dynload_dir}/_typing.%{SOABI_debug}.so +%{dynload_dir}/array.%{SOABI_debug}.so +%{dynload_dir}/audioop.%{SOABI_debug}.so +%{dynload_dir}/binascii.%{SOABI_debug}.so +%{dynload_dir}/cmath.%{SOABI_debug}.so +%{dynload_dir}/_datetime.%{SOABI_debug}.so +%{dynload_dir}/fcntl.%{SOABI_debug}.so +%{dynload_dir}/grp.%{SOABI_debug}.so +%{dynload_dir}/math.%{SOABI_debug}.so +%{dynload_dir}/mmap.%{SOABI_debug}.so +%{dynload_dir}/nis.%{SOABI_debug}.so +%{dynload_dir}/ossaudiodev.%{SOABI_debug}.so +%{dynload_dir}/_posixshmem.%{SOABI_debug}.so +%{dynload_dir}/pyexpat.%{SOABI_debug}.so +%{dynload_dir}/readline.%{SOABI_debug}.so +%{dynload_dir}/resource.%{SOABI_debug}.so +%{dynload_dir}/select.%{SOABI_debug}.so +%{dynload_dir}/spwd.%{SOABI_debug}.so +%{dynload_dir}/syslog.%{SOABI_debug}.so +%{dynload_dir}/termios.%{SOABI_debug}.so +%{dynload_dir}/unicodedata.%{SOABI_debug}.so +%{dynload_dir}/_uuid.%{SOABI_debug}.so +%{dynload_dir}/xxlimited.%{SOABI_debug}.so +%{dynload_dir}/xxlimited_35.%{SOABI_debug}.so +%{dynload_dir}/_xxsubinterpreters.%{SOABI_debug}.so +%{dynload_dir}/_xxtestfuzz.%{SOABI_debug}.so +%{dynload_dir}/zlib.%{SOABI_debug}.so +%{dynload_dir}/_zoneinfo.%{SOABI_debug}.so + +# No need to split things out the "Makefile" and the config-32/64.h file as we +# do for the regular build above (bug 531901), since they're all in one package +# now; they're listed below, under "-devel": + +%{_libdir}/%{py_INSTSONAME_debug} + +# Analog of the -devel subpackage's files: 
+%{pylibdir}/config-%{LDVERSION_debug}-%{platform_triplet}
+%{_includedir}/python%{LDVERSION_debug}
+%{_bindir}/python%{LDVERSION_debug}-config
+%{_bindir}/python%{LDVERSION_debug}-*-config
+%{_libdir}/libpython%{LDVERSION_debug}.so
+%{_libdir}/libpython%{LDVERSION_debug}.so.%{py_SOVERSION}
+%{_libdir}/pkgconfig/python-%{LDVERSION_debug}.pc
+%{_libdir}/pkgconfig/python-%{LDVERSION_debug}-embed.pc
+
+# Analog of the -tools subpackage's files:
+# None for now; we could build precanned versions that have the appropriate
+# shebang if needed
+
+# Analog of the tkinter subpackage's files:
+%{dynload_dir}/_tkinter.%{SOABI_debug}.so
+
+# Analog of the -test subpackage's files:
+%{dynload_dir}/_ctypes_test.%{SOABI_debug}.so
+%{dynload_dir}/_testbuffer.%{SOABI_debug}.so
+%{dynload_dir}/_testcapi.%{SOABI_debug}.so
+%{dynload_dir}/_testclinic.%{SOABI_debug}.so
+%{dynload_dir}/_testimportmultiple.%{SOABI_debug}.so
+%{dynload_dir}/_testinternalcapi.%{SOABI_debug}.so
+%{dynload_dir}/_testmultiphase.%{SOABI_debug}.so
+
+%{pylibdir}/_sysconfigdata_%{ABIFLAGS_debug}_linux_%{platform_triplet}.py
+%{pylibdir}/__pycache__/_sysconfigdata_%{ABIFLAGS_debug}_linux_%{platform_triplet}%{bytecode_suffixes}
+
+%endif # with debug_build
+
+# We put the debug-gdb.py file inside /usr/lib/debug to avoid noise from ldconfig
+# See https://bugzilla.redhat.com/show_bug.cgi?id=562980
+#
+# The /usr/lib/rpm/redhat/macros defines %%__debug_package to use
+# debugfiles.list, and it appears that everything below /usr/lib/debug and
+# (/usr/src/debug) gets added to this file (via LISTFILES) in
+# /usr/lib/rpm/find-debuginfo.sh
+#
+# Hence by installing it below /usr/lib/debug we ensure it is added to the
+# -debuginfo subpackage
+# (if it isn't, the rpmbuild ought to fail, since the debug-gdb.py
+# payload file would be unpackaged)
+
+# Workaround for https://bugzilla.redhat.com/show_bug.cgi?id=1476593
+%undefine _debuginfo_subpackages
+
+# ======================================================
+# Finally, the changelog:
+# ======================================================
+
+%changelog
+* Fri Mar 29 2024 MSVSphere Packaging Team - 3.11.7-1
+- Rebuilt for MSVSphere 8.10 beta
+
+* Mon Jan 22 2024 Charalampos Stratakis - 3.11.7-1
+- Rebase to 3.11.7
+Resolves: RHEL-21915
+
+* Tue Jan 09 2024 Lumír Balhar - 3.11.5-2
+- Security fix for CVE-2023-27043
+Resolves: RHEL-7842
+
+* Thu Sep 07 2023 Charalampos Stratakis - 3.11.5-1
+- Rebase to 3.11.5
+- Security fixes for CVE-2023-40217 and CVE-2023-41105
+Resolves: RHEL-3047, RHEL-3267
+
+* Thu Aug 10 2023 Tomas Orsava - 3.11.4-4
+- Add the import_all_modules_py3_11.py file for the python3.11-rpm-macros subpackage
+Resolves: rhbz#2207631
+
+* Wed Aug 09 2023 Petr Viktorin - 3.11.4-3
+- Fix symlink handling in the fix for CVE-2023-24329
+Resolves: rhbz#263261
+
+* Fri Jun 30 2023 Charalampos Stratakis - 3.11.4-2
+- Security fix for CVE-2007-4559
+Resolves: rhbz#263261
+
+* Mon Jun 26 2023 Charalampos Stratakis - 3.11.4-1
+- Update to 3.11.4
+- Security fix for CVE-2023-24329
+Resolves: rhbz#2173917
+
+* Thu Feb 16 2023 Charalampos Stratakis - 3.11.2-2
+- Support OpenSSL FIPS mode
+
+* Thu Feb 09 2023 Charalampos Stratakis - 3.11.2-1
+- Update to 3.11.2
+
+* Tue Jan 31 2023 Charalampos Stratakis - 3.11.1-4
+- Disable the builtin hashlib hashes except blake2
+
+* Mon Jan 30 2023 Charalampos Stratakis - 3.11.1-3
+- Disable bootstrap
+- Revert python3.11-rpm-macros requirement
+
+* Mon Jan 30 2023 Charalampos Stratakis - 3.11.1-2
+- Fix macros requirements
+
+* Tue Dec 13 2022 Charalampos Stratakis - 3.11.1-1
+- Initial package
+- Fedora contributions by:
+ Björn Esser
+ Charalampos Stratakis
+ Dan Horák
+ David Malcolm
+ Dennis Gilmore
+ Florian Weimer
+ Gwyn Ciesla
+ Igor Gnatenko
+ Iryna Shcherbina
+ Jaroslav Škarvada
+ Jason ティビツ
+ Kalev Lember
+ Karsten Hopp
+ Lumir Balhar
+ Marcel Plch
+ Matej Stuchlik
+ Michal Cyprian
+ Michal Toman
+ Miro Hrončok
+ Nicolas Chauvet
+ Orion Poplawski
+ Patrik Kopkan
+ Peter Robinson
+ Petr Šplíchal
+ Petr Viktorin
+ Rex Dieter
+ Richard W.M. Jones
+ Robert Kuska
+ Sahana Prasad
+ Slavek Kabrda
+ Stephen Gallagher
+ Than Ngo
+ Thomas Spura
+ Till Maas
+ Tomáš Hrnčiar
+ Tomas Mraz
+ Tomas Orsava
+ Tomas Radej
+ Toshio Kuratomi
+ Victor Stinner
+ Ville Skyttä
+ Zbigniew Jędrzejewski-Szmek
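
The core-package manifest above keeps "Makefile" and the config-32/64.h header in the
main package because, as the spec comment notes, distutils/sysconfig.py:_init_posix()
reads them at run time. As a rough illustration of that dependency only (a sketch, not
part of the imported spec, and assuming an ordinary POSIX install of the interpreter
built here), the packaged paths can be inspected from Python:

    # sketch: locate the files the core package ships for sysconfig's benefit
    import sysconfig

    # The Makefile packaged under %{pylibdir}/config-<ldversion>-<triplet>/
    print(sysconfig.get_makefile_filename())

    # The pyconfig.h packaged under %{_includedir}/python<ldversion>/
    print(sysconfig.get_config_h_filename())

    # Build-time variables resolved from that data, e.g. the library directory
    print(sysconfig.get_config_var("LIBDIR"))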