commit e1070de5cf
@@ -0,0 +1 @@
SOURCES/Python-3.12.3.tar.xz
@@ -0,0 +1 @@
3df73004a9b224d021fd397724e8bd4f9b6cc824 SOURCES/Python-3.12.3.tar.xz
@@ -0,0 +1,173 @@
From 0000000000000000000000000000000000000000 Mon Sep 17 00:00:00 2001
|
||||
From: =?UTF-8?q?Miro=20Hron=C4=8Dok?= <miro@hroncok.cz>
|
||||
Date: Mon, 15 Feb 2021 12:19:27 +0100
|
||||
Subject: [PATCH] 00251: Change user install location
|
||||
MIME-Version: 1.0
|
||||
Content-Type: text/plain; charset=UTF-8
|
||||
Content-Transfer-Encoding: 8bit
|
||||
|
||||
Set values of base and platbase in sysconfig from /usr
|
||||
to /usr/local when RPM build is not detected
|
||||
to make pip and similar tools install into a separate location.
|
||||
|
||||
Fedora Change: https://fedoraproject.org/wiki/Changes/Making_sudo_pip_safe
|
||||
Downstream only.
|
||||
|
||||
We've tried to rework in Fedora 36/Python 3.10 to follow https://bugs.python.org/issue43976
|
||||
but we have identified serious problems with that approach,
|
||||
see https://bugzilla.redhat.com/2026979 or https://bugzilla.redhat.com/2097183
|
||||
|
||||
pypa/distutils integration: https://github.com/pypa/distutils/pull/70
|
||||
|
||||
Co-authored-by: Petr Viktorin <encukou@gmail.com>
|
||||
Co-authored-by: Miro Hrončok <miro@hroncok.cz>
|
||||
Co-authored-by: Michal Cyprian <m.cyprian@gmail.com>
|
||||
Co-authored-by: Lumír Balhar <frenzy.madness@gmail.com>
|
||||
---
|
||||
Lib/site.py | 9 ++++++-
|
||||
Lib/sysconfig.py | 49 +++++++++++++++++++++++++++++++++++++-
|
||||
Lib/test/test_sysconfig.py | 17 +++++++++++--
|
||||
3 files changed, 71 insertions(+), 4 deletions(-)
|
||||
|
||||
diff --git a/Lib/site.py b/Lib/site.py
|
||||
index 924b2460d9..51b5baca93 100644
|
||||
--- a/Lib/site.py
|
||||
+++ b/Lib/site.py
|
||||
@@ -387,8 +387,15 @@ def getsitepackages(prefixes=None):
|
||||
return sitepackages
|
||||
|
||||
def addsitepackages(known_paths, prefixes=None):
|
||||
- """Add site-packages to sys.path"""
|
||||
+ """Add site-packages to sys.path
|
||||
+
|
||||
+ '/usr/local' is included in PREFIXES if RPM build is not detected
|
||||
+ to make packages installed into this location visible.
|
||||
+
|
||||
+ """
|
||||
_trace("Processing global site-packages")
|
||||
+ if ENABLE_USER_SITE and 'RPM_BUILD_ROOT' not in os.environ:
|
||||
+ PREFIXES.insert(0, "/usr/local")
|
||||
for sitedir in getsitepackages(prefixes):
|
||||
if os.path.isdir(sitedir):
|
||||
addsitedir(sitedir, known_paths)
|
||||
diff --git a/Lib/sysconfig.py b/Lib/sysconfig.py
|
||||
index 122d441bd1..2d354a11da 100644
|
||||
--- a/Lib/sysconfig.py
|
||||
+++ b/Lib/sysconfig.py
|
||||
@@ -104,6 +104,11 @@
|
||||
else:
|
||||
_INSTALL_SCHEMES['venv'] = _INSTALL_SCHEMES['posix_venv']
|
||||
|
||||
+# For a brief period of time in the Fedora 36 life cycle,
|
||||
+# this installation scheme existed and was documented in the release notes.
|
||||
+# For backwards compatibility, we keep it here (at least on 3.10 and 3.11).
|
||||
+_INSTALL_SCHEMES['rpm_prefix'] = _INSTALL_SCHEMES['posix_prefix']
|
||||
+
|
||||
|
||||
# NOTE: site.py has copy of this function.
|
||||
# Sync it when modify this function.
|
||||
@@ -163,6 +168,19 @@ def joinuser(*args):
|
||||
},
|
||||
}
|
||||
|
||||
+# This is used by distutils.command.install in the stdlib
|
||||
+# as well as pypa/distutils (e.g. bundled in setuptools).
|
||||
+# The self.prefix value is set to sys.prefix + /local/
|
||||
+# if neither RPM build nor virtual environment is
|
||||
+# detected to make distutils install packages
|
||||
+# into a separate location.
|
||||
+# https://fedoraproject.org/wiki/Changes/Making_sudo_pip_safe
|
||||
+if (not (hasattr(sys, 'real_prefix') or
|
||||
+ sys.prefix != sys.base_prefix) and
|
||||
+ 'RPM_BUILD_ROOT' not in os.environ):
|
||||
+ _prefix_addition = '/local'
|
||||
+
|
||||
+
|
||||
_SCHEME_KEYS = ('stdlib', 'platstdlib', 'purelib', 'platlib', 'include',
|
||||
'scripts', 'data')
|
||||
|
||||
@@ -263,11 +281,40 @@ def _extend_dict(target_dict, other_dict):
|
||||
target_dict[key] = value
|
||||
|
||||
|
||||
+_CONFIG_VARS_LOCAL = None
|
||||
+
|
||||
+
|
||||
+def _config_vars_local():
|
||||
+ # This function returns the config vars with prefixes amended to /usr/local
|
||||
+ # https://fedoraproject.org/wiki/Changes/Making_sudo_pip_safe
|
||||
+ global _CONFIG_VARS_LOCAL
|
||||
+ if _CONFIG_VARS_LOCAL is None:
|
||||
+ _CONFIG_VARS_LOCAL = dict(get_config_vars())
|
||||
+ _CONFIG_VARS_LOCAL['base'] = '/usr/local'
|
||||
+ _CONFIG_VARS_LOCAL['platbase'] = '/usr/local'
|
||||
+ return _CONFIG_VARS_LOCAL
|
||||
+
|
||||
+
|
||||
def _expand_vars(scheme, vars):
|
||||
res = {}
|
||||
if vars is None:
|
||||
vars = {}
|
||||
- _extend_dict(vars, get_config_vars())
|
||||
+
|
||||
+ # when we are not in a virtual environment or an RPM build
|
||||
+ # we change '/usr' to '/usr/local'
|
||||
+ # to avoid surprises, we explicitly check for the /usr/ prefix
|
||||
+ # Python virtual environments have different prefixes
|
||||
+ # we only do this for posix_prefix, not to mangle the venv scheme
|
||||
+ # posix_prefix is used by sudo pip install
|
||||
+ # we only change the defaults here, so explicit --prefix will take precedence
|
||||
+ # https://fedoraproject.org/wiki/Changes/Making_sudo_pip_safe
|
||||
+ if (scheme == 'posix_prefix' and
|
||||
+ _PREFIX == '/usr' and
|
||||
+ 'RPM_BUILD_ROOT' not in os.environ):
|
||||
+ _extend_dict(vars, _config_vars_local())
|
||||
+ else:
|
||||
+ _extend_dict(vars, get_config_vars())
|
||||
+
|
||||
if os.name == 'nt':
|
||||
# On Windows we want to substitute 'lib' for schemes rather
|
||||
# than the native value (without modifying vars, in case it
|
||||
diff --git a/Lib/test/test_sysconfig.py b/Lib/test/test_sysconfig.py
|
||||
index 1137c2032b..8fc2b84f52 100644
|
||||
--- a/Lib/test/test_sysconfig.py
|
||||
+++ b/Lib/test/test_sysconfig.py
|
||||
@@ -110,8 +110,19 @@ def test_get_path(self):
|
||||
for scheme in _INSTALL_SCHEMES:
|
||||
for name in _INSTALL_SCHEMES[scheme]:
|
||||
expected = _INSTALL_SCHEMES[scheme][name].format(**config_vars)
|
||||
+ tested = get_path(name, scheme)
|
||||
+ # https://fedoraproject.org/wiki/Changes/Making_sudo_pip_safe
|
||||
+ if tested.startswith('/usr/local'):
|
||||
+ # /usr/local should only be used in posix_prefix
|
||||
+ self.assertEqual(scheme, 'posix_prefix')
|
||||
+ # Fedora CI runs tests for venv and virtualenv that check for other prefixes
|
||||
+ self.assertEqual(sys.prefix, '/usr')
|
||||
+ # When building the RPM of Python, %check runs this with RPM_BUILD_ROOT set
|
||||
+ # Fedora CI runs this with RPM_BUILD_ROOT unset
|
||||
+ self.assertNotIn('RPM_BUILD_ROOT', os.environ)
|
||||
+ tested = tested.replace('/usr/local', '/usr')
|
||||
self.assertEqual(
|
||||
- os.path.normpath(get_path(name, scheme)),
|
||||
+ os.path.normpath(tested),
|
||||
os.path.normpath(expected),
|
||||
)
|
||||
|
||||
@@ -344,7 +355,7 @@ def test_get_config_h_filename(self):
|
||||
self.assertTrue(os.path.isfile(config_h), config_h)
|
||||
|
||||
def test_get_scheme_names(self):
|
||||
- wanted = ['nt', 'posix_home', 'posix_prefix', 'posix_venv', 'nt_venv', 'venv']
|
||||
+ wanted = ['nt', 'posix_home', 'posix_prefix', 'posix_venv', 'nt_venv', 'venv', 'rpm_prefix']
|
||||
if HAS_USER_BASE:
|
||||
wanted.extend(['nt_user', 'osx_framework_user', 'posix_user'])
|
||||
self.assertEqual(get_scheme_names(), tuple(sorted(wanted)))
|
||||
@@ -356,6 +367,8 @@ def test_symlink(self): # Issue 7880
|
||||
cmd = "-c", "import sysconfig; print(sysconfig.get_platform())"
|
||||
self.assertEqual(py.call_real(*cmd), py.call_link(*cmd))
|
||||
|
||||
+ @unittest.skipIf('RPM_BUILD_ROOT' not in os.environ,
|
||||
+ "Test doesn't expect Fedora's paths")
|
||||
def test_user_similar(self):
|
||||
# Issue #8759: make sure the posix scheme for the users
|
||||
# is similar to the global posix_prefix one
|
@@ -0,0 +1,833 @@
|
||||
From 43ce74d971fad62db6ccd723fe6b01da9c7ff407 Mon Sep 17 00:00:00 2001
|
||||
From: Charalampos Stratakis <cstratak@redhat.com>
|
||||
Date: Thu, 12 Dec 2019 16:58:31 +0100
|
||||
Subject: [PATCH 1/5] Expose blake2b and blake2s hashes from OpenSSL
|
||||
|
||||
These aren't as powerful as Python's own implementation, but they can be
|
||||
used under FIPS.
|
||||
---
|
||||
Lib/test/test_hashlib.py | 6 ++
|
||||
Modules/_hashopenssl.c | 37 ++++++++
|
||||
Modules/clinic/_hashopenssl.c.h | 152 +++++++++++++++++++++++++++++++-
|
||||
3 files changed, 194 insertions(+), 1 deletion(-)
|
||||
|
||||
diff --git a/Lib/test/test_hashlib.py b/Lib/test/test_hashlib.py
|
||||
index 73d758a..5921360 100644
|
||||
--- a/Lib/test/test_hashlib.py
|
||||
+++ b/Lib/test/test_hashlib.py
|
||||
@@ -375,6 +375,12 @@ class HashLibTestCase(unittest.TestCase):
|
||||
# 2 is for hashlib.name(...) and hashlib.new(name, ...)
|
||||
self.assertGreaterEqual(len(constructors), 2)
|
||||
for hash_object_constructor in constructors:
|
||||
+
|
||||
+ # OpenSSL's blake2s & blake2b don't support `key`
|
||||
+ _name = hash_object_constructor.__name__
|
||||
+ if 'key' in kwargs and _name.startswith('openssl_blake2'):
|
||||
+ return
|
||||
+
|
||||
m = hash_object_constructor(data, **kwargs)
|
||||
computed = m.hexdigest() if not shake else m.hexdigest(length)
|
||||
self.assertEqual(
|
||||
diff --git a/Modules/_hashopenssl.c b/Modules/_hashopenssl.c
|
||||
index af6d1b2..980712f 100644
|
||||
--- a/Modules/_hashopenssl.c
|
||||
+++ b/Modules/_hashopenssl.c
|
||||
@@ -1079,6 +1079,41 @@ _hashlib_openssl_sha512_impl(PyObject *module, PyObject *data_obj,
|
||||
}
|
||||
|
||||
|
||||
+/*[clinic input]
|
||||
+_hashlib.openssl_blake2b
|
||||
+ string as data_obj: object(py_default="b''") = NULL
|
||||
+ *
|
||||
+ usedforsecurity: bool = True
|
||||
+Returns a blake2b hash object; optionally initialized with a string
|
||||
+[clinic start generated code]*/
|
||||
+
|
||||
+static PyObject *
|
||||
+_hashlib_openssl_blake2b_impl(PyObject *module, PyObject *data_obj,
|
||||
+ int usedforsecurity)
|
||||
+/*[clinic end generated code: output=7a838b1643cde13e input=4ad7fd54268f3689]*/
|
||||
+
|
||||
+{
|
||||
+ return py_evp_fromname(module, Py_hash_blake2b, data_obj, usedforsecurity);
|
||||
+}
|
||||
+
|
||||
+/*[clinic input]
|
||||
+_hashlib.openssl_blake2s
|
||||
+ string as data_obj: object(py_default="b''") = NULL
|
||||
+ *
|
||||
+ usedforsecurity: bool = True
|
||||
+Returns a blake2s hash object; optionally initialized with a string
|
||||
+[clinic start generated code]*/
|
||||
+
|
||||
+static PyObject *
|
||||
+_hashlib_openssl_blake2s_impl(PyObject *module, PyObject *data_obj,
|
||||
+ int usedforsecurity)
|
||||
+/*[clinic end generated code: output=4eda6b40757471da input=1ed39481ffa4e26a]*/
|
||||
+
|
||||
+{
|
||||
+ return py_evp_fromname(module, Py_hash_blake2s, data_obj, usedforsecurity);
|
||||
+}
|
||||
+
|
||||
+
|
||||
#ifdef PY_OPENSSL_HAS_SHA3
|
||||
|
||||
/*[clinic input]
|
||||
@@ -2067,6 +2102,8 @@ static struct PyMethodDef EVP_functions[] = {
|
||||
_HASHLIB_OPENSSL_SHA256_METHODDEF
|
||||
_HASHLIB_OPENSSL_SHA384_METHODDEF
|
||||
_HASHLIB_OPENSSL_SHA512_METHODDEF
|
||||
+ _HASHLIB_OPENSSL_BLAKE2B_METHODDEF
|
||||
+ _HASHLIB_OPENSSL_BLAKE2S_METHODDEF
|
||||
_HASHLIB_OPENSSL_SHA3_224_METHODDEF
|
||||
_HASHLIB_OPENSSL_SHA3_256_METHODDEF
|
||||
_HASHLIB_OPENSSL_SHA3_384_METHODDEF
|
||||
diff --git a/Modules/clinic/_hashopenssl.c.h b/Modules/clinic/_hashopenssl.c.h
|
||||
index fb61a44..1e42b87 100644
|
||||
--- a/Modules/clinic/_hashopenssl.c.h
|
||||
+++ b/Modules/clinic/_hashopenssl.c.h
|
||||
@@ -743,6 +743,156 @@ exit:
|
||||
return return_value;
|
||||
}
|
||||
|
||||
+PyDoc_STRVAR(_hashlib_openssl_blake2b__doc__,
|
||||
+"openssl_blake2b($module, /, string=b\'\', *, usedforsecurity=True)\n"
|
||||
+"--\n"
|
||||
+"\n"
|
||||
+"Returns a blake2b hash object; optionally initialized with a string");
|
||||
+
|
||||
+#define _HASHLIB_OPENSSL_BLAKE2B_METHODDEF \
|
||||
+ {"openssl_blake2b", _PyCFunction_CAST(_hashlib_openssl_blake2b), METH_FASTCALL|METH_KEYWORDS, _hashlib_openssl_blake2b__doc__},
|
||||
+
|
||||
+static PyObject *
|
||||
+_hashlib_openssl_blake2b_impl(PyObject *module, PyObject *data_obj,
|
||||
+ int usedforsecurity);
|
||||
+
|
||||
+static PyObject *
|
||||
+_hashlib_openssl_blake2b(PyObject *module, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames)
|
||||
+{
|
||||
+ PyObject *return_value = NULL;
|
||||
+ #if defined(Py_BUILD_CORE) && !defined(Py_BUILD_CORE_MODULE)
|
||||
+
|
||||
+ #define NUM_KEYWORDS 2
|
||||
+ static struct {
|
||||
+ PyGC_Head _this_is_not_used;
|
||||
+ PyObject_VAR_HEAD
|
||||
+ PyObject *ob_item[NUM_KEYWORDS];
|
||||
+ } _kwtuple = {
|
||||
+ .ob_base = PyVarObject_HEAD_INIT(&PyTuple_Type, NUM_KEYWORDS)
|
||||
+ .ob_item = { &_Py_ID(string), &_Py_ID(usedforsecurity), },
|
||||
+ };
|
||||
+ #undef NUM_KEYWORDS
|
||||
+ #define KWTUPLE (&_kwtuple.ob_base.ob_base)
|
||||
+
|
||||
+ #else // !Py_BUILD_CORE
|
||||
+ # define KWTUPLE NULL
|
||||
+ #endif // !Py_BUILD_CORE
|
||||
+
|
||||
+ static const char * const _keywords[] = {"string", "usedforsecurity", NULL};
|
||||
+ static _PyArg_Parser _parser = {
|
||||
+ .keywords = _keywords,
|
||||
+ .fname = "openssl_blake2b",
|
||||
+ .kwtuple = KWTUPLE,
|
||||
+ };
|
||||
+ #undef KWTUPLE
|
||||
+ PyObject *argsbuf[2];
|
||||
+ Py_ssize_t noptargs = nargs + (kwnames ? PyTuple_GET_SIZE(kwnames) : 0) - 0;
|
||||
+ PyObject *data_obj = NULL;
|
||||
+ int usedforsecurity = 1;
|
||||
+
|
||||
+ args = _PyArg_UnpackKeywords(args, nargs, NULL, kwnames, &_parser, 0, 1, 0, argsbuf);
|
||||
+ if (!args) {
|
||||
+ goto exit;
|
||||
+ }
|
||||
+ if (!noptargs) {
|
||||
+ goto skip_optional_pos;
|
||||
+ }
|
||||
+ if (args[0]) {
|
||||
+ data_obj = args[0];
|
||||
+ if (!--noptargs) {
|
||||
+ goto skip_optional_pos;
|
||||
+ }
|
||||
+ }
|
||||
+skip_optional_pos:
|
||||
+ if (!noptargs) {
|
||||
+ goto skip_optional_kwonly;
|
||||
+ }
|
||||
+ usedforsecurity = PyObject_IsTrue(args[1]);
|
||||
+ if (usedforsecurity < 0) {
|
||||
+ goto exit;
|
||||
+ }
|
||||
+skip_optional_kwonly:
|
||||
+ return_value = _hashlib_openssl_blake2b_impl(module, data_obj, usedforsecurity);
|
||||
+
|
||||
+exit:
|
||||
+ return return_value;
|
||||
+}
|
||||
+
|
||||
+PyDoc_STRVAR(_hashlib_openssl_blake2s__doc__,
|
||||
+"openssl_blake2s($module, /, string=b\'\', *, usedforsecurity=True)\n"
|
||||
+"--\n"
|
||||
+"\n"
|
||||
+"Returns a blake2s hash object; optionally initialized with a string");
|
||||
+
|
||||
+#define _HASHLIB_OPENSSL_BLAKE2S_METHODDEF \
|
||||
+ {"openssl_blake2s", _PyCFunction_CAST(_hashlib_openssl_blake2s), METH_FASTCALL|METH_KEYWORDS, _hashlib_openssl_blake2s__doc__},
|
||||
+
|
||||
+static PyObject *
|
||||
+_hashlib_openssl_blake2s_impl(PyObject *module, PyObject *data_obj,
|
||||
+ int usedforsecurity);
|
||||
+
|
||||
+static PyObject *
|
||||
+_hashlib_openssl_blake2s(PyObject *module, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames)
|
||||
+{
|
||||
+ PyObject *return_value = NULL;
|
||||
+ #if defined(Py_BUILD_CORE) && !defined(Py_BUILD_CORE_MODULE)
|
||||
+
|
||||
+ #define NUM_KEYWORDS 2
|
||||
+ static struct {
|
||||
+ PyGC_Head _this_is_not_used;
|
||||
+ PyObject_VAR_HEAD
|
||||
+ PyObject *ob_item[NUM_KEYWORDS];
|
||||
+ } _kwtuple = {
|
||||
+ .ob_base = PyVarObject_HEAD_INIT(&PyTuple_Type, NUM_KEYWORDS)
|
||||
+ .ob_item = { &_Py_ID(string), &_Py_ID(usedforsecurity), },
|
||||
+ };
|
||||
+ #undef NUM_KEYWORDS
|
||||
+ #define KWTUPLE (&_kwtuple.ob_base.ob_base)
|
||||
+
|
||||
+ #else // !Py_BUILD_CORE
|
||||
+ # define KWTUPLE NULL
|
||||
+ #endif // !Py_BUILD_CORE
|
||||
+
|
||||
+ static const char * const _keywords[] = {"string", "usedforsecurity", NULL};
|
||||
+ static _PyArg_Parser _parser = {
|
||||
+ .keywords = _keywords,
|
||||
+ .fname = "openssl_blake2s",
|
||||
+ .kwtuple = KWTUPLE,
|
||||
+ };
|
||||
+ #undef KWTUPLE
|
||||
+ PyObject *argsbuf[2];
|
||||
+ Py_ssize_t noptargs = nargs + (kwnames ? PyTuple_GET_SIZE(kwnames) : 0) - 0;
|
||||
+ PyObject *data_obj = NULL;
|
||||
+ int usedforsecurity = 1;
|
||||
+
|
||||
+ args = _PyArg_UnpackKeywords(args, nargs, NULL, kwnames, &_parser, 0, 1, 0, argsbuf);
|
||||
+ if (!args) {
|
||||
+ goto exit;
|
||||
+ }
|
||||
+ if (!noptargs) {
|
||||
+ goto skip_optional_pos;
|
||||
+ }
|
||||
+ if (args[0]) {
|
||||
+ data_obj = args[0];
|
||||
+ if (!--noptargs) {
|
||||
+ goto skip_optional_pos;
|
||||
+ }
|
||||
+ }
|
||||
+skip_optional_pos:
|
||||
+ if (!noptargs) {
|
||||
+ goto skip_optional_kwonly;
|
||||
+ }
|
||||
+ usedforsecurity = PyObject_IsTrue(args[1]);
|
||||
+ if (usedforsecurity < 0) {
|
||||
+ goto exit;
|
||||
+ }
|
||||
+skip_optional_kwonly:
|
||||
+ return_value = _hashlib_openssl_blake2s_impl(module, data_obj, usedforsecurity);
|
||||
+
|
||||
+exit:
|
||||
+ return return_value;
|
||||
+}
|
||||
+
|
||||
#if defined(PY_OPENSSL_HAS_SHA3)
|
||||
|
||||
PyDoc_STRVAR(_hashlib_openssl_sha3_224__doc__,
|
||||
@@ -1851,4 +2001,4 @@ exit:
|
||||
#ifndef _HASHLIB_SCRYPT_METHODDEF
|
||||
#define _HASHLIB_SCRYPT_METHODDEF
|
||||
#endif /* !defined(_HASHLIB_SCRYPT_METHODDEF) */
|
||||
-/*[clinic end generated code: output=b339e255db698147 input=a9049054013a1b77]*/
|
||||
+/*[clinic end generated code: output=1d988d457a8beebe input=a9049054013a1b77]*/
|
||||
--
|
||||
2.45.0
|
||||
|
||||
|
||||
From 6872b634078a2c69644235781ebffb07f8edcb83 Mon Sep 17 00:00:00 2001
|
||||
From: Petr Viktorin <pviktori@redhat.com>
|
||||
Date: Thu, 25 Jul 2019 17:19:06 +0200
|
||||
Subject: [PATCH 2/5] Disable Python's hash implementations in FIPS mode,
|
||||
forcing OpenSSL
|
||||
|
||||
---
|
||||
Lib/hashlib.py | 11 +++++++----
|
||||
Lib/test/test_hashlib.py | 17 ++++++++++++-----
|
||||
Modules/_blake2/blake2b_impl.c | 4 ++++
|
||||
Modules/_blake2/blake2module.c | 5 ++++-
|
||||
Modules/_blake2/blake2s_impl.c | 4 ++++
|
||||
Modules/hashlib.h | 23 +++++++++++++++++++++++
|
||||
configure.ac | 3 ++-
|
||||
7 files changed, 56 insertions(+), 11 deletions(-)
|
||||
|
||||
diff --git a/Lib/hashlib.py b/Lib/hashlib.py
|
||||
index 1b16441..9897402 100644
|
||||
--- a/Lib/hashlib.py
|
||||
+++ b/Lib/hashlib.py
|
||||
@@ -70,14 +70,17 @@ __all__ = __always_supported + ('new', 'algorithms_guaranteed',
|
||||
|
||||
__builtin_constructor_cache = {}
|
||||
|
||||
-# Prefer our blake2 implementation
|
||||
+# Prefer our blake2 implementation (unless in FIPS mode)
|
||||
# OpenSSL 1.1.0 comes with a limited implementation of blake2b/s. The OpenSSL
|
||||
# implementations neither support keyed blake2 (blake2 MAC) nor advanced
|
||||
# features like salt, personalization, or tree hashing. OpenSSL hash-only
|
||||
# variants are available as 'blake2b512' and 'blake2s256', though.
|
||||
-__block_openssl_constructor = {
|
||||
- 'blake2b', 'blake2s',
|
||||
-}
|
||||
+import _hashlib
|
||||
+if _hashlib.get_fips_mode():
|
||||
+ __block_openssl_constructor = set()
|
||||
+else:
|
||||
+ __block_openssl_constructor = {'blake2b', 'blake2s'}
|
||||
+
|
||||
|
||||
def __get_builtin_constructor(name):
|
||||
cache = __builtin_constructor_cache
|
||||
diff --git a/Lib/test/test_hashlib.py b/Lib/test/test_hashlib.py
|
||||
index 5921360..dd61a9a 100644
|
||||
--- a/Lib/test/test_hashlib.py
|
||||
+++ b/Lib/test/test_hashlib.py
|
||||
@@ -35,14 +35,15 @@ else:
|
||||
m.strip() for m in builtin_hashes.strip('"').lower().split(",")
|
||||
}
|
||||
|
||||
-# hashlib with and without OpenSSL backend for PBKDF2
|
||||
-# only import builtin_hashlib when all builtin hashes are available.
|
||||
-# Otherwise import prints noise on stderr
|
||||
+# RHEL: `_hashlib` is always importable and `hashlib` can't be imported
|
||||
+# without it.
|
||||
openssl_hashlib = import_fresh_module('hashlib', fresh=['_hashlib'])
|
||||
-if builtin_hashes == default_builtin_hashes:
|
||||
+try:
|
||||
builtin_hashlib = import_fresh_module('hashlib', blocked=['_hashlib'])
|
||||
-else:
|
||||
+except ImportError:
|
||||
builtin_hashlib = None
|
||||
+else:
|
||||
+ raise AssertionError('hashlib is importable without _hashlib')
|
||||
|
||||
try:
|
||||
from _hashlib import HASH, HASHXOF, openssl_md_meth_names, get_fips_mode
|
||||
@@ -114,6 +115,12 @@ class HashLibTestCase(unittest.TestCase):
|
||||
except ModuleNotFoundError as error:
|
||||
if self._warn_on_extension_import and module_name in builtin_hashes:
|
||||
warnings.warn(f'Did a C extension fail to compile? {error}')
|
||||
+ except ImportError:
|
||||
+ if get_fips_mode() and module_name == '_blake2':
|
||||
+ # blake2b & blake2s disabled under FIPS
|
||||
+ return None
|
||||
+ else:
|
||||
+ raise
|
||||
return None
|
||||
|
||||
def __init__(self, *args, **kwargs):
|
||||
diff --git a/Modules/_blake2/blake2b_impl.c b/Modules/_blake2/blake2b_impl.c
|
||||
index c2cac98..55b1677 100644
|
||||
--- a/Modules/_blake2/blake2b_impl.c
|
||||
+++ b/Modules/_blake2/blake2b_impl.c
|
||||
@@ -98,6 +98,8 @@ py_blake2b_new_impl(PyTypeObject *type, PyObject *data, int digest_size,
|
||||
BLAKE2bObject *self = NULL;
|
||||
Py_buffer buf;
|
||||
|
||||
+ FAIL_RETURN_IN_FIPS_MODE(PyExc_ValueError, "_blake2");
|
||||
+
|
||||
self = new_BLAKE2bObject(type);
|
||||
if (self == NULL) {
|
||||
goto error;
|
||||
@@ -276,6 +278,8 @@ _blake2_blake2b_update(BLAKE2bObject *self, PyObject *data)
|
||||
{
|
||||
Py_buffer buf;
|
||||
|
||||
+ FAIL_RETURN_IN_FIPS_MODE(PyExc_ValueError, "_blake2");
|
||||
+
|
||||
GET_BUFFER_VIEW_OR_ERROUT(data, &buf);
|
||||
|
||||
if (self->lock == NULL && buf.len >= HASHLIB_GIL_MINSIZE)
|
||||
diff --git a/Modules/_blake2/blake2module.c b/Modules/_blake2/blake2module.c
|
||||
index 5df9fd3..10736c2 100644
|
||||
--- a/Modules/_blake2/blake2module.c
|
||||
+++ b/Modules/_blake2/blake2module.c
|
||||
@@ -13,6 +13,7 @@
|
||||
#endif
|
||||
|
||||
#include "Python.h"
|
||||
+#include "../hashlib.h"
|
||||
#include "blake2module.h"
|
||||
|
||||
extern PyType_Spec blake2b_type_spec;
|
||||
@@ -83,6 +84,7 @@ _blake2_free(void *module)
|
||||
static int
|
||||
blake2_exec(PyObject *m)
|
||||
{
|
||||
+
|
||||
Blake2State* st = blake2_get_state(m);
|
||||
|
||||
st->blake2b_type = (PyTypeObject *)PyType_FromModuleAndSpec(
|
||||
@@ -155,5 +157,6 @@ static struct PyModuleDef blake2_module = {
|
||||
PyMODINIT_FUNC
|
||||
PyInit__blake2(void)
|
||||
{
|
||||
+ FAIL_RETURN_IN_FIPS_MODE(PyExc_ImportError, "blake2");
|
||||
return PyModuleDef_Init(&blake2_module);
|
||||
-}
|
||||
+}
|
||||
\ No newline at end of file
|
||||
diff --git a/Modules/_blake2/blake2s_impl.c b/Modules/_blake2/blake2s_impl.c
|
||||
index 1c47328..cd4a202 100644
|
||||
--- a/Modules/_blake2/blake2s_impl.c
|
||||
+++ b/Modules/_blake2/blake2s_impl.c
|
||||
@@ -98,6 +98,8 @@ py_blake2s_new_impl(PyTypeObject *type, PyObject *data, int digest_size,
|
||||
BLAKE2sObject *self = NULL;
|
||||
Py_buffer buf;
|
||||
|
||||
+ FAIL_RETURN_IN_FIPS_MODE(PyExc_ValueError, "_blake2");
|
||||
+
|
||||
self = new_BLAKE2sObject(type);
|
||||
if (self == NULL) {
|
||||
goto error;
|
||||
@@ -276,6 +278,8 @@ _blake2_blake2s_update(BLAKE2sObject *self, PyObject *data)
|
||||
{
|
||||
Py_buffer buf;
|
||||
|
||||
+ FAIL_RETURN_IN_FIPS_MODE(PyExc_ValueError, "_blake2");
|
||||
+
|
||||
GET_BUFFER_VIEW_OR_ERROUT(data, &buf);
|
||||
|
||||
if (self->lock == NULL && buf.len >= HASHLIB_GIL_MINSIZE)
|
||||
diff --git a/Modules/hashlib.h b/Modules/hashlib.h
|
||||
index a8bad9d..1b1d937 100644
|
||||
--- a/Modules/hashlib.h
|
||||
+++ b/Modules/hashlib.h
|
||||
@@ -1,5 +1,11 @@
|
||||
/* Common code for use by all hashlib related modules. */
|
||||
|
||||
+// RHEL: use OpenSSL to turn off unsupported modules under FIPS mode
|
||||
+// EVP_default_properties_is_fips_enabled() on OpenSSL >= 3.0.0
|
||||
+#include <openssl/evp.h>
|
||||
+// FIPS_mode() on OpenSSL < 3.0.0
|
||||
+#include <openssl/crypto.h>
|
||||
+
|
||||
/*
|
||||
* Given a PyObject* obj, fill in the Py_buffer* viewp with the result
|
||||
* of PyObject_GetBuffer. Sets an exception and issues the erraction
|
||||
@@ -64,3 +70,20 @@
|
||||
* to allow the user to optimize based on the platform they're using. */
|
||||
#define HASHLIB_GIL_MINSIZE 2048
|
||||
|
||||
+__attribute__((__unused__))
|
||||
+static int
|
||||
+_Py_hashlib_fips_error(PyObject *exc, char *name) {
|
||||
+#if OPENSSL_VERSION_NUMBER >= 0x30000000L
|
||||
+ if (EVP_default_properties_is_fips_enabled(NULL)) {
|
||||
+#else
|
||||
+ if (FIPS_mode()) {
|
||||
+#endif
|
||||
+ PyErr_Format(exc, "%s is not available in FIPS mode", name);
|
||||
+ return 1;
|
||||
+ }
|
||||
+ return 0;
|
||||
+}
|
||||
+
|
||||
+#define FAIL_RETURN_IN_FIPS_MODE(exc, name) do { \
|
||||
+ if (_Py_hashlib_fips_error(exc, name)) return NULL; \
|
||||
+} while (0)
|
||||
diff --git a/configure.ac b/configure.ac
|
||||
index 65ad1c2..b5f9ab5 100644
|
||||
--- a/configure.ac
|
||||
+++ b/configure.ac
|
||||
@@ -7463,7 +7463,8 @@ PY_STDLIB_MOD([_sha2],
|
||||
PY_STDLIB_MOD([_sha3], [test "$with_builtin_sha3" = yes])
|
||||
PY_STDLIB_MOD([_blake2],
|
||||
[test "$with_builtin_blake2" = yes], [],
|
||||
- [$LIBB2_CFLAGS], [$LIBB2_LIBS])
|
||||
+ [$LIBB2_CFLAGS $OPENSSL_INCLUDES],
|
||||
+ [$LIBB2_LIBS $OPENSSL_LDFLAGS $OPENSSL_LDFLAGS_RPATH $OPENSSL_LIBS])
|
||||
|
||||
PY_STDLIB_MOD([_crypt],
|
||||
[], [test "$ac_cv_crypt_crypt" = yes],
|
||||
--
|
||||
2.45.0
|
||||
|
||||
|
||||
From f904abdd7a607282c2cdfd18288045cedfa28414 Mon Sep 17 00:00:00 2001
|
||||
From: Charalampos Stratakis <cstratak@redhat.com>
|
||||
Date: Fri, 29 Jan 2021 14:16:21 +0100
|
||||
Subject: [PATCH 3/5] Use python's fall back crypto implementations only if we
|
||||
are not in FIPS mode
|
||||
|
||||
---
|
||||
Lib/hashlib.py | 8 +++++---
|
||||
Lib/test/test_hashlib.py | 22 +++++++++++++++++++++-
|
||||
2 files changed, 26 insertions(+), 4 deletions(-)
|
||||
|
||||
diff --git a/Lib/hashlib.py b/Lib/hashlib.py
|
||||
index 9897402..159215a 100644
|
||||
--- a/Lib/hashlib.py
|
||||
+++ b/Lib/hashlib.py
|
||||
@@ -83,6 +83,8 @@ else:
|
||||
|
||||
|
||||
def __get_builtin_constructor(name):
|
||||
+ if _hashlib.get_fips_mode():
|
||||
+ raise ValueError('unsupported hash type ' + name + '(in FIPS mode)')
|
||||
cache = __builtin_constructor_cache
|
||||
constructor = cache.get(name)
|
||||
if constructor is not None:
|
||||
@@ -178,21 +180,21 @@ try:
|
||||
except ImportError:
|
||||
_hashlib = None
|
||||
new = __py_new
|
||||
- __get_hash = __get_builtin_constructor
|
||||
+ raise # importing _hashlib should never fail on RHEL
|
||||
|
||||
try:
|
||||
# OpenSSL's PKCS5_PBKDF2_HMAC requires OpenSSL 1.0+ with HMAC and SHA
|
||||
from _hashlib import pbkdf2_hmac
|
||||
__all__ += ('pbkdf2_hmac',)
|
||||
except ImportError:
|
||||
- pass
|
||||
+ raise # importing _hashlib should never fail on RHEL
|
||||
|
||||
|
||||
try:
|
||||
# OpenSSL's scrypt requires OpenSSL 1.1+
|
||||
from _hashlib import scrypt
|
||||
except ImportError:
|
||||
- pass
|
||||
+ raise # importing _hashlib should never fail on RHEL
|
||||
|
||||
|
||||
def file_digest(fileobj, digest, /, *, _bufsize=2**18):
|
||||
diff --git a/Lib/test/test_hashlib.py b/Lib/test/test_hashlib.py
|
||||
index dd61a9a..6031b02 100644
|
||||
--- a/Lib/test/test_hashlib.py
|
||||
+++ b/Lib/test/test_hashlib.py
|
||||
@@ -167,7 +167,13 @@ class HashLibTestCase(unittest.TestCase):
|
||||
constructors.add(constructor)
|
||||
|
||||
def add_builtin_constructor(name):
|
||||
- constructor = getattr(hashlib, "__get_builtin_constructor")(name)
|
||||
+ try:
|
||||
+ constructor = getattr(hashlib, "__get_builtin_constructor")(name)
|
||||
+ except ValueError:
|
||||
+ if get_fips_mode():
|
||||
+ return
|
||||
+ else:
|
||||
+ raise
|
||||
self.constructors_to_test[name].add(constructor)
|
||||
|
||||
_md5 = self._conditional_import_module('_md5')
|
||||
@@ -260,6 +266,20 @@ class HashLibTestCase(unittest.TestCase):
|
||||
def test_new_upper_to_lower(self):
|
||||
self.assertEqual(hashlib.new("SHA256").name, "sha256")
|
||||
|
||||
+ @unittest.skipUnless(get_fips_mode(), "Builtin constructor only usable in FIPS mode")
|
||||
+ def test_get_builtin_constructor_fips(self):
|
||||
+ get_builtin_constructor = getattr(hashlib,
|
||||
+ '__get_builtin_constructor')
|
||||
+ with self.assertRaises(ValueError):
|
||||
+ get_builtin_constructor('md5')
|
||||
+ with self.assertRaises(ValueError):
|
||||
+ get_builtin_constructor('sha256')
|
||||
+ with self.assertRaises(ValueError):
|
||||
+ get_builtin_constructor('blake2s')
|
||||
+ with self.assertRaises(ValueError):
|
||||
+ get_builtin_constructor('test')
|
||||
+
|
||||
+ @unittest.skipIf(get_fips_mode(), "No builtin constructors in FIPS mode")
|
||||
def test_get_builtin_constructor(self):
|
||||
get_builtin_constructor = getattr(hashlib,
|
||||
'__get_builtin_constructor')
|
||||
--
|
||||
2.45.0
|
||||
|
||||
|
||||
From 9bf0a53b7831409613c44fd7feecb56476f5e5e7 Mon Sep 17 00:00:00 2001
|
||||
From: Charalampos Stratakis <cstratak@redhat.com>
|
||||
Date: Wed, 31 Jul 2019 15:43:43 +0200
|
||||
Subject: [PATCH 4/5] Test equivalence of hashes for the various digests with
|
||||
usedforsecurity=True/False
|
||||
|
||||
---
|
||||
Lib/test/test_fips.py | 24 ++++++++++++++++++++
|
||||
Lib/test/test_hashlib.py | 47 ++++++++++++++++++++++++++++++----------
|
||||
2 files changed, 60 insertions(+), 11 deletions(-)
|
||||
create mode 100644 Lib/test/test_fips.py
|
||||
|
||||
diff --git a/Lib/test/test_fips.py b/Lib/test/test_fips.py
|
||||
new file mode 100644
|
||||
index 0000000..1f99dd7
|
||||
--- /dev/null
|
||||
+++ b/Lib/test/test_fips.py
|
||||
@@ -0,0 +1,24 @@
|
||||
+import unittest
|
||||
+import hashlib, _hashlib
|
||||
+
|
||||
+
|
||||
+
|
||||
+class HashlibFipsTests(unittest.TestCase):
|
||||
+
|
||||
+ @unittest.skipUnless(_hashlib.get_fips_mode(), "Test only when FIPS is enabled")
|
||||
+ def test_fips_imports(self):
|
||||
+ """blake2s and blake2b should fail to import in FIPS mode
|
||||
+ """
|
||||
+ with self.assertRaises(ValueError, msg='blake2s not available in FIPS'):
|
||||
+ m = hashlib.blake2s()
|
||||
+ with self.assertRaises(ValueError, msg='blake2b not available in FIPS'):
|
||||
+ m = hashlib.blake2b()
|
||||
+
|
||||
+ @unittest.skipIf(_hashlib.get_fips_mode(), "blake2 hashes are not available under FIPS")
|
||||
+ def test_blake2_hashes(self):
|
||||
+ self.assertEqual(hashlib.blake2b(b'abc').hexdigest(), _hashlib.openssl_blake2b(b'abc').hexdigest())
|
||||
+ self.assertEqual(hashlib.blake2s(b'abc').hexdigest(), _hashlib.openssl_blake2s(b'abc').hexdigest())
|
||||
+
|
||||
+
|
||||
+if __name__ == "__main__":
|
||||
+ unittest.main()
|
||||
diff --git a/Lib/test/test_hashlib.py b/Lib/test/test_hashlib.py
|
||||
index 6031b02..5bd5297 100644
|
||||
--- a/Lib/test/test_hashlib.py
|
||||
+++ b/Lib/test/test_hashlib.py
|
||||
@@ -23,6 +23,7 @@ from test.support import os_helper
|
||||
from test.support import requires_resource
|
||||
from test.support import threading_helper
|
||||
from http.client import HTTPException
|
||||
+from functools import partial
|
||||
|
||||
|
||||
default_builtin_hashes = {'md5', 'sha1', 'sha256', 'sha512', 'sha3', 'blake2'}
|
||||
@@ -55,6 +56,11 @@ except ImportError:
|
||||
def get_fips_mode():
|
||||
return 0
|
||||
|
||||
+if get_fips_mode():
|
||||
+ FIPS_DISABLED = {'md5'}
|
||||
+else:
|
||||
+ FIPS_DISABLED = set()
|
||||
+
|
||||
try:
|
||||
import _blake2
|
||||
except ImportError:
|
||||
@@ -94,6 +100,11 @@ def read_vectors(hash_name):
|
||||
parts[0] = bytes.fromhex(parts[0])
|
||||
yield parts
|
||||
|
||||
+def _is_blake2_constructor(constructor):
|
||||
+ if isinstance(constructor, partial):
|
||||
+ constructor = constructor.func
|
||||
+ return getattr(constructor, '__name__', '').startswith('openssl_blake2')
|
||||
+
|
||||
|
||||
class HashLibTestCase(unittest.TestCase):
|
||||
supported_hash_names = ( 'md5', 'MD5', 'sha1', 'SHA1',
|
||||
@@ -138,15 +149,21 @@ class HashLibTestCase(unittest.TestCase):
|
||||
continue
|
||||
self.constructors_to_test[algorithm] = set()
|
||||
|
||||
+ def _add_constructor(algorithm, constructor):
|
||||
+ constructors.add(partial(constructor, usedforsecurity=False))
|
||||
+ if algorithm not in FIPS_DISABLED:
|
||||
+ constructors.add(constructor)
|
||||
+ constructors.add(partial(constructor, usedforsecurity=True))
|
||||
+
|
||||
# For each algorithm, test the direct constructor and the use
|
||||
# of hashlib.new given the algorithm name.
|
||||
for algorithm, constructors in self.constructors_to_test.items():
|
||||
- constructors.add(getattr(hashlib, algorithm))
|
||||
+ _add_constructor(algorithm, getattr(hashlib, algorithm))
|
||||
def _test_algorithm_via_hashlib_new(data=None, _alg=algorithm, **kwargs):
|
||||
if data is None:
|
||||
return hashlib.new(_alg, **kwargs)
|
||||
return hashlib.new(_alg, data, **kwargs)
|
||||
- constructors.add(_test_algorithm_via_hashlib_new)
|
||||
+ _add_constructor(algorithm, _test_algorithm_via_hashlib_new)
|
||||
|
||||
_hashlib = self._conditional_import_module('_hashlib')
|
||||
self._hashlib = _hashlib
|
||||
@@ -158,13 +175,7 @@ class HashLibTestCase(unittest.TestCase):
|
||||
for algorithm, constructors in self.constructors_to_test.items():
|
||||
constructor = getattr(_hashlib, 'openssl_'+algorithm, None)
|
||||
if constructor:
|
||||
- try:
|
||||
- constructor()
|
||||
- except ValueError:
|
||||
- # default constructor blocked by crypto policy
|
||||
- pass
|
||||
- else:
|
||||
- constructors.add(constructor)
|
||||
+ _add_constructor(algorithm, constructor)
|
||||
|
||||
def add_builtin_constructor(name):
|
||||
try:
|
||||
@@ -340,6 +351,8 @@ class HashLibTestCase(unittest.TestCase):
|
||||
self.assertIn(h.name, self.supported_hash_names)
|
||||
else:
|
||||
self.assertNotIn(h.name, self.supported_hash_names)
|
||||
+ if not h.name.startswith('blake2') and h.name not in FIPS_DISABLED:
|
||||
+ self.assertEqual(h.name, hashlib.new(h.name).name)
|
||||
self.assertEqual(
|
||||
h.name,
|
||||
hashlib.new(h.name, usedforsecurity=False).name
|
||||
@@ -404,8 +417,10 @@ class HashLibTestCase(unittest.TestCase):
|
||||
for hash_object_constructor in constructors:
|
||||
|
||||
# OpenSSL's blake2s & blake2b don't support `key`
|
||||
- _name = hash_object_constructor.__name__
|
||||
- if 'key' in kwargs and _name.startswith('openssl_blake2'):
|
||||
+ if (
|
||||
+ 'key' in kwargs
|
||||
+ and _is_blake2_constructor(hash_object_constructor)
|
||||
+ ):
|
||||
return
|
||||
|
||||
m = hash_object_constructor(data, **kwargs)
|
||||
@@ -1036,6 +1051,16 @@ class HashLibTestCase(unittest.TestCase):
|
||||
with self.assertRaisesRegex(TypeError, "immutable type"):
|
||||
hash_type.value = False
|
||||
|
||||
+ @unittest.skipUnless(get_fips_mode(), 'Needs FIPS mode.')
|
||||
+ def test_usedforsecurity_repeat(self):
|
||||
+ """Make sure usedforsecurity flag isn't copied to other contexts"""
|
||||
+ for i in range(3):
|
||||
+ for cons in hashlib.md5, partial(hashlib.new, 'md5'):
|
||||
+ self.assertRaises(ValueError, cons)
|
||||
+ self.assertRaises(ValueError, partial(cons, usedforsecurity=True))
|
||||
+ self.assertEqual(cons(usedforsecurity=False).hexdigest(),
|
||||
+ 'd41d8cd98f00b204e9800998ecf8427e')
|
||||
+
|
||||
|
||||
class KDFTests(unittest.TestCase):
|
||||
|
||||
--
|
||||
2.45.0
|
||||
|
||||
|
||||
From 8a76571515a64a57b4ea0586ae8376cf2ef0ac60 Mon Sep 17 00:00:00 2001
|
||||
From: Petr Viktorin <pviktori@redhat.com>
|
||||
Date: Mon, 26 Aug 2019 19:39:48 +0200
|
||||
Subject: [PATCH 5/5] Guard against Python HMAC in FIPS mode
|
||||
|
||||
---
|
||||
Lib/hmac.py | 13 +++++++++----
|
||||
Lib/test/test_hmac.py | 10 ++++++++++
|
||||
2 files changed, 19 insertions(+), 4 deletions(-)
|
||||
|
||||
diff --git a/Lib/hmac.py b/Lib/hmac.py
|
||||
index 8b4eb2f..e8e4864 100644
|
||||
--- a/Lib/hmac.py
|
||||
+++ b/Lib/hmac.py
|
||||
@@ -16,8 +16,9 @@ else:
|
||||
|
||||
import hashlib as _hashlib
|
||||
|
||||
-trans_5C = bytes((x ^ 0x5C) for x in range(256))
|
||||
-trans_36 = bytes((x ^ 0x36) for x in range(256))
|
||||
+if not _hashopenssl.get_fips_mode():
|
||||
+ trans_5C = bytes((x ^ 0x5C) for x in range(256))
|
||||
+ trans_36 = bytes((x ^ 0x36) for x in range(256))
|
||||
|
||||
# The size of the digests returned by HMAC depends on the underlying
|
||||
# hashing module used. Use digest_size from the instance of HMAC instead.
|
||||
@@ -48,17 +49,18 @@ class HMAC:
|
||||
msg argument. Passing it as a keyword argument is
|
||||
recommended, though not required for legacy API reasons.
|
||||
"""
|
||||
-
|
||||
if not isinstance(key, (bytes, bytearray)):
|
||||
raise TypeError("key: expected bytes or bytearray, but got %r" % type(key).__name__)
|
||||
|
||||
if not digestmod:
|
||||
raise TypeError("Missing required argument 'digestmod'.")
|
||||
|
||||
- if _hashopenssl and isinstance(digestmod, (str, _functype)):
|
||||
+ if _hashopenssl.get_fips_mode() or (_hashopenssl and isinstance(digestmod, (str, _functype))):
|
||||
try:
|
||||
self._init_hmac(key, msg, digestmod)
|
||||
except _hashopenssl.UnsupportedDigestmodError:
|
||||
+ if _hashopenssl.get_fips_mode():
|
||||
+ raise
|
||||
self._init_old(key, msg, digestmod)
|
||||
else:
|
||||
self._init_old(key, msg, digestmod)
|
||||
@@ -69,6 +71,9 @@ class HMAC:
|
||||
self.block_size = self._hmac.block_size
|
||||
|
||||
def _init_old(self, key, msg, digestmod):
|
||||
+ if _hashopenssl.get_fips_mode():
|
||||
+ # In FIPS mode, use OpenSSL anyway: raise the appropriate error
|
||||
+ return self._init_hmac(key, msg, digestmod)
|
||||
if callable(digestmod):
|
||||
digest_cons = digestmod
|
||||
elif isinstance(digestmod, str):
|
||||
diff --git a/Lib/test/test_hmac.py b/Lib/test/test_hmac.py
|
||||
index 1502fba..7997073 100644
|
||||
--- a/Lib/test/test_hmac.py
|
||||
+++ b/Lib/test/test_hmac.py
|
||||
@@ -5,6 +5,7 @@ import hashlib
|
||||
import unittest
|
||||
import unittest.mock
|
||||
import warnings
|
||||
+from _hashlib import get_fips_mode
|
||||
|
||||
from test.support import hashlib_helper, check_disallow_instantiation
|
||||
|
||||
@@ -351,6 +352,11 @@ class TestVectorsTestCase(unittest.TestCase):
|
||||
def digest(self):
|
||||
return self._x.digest()
|
||||
|
||||
+ if get_fips_mode():
|
||||
+ with self.assertRaises(ValueError):
|
||||
+ hmac.HMAC(b'a', b'b', digestmod=MockCrazyHash)
|
||||
+ return
|
||||
+
|
||||
with warnings.catch_warnings():
|
||||
warnings.simplefilter('error', RuntimeWarning)
|
||||
with self.assertRaises(RuntimeWarning):
|
||||
@@ -373,6 +379,7 @@ class TestVectorsTestCase(unittest.TestCase):
|
||||
with self.assertRaisesRegex(TypeError, r'required.*digestmod'):
|
||||
hmac.HMAC(key, msg=data, digestmod='')
|
||||
|
||||
+ @unittest.skipIf(get_fips_mode(), "No builtin constructors in FIPS mode")
|
||||
def test_with_fallback(self):
|
||||
cache = getattr(hashlib, '__builtin_constructor_cache')
|
||||
try:
|
||||
@@ -453,6 +460,7 @@ class ConstructorTestCase(unittest.TestCase):
|
||||
with self.assertRaisesRegex(TypeError, "immutable type"):
|
||||
C_HMAC.value = None
|
||||
|
||||
+ @unittest.skipIf(get_fips_mode(), "_sha256 unavailable in FIPS mode")
|
||||
@unittest.skipUnless(sha256_module is not None, 'need _sha256')
|
||||
def test_with_sha256_module(self):
|
||||
h = hmac.HMAC(b"key", b"hash this!", digestmod=sha256_module.sha256)
|
||||
@@ -489,6 +497,7 @@ class UpdateTestCase(unittest.TestCase):
|
||||
|
||||
class CopyTestCase(unittest.TestCase):
|
||||
|
||||
+ @unittest.skipIf(get_fips_mode(), "_init_old unavailable in FIPS mode")
|
||||
@hashlib_helper.requires_hashdigest('sha256')
|
||||
def test_attributes_old(self):
|
||||
# Testing if attributes are of same type.
|
||||
@@ -500,6 +509,7 @@ class CopyTestCase(unittest.TestCase):
|
||||
self.assertEqual(type(h1._outer), type(h2._outer),
|
||||
"Types of outer don't match.")
|
||||
|
||||
+ @unittest.skipIf(get_fips_mode(), "_init_old unavailable in FIPS mode")
|
||||
@hashlib_helper.requires_hashdigest('sha256')
|
||||
def test_realcopy_old(self):
|
||||
# Testing if the copy method created a real copy.
|
||||
--
|
||||
2.45.0
|
||||
|
@@ -0,0 +1,103 @@
|
||||
From 0000000000000000000000000000000000000000 Mon Sep 17 00:00:00 2001
|
||||
From: =?UTF-8?q?Tom=C3=A1=C5=A1=20Hrn=C4=8Diar?= <thrnciar@redhat.com>
|
||||
Date: Tue, 25 Oct 2022 12:02:33 +0200
|
||||
Subject: [PATCH] 00371: Revert "bpo-1596321: Fix threading._shutdown() for the
|
||||
main thread (GH-28549) (GH-28589)"
|
||||
|
||||
This reverts commit 38c67738c64304928c68d5c2bd78bbb01d979b94. It
|
||||
introduced a regression causing FreeIPA's tests to fail.
|
||||
|
||||
For more info see:
|
||||
https://bodhi.fedoraproject.org/updates/FEDORA-2021-e152ce5f31
|
||||
https://github.com/GrahamDumpleton/mod_wsgi/issues/730
|
||||
---
|
||||
Lib/test/test_threading.py | 33 ---------------------------------
|
||||
Lib/threading.py | 25 ++++++++-----------------
|
||||
2 files changed, 8 insertions(+), 50 deletions(-)
|
||||
|
||||
diff --git a/Lib/test/test_threading.py b/Lib/test/test_threading.py
|
||||
index 2e4b860b97..3066b23ee1 100644
|
||||
--- a/Lib/test/test_threading.py
|
||||
+++ b/Lib/test/test_threading.py
|
||||
@@ -1100,39 +1100,6 @@ def noop(): pass
|
||||
threading.Thread(target=noop).start()
|
||||
# Thread.join() is not called
|
||||
|
||||
- def test_import_from_another_thread(self):
|
||||
- # bpo-1596321: If the threading module is first import from a thread
|
||||
- # different than the main thread, threading._shutdown() must handle
|
||||
- # this case without logging an error at Python exit.
|
||||
- code = textwrap.dedent('''
|
||||
- import _thread
|
||||
- import sys
|
||||
-
|
||||
- event = _thread.allocate_lock()
|
||||
- event.acquire()
|
||||
-
|
||||
- def import_threading():
|
||||
- import threading
|
||||
- event.release()
|
||||
-
|
||||
- if 'threading' in sys.modules:
|
||||
- raise Exception('threading is already imported')
|
||||
-
|
||||
- _thread.start_new_thread(import_threading, ())
|
||||
-
|
||||
- # wait until the threading module is imported
|
||||
- event.acquire()
|
||||
- event.release()
|
||||
-
|
||||
- if 'threading' not in sys.modules:
|
||||
- raise Exception('threading is not imported')
|
||||
-
|
||||
- # don't wait until the thread completes
|
||||
- ''')
|
||||
- rc, out, err = assert_python_ok("-c", code)
|
||||
- self.assertEqual(out, b'')
|
||||
- self.assertEqual(err, b'')
|
||||
-
|
||||
def test_start_new_thread_at_finalization(self):
|
||||
code = """if 1:
|
||||
import _thread
|
||||
diff --git a/Lib/threading.py b/Lib/threading.py
|
||||
index 98cb43c697..ee647f8549 100644
|
||||
--- a/Lib/threading.py
|
||||
+++ b/Lib/threading.py
|
||||
@@ -1585,29 +1585,20 @@ def _shutdown():
|
||||
|
||||
global _SHUTTING_DOWN
|
||||
_SHUTTING_DOWN = True
|
||||
+ # Main thread
|
||||
+ tlock = _main_thread._tstate_lock
|
||||
+ # The main thread isn't finished yet, so its thread state lock can't have
|
||||
+ # been released.
|
||||
+ assert tlock is not None
|
||||
+ assert tlock.locked()
|
||||
+ tlock.release()
|
||||
+ _main_thread._stop()
|
||||
|
||||
# Call registered threading atexit functions before threads are joined.
|
||||
# Order is reversed, similar to atexit.
|
||||
for atexit_call in reversed(_threading_atexits):
|
||||
atexit_call()
|
||||
|
||||
- # Main thread
|
||||
- if _main_thread.ident == get_ident():
|
||||
- tlock = _main_thread._tstate_lock
|
||||
- # The main thread isn't finished yet, so its thread state lock can't
|
||||
- # have been released.
|
||||
- assert tlock is not None
|
||||
- assert tlock.locked()
|
||||
- tlock.release()
|
||||
- _main_thread._stop()
|
||||
- else:
|
||||
- # bpo-1596321: _shutdown() must be called in the main thread.
|
||||
- # If the threading module was not imported by the main thread,
|
||||
- # _main_thread is the thread which imported the threading module.
|
||||
- # In this case, ignore _main_thread, similar behavior than for threads
|
||||
- # spawned by C libraries or using _thread.start_new_thread().
|
||||
- pass
|
||||
-
|
||||
# Join all non-deamon threads
|
||||
while True:
|
||||
with _shutdown_locks_lock:
|
@@ -0,0 +1,243 @@
|
||||
From 73d2995223c725638d53b9cb8e1d26b82daf0874 Mon Sep 17 00:00:00 2001
|
||||
From: Petr Viktorin <encukou@gmail.com>
|
||||
Date: Mon, 6 Mar 2023 17:24:24 +0100
|
||||
Subject: [PATCH] CVE-2007-4559, PEP-706: Add filters for tarfile extraction
|
||||
(downstream)
|
||||
|
||||
Add and test RHEL-specific ways of configuring the default behavior: environment
|
||||
variable and config file.
|
||||
---
|
||||
Lib/tarfile.py | 47 +++++++++++++--
|
||||
Lib/test/test_shutil.py | 2 +-
|
||||
Lib/test/test_tarfile.py | 123 ++++++++++++++++++++++++++++++++++++++-
|
||||
3 files changed, 163 insertions(+), 9 deletions(-)
|
||||
|
||||
diff --git a/Lib/tarfile.py b/Lib/tarfile.py
|
||||
index 02f5e3b..f7109f3 100755
|
||||
--- a/Lib/tarfile.py
|
||||
+++ b/Lib/tarfile.py
|
||||
@@ -71,6 +71,13 @@ __all__ = ["TarFile", "TarInfo", "is_tarfile", "TarError", "ReadError",
|
||||
"OutsideDestinationError", "SpecialFileError", "AbsolutePathError",
|
||||
"LinkOutsideDestinationError"]
|
||||
|
||||
+# If true, use the safer (but backwards-incompatible) 'tar' extraction filter,
|
||||
+# rather than 'fully_trusted', by default.
|
||||
+# The emitted warning is changed to match.
|
||||
+_RH_SAFER_DEFAULT = True
|
||||
+
|
||||
+# System-wide configuration file
|
||||
+_CONFIG_FILENAME = '/etc/python/tarfile.cfg'
|
||||
|
||||
#---------------------------------------------------------
|
||||
# tar constants
|
||||
@@ -2217,11 +2224,41 @@ class TarFile(object):
|
||||
if filter is None:
|
||||
filter = self.extraction_filter
|
||||
if filter is None:
|
||||
- warnings.warn(
|
||||
- 'Python 3.14 will, by default, filter extracted tar '
|
||||
- + 'archives and reject files or modify their metadata. '
|
||||
- + 'Use the filter argument to control this behavior.',
|
||||
- DeprecationWarning)
|
||||
+ name = os.environ.get('PYTHON_TARFILE_EXTRACTION_FILTER')
|
||||
+ if name is None:
|
||||
+ try:
|
||||
+ file = bltn_open(_CONFIG_FILENAME)
|
||||
+ except FileNotFoundError:
|
||||
+ pass
|
||||
+ else:
|
||||
+ import configparser
|
||||
+ conf = configparser.ConfigParser(
|
||||
+ interpolation=None,
|
||||
+ comment_prefixes=('#', ),
|
||||
+ )
|
||||
+ with file:
|
||||
+ conf.read_file(file)
|
||||
+ name = conf.get('tarfile',
|
||||
+ 'PYTHON_TARFILE_EXTRACTION_FILTER',
|
||||
+ fallback='')
|
||||
+ if name:
|
||||
+ try:
|
||||
+ filter = _NAMED_FILTERS[name]
|
||||
+ except KeyError:
|
||||
+ raise ValueError(f"filter {filter!r} not found") from None
|
||||
+ self.extraction_filter = filter
|
||||
+ return filter
|
||||
+ if _RH_SAFER_DEFAULT:
|
||||
+ warnings.warn(
|
||||
+ 'The default behavior of tarfile extraction has been '
|
||||
+ + 'changed to disallow common exploits '
|
||||
+ + '(including CVE-2007-4559). '
|
||||
+ + 'By default, absolute/parent paths are disallowed '
|
||||
+ + 'and some mode bits are cleared. '
|
||||
+ + 'See https://access.redhat.com/articles/7004769 '
|
||||
+ + 'for more details.',
|
||||
+ RuntimeWarning)
|
||||
+ return tar_filter
|
||||
return fully_trusted_filter
|
||||
if isinstance(filter, str):
|
||||
raise TypeError(
|
||||
diff --git a/Lib/test/test_shutil.py b/Lib/test/test_shutil.py
|
||||
index 5fd8fb4..501da8f 100644
|
||||
--- a/Lib/test/test_shutil.py
|
||||
+++ b/Lib/test/test_shutil.py
|
||||
@@ -1950,7 +1950,7 @@ class TestArchives(BaseTest, unittest.TestCase):
|
||||
self.check_unpack_archive(format, filter='fully_trusted')
|
||||
self.check_unpack_archive(format, filter='data')
|
||||
with warnings_helper.check_warnings(
|
||||
- ('Python 3.14', DeprecationWarning)):
|
||||
+ ('.*CVE-2007-4559', RuntimeWarning)):
|
||||
self.check_unpack_archive(format)
|
||||
|
||||
def test_unpack_archive_tar(self):
|
||||
diff --git a/Lib/test/test_tarfile.py b/Lib/test/test_tarfile.py
|
||||
index c5fc76d..397e334 100644
|
||||
--- a/Lib/test/test_tarfile.py
|
||||
+++ b/Lib/test/test_tarfile.py
|
||||
@@ -3097,8 +3097,8 @@ class NoneInfoExtractTests(ReadTest):
|
||||
tar.errorlevel = 0
|
||||
with ExitStack() as cm:
|
||||
if cls.extraction_filter is None:
|
||||
- cm.enter_context(warnings.catch_warnings(
|
||||
- action="ignore", category=DeprecationWarning))
|
||||
+ cm.enter_context(warnings.catch_warnings())
|
||||
+ warnings.simplefilter(action="ignore", category=RuntimeWarning)
|
||||
tar.extractall(cls.control_dir, filter=cls.extraction_filter)
|
||||
tar.close()
|
||||
cls.control_paths = set(
|
||||
@@ -3919,7 +3919,7 @@ class TestExtractionFilters(unittest.TestCase):
|
||||
with ArchiveMaker() as arc:
|
||||
arc.add('foo')
|
||||
with warnings_helper.check_warnings(
|
||||
- ('Python 3.14', DeprecationWarning)):
|
||||
+ ('.*CVE-2007-4559', RuntimeWarning)):
|
||||
with self.check_context(arc.open(), None):
|
||||
self.expect_file('foo')
|
||||
|
||||
@@ -4089,6 +4089,123 @@ class TestExtractionFilters(unittest.TestCase):
|
||||
self.expect_exception(TypeError) # errorlevel is not int
|
||||
|
||||
|
||||
+ @contextmanager
|
||||
+ def rh_config_context(self, config_lines=None):
|
||||
+ """Set up for testing various ways of overriding the default filter
|
||||
+
|
||||
+ return a triple with:
|
||||
+ - temporary directory
|
||||
+ - EnvironmentVarGuard()
|
||||
+ - a test archive for use with check_* methods below
|
||||
+
|
||||
+ If config_lines is given, write them to the config file. Otherwise
|
||||
+ the config file is missing.
|
||||
+ """
|
||||
+ tempdir = pathlib.Path(TEMPDIR) / 'tmp'
|
||||
+ configfile = tempdir / 'tarfile.cfg'
|
||||
+ with ArchiveMaker() as arc:
|
||||
+ arc.add('good')
|
||||
+ arc.add('ugly', symlink_to='/etc/passwd')
|
||||
+ arc.add('../bad')
|
||||
+ with (
|
||||
+ os_helper.temp_dir(tempdir),
|
||||
+ support.swap_attr(tarfile, '_CONFIG_FILENAME', str(configfile)),
|
||||
+ os_helper.EnvironmentVarGuard() as env,
|
||||
+ arc.open() as tar,
|
||||
+ ):
|
||||
+ if config_lines is not None:
|
||||
+ with configfile.open('w') as f:
|
||||
+ for line in config_lines:
|
||||
+ print(line, file=f)
|
||||
+ yield tempdir, env, tar
|
||||
+
|
||||
+ def check_rh_default_behavior(self, tar, tempdir):
|
||||
+ """Check RH default: warn and refuse to extract dangerous files."""
|
||||
+ with (
|
||||
+ warnings_helper.check_warnings(
|
||||
+ ('.*CVE-2007-4559', RuntimeWarning)),
|
||||
+ self.assertRaises(tarfile.OutsideDestinationError),
|
||||
+ ):
|
||||
+ tar.extractall(tempdir / 'outdir')
|
||||
+
|
||||
+ def check_trusted_default(self, tar, tempdir):
|
||||
+ """Check 'fully_trusted' is configured as the default filter."""
|
||||
+ with (
|
||||
+ warnings_helper.check_no_warnings(self),
|
||||
+ ):
|
||||
+ tar.extractall(tempdir / 'outdir')
|
||||
+ self.assertTrue((tempdir / 'outdir/good').exists())
|
||||
+ self.assertEqual((tempdir / 'outdir/ugly').readlink(),
|
||||
+ pathlib.Path('/etc/passwd'))
|
||||
+ self.assertTrue((tempdir / 'bad').exists())
|
||||
+
|
||||
+ def test_rh_default_no_conf(self):
|
||||
+ with self.rh_config_context() as (tempdir, env, tar):
|
||||
+ self.check_rh_default_behavior(tar, tempdir)
|
||||
+
|
||||
+ def test_rh_default_from_file(self):
|
||||
+ lines = ['[tarfile]', 'PYTHON_TARFILE_EXTRACTION_FILTER=fully_trusted']
|
||||
+ with self.rh_config_context(lines) as (tempdir, env, tar):
|
||||
+ self.check_trusted_default(tar, tempdir)
|
||||
+
|
||||
+ def test_rh_empty_config_file(self):
|
||||
+ """Empty config file -> default behavior"""
|
||||
+ lines = []
|
||||
+ with self.rh_config_context(lines) as (tempdir, env, tar):
|
||||
+ self.check_rh_default_behavior(tar, tempdir)
|
||||
+
|
||||
+ def test_empty_config_section(self):
|
||||
+ """Empty section in config file -> default behavior"""
|
||||
+ lines = ['[tarfile]']
|
||||
+ with self.rh_config_context(lines) as (tempdir, env, tar):
|
||||
+ self.check_rh_default_behavior(tar, tempdir)
|
||||
+
|
||||
+ def test_rh_default_empty_config_option(self):
|
||||
+ """Empty option value in config file -> default behavior"""
|
||||
+ lines = ['[tarfile]', 'PYTHON_TARFILE_EXTRACTION_FILTER=']
|
||||
+ with self.rh_config_context(lines) as (tempdir, env, tar):
|
||||
+ self.check_rh_default_behavior(tar, tempdir)
|
||||
+
|
||||
+ def test_bad_config_option(self):
|
||||
+ """Bad option value in config file -> ValueError"""
|
||||
+ lines = ['[tarfile]', 'PYTHON_TARFILE_EXTRACTION_FILTER=unknown!']
|
||||
+ with self.rh_config_context(lines) as (tempdir, env, tar):
|
||||
+ with self.assertRaises(ValueError):
|
||||
+ tar.extractall(tempdir / 'outdir')
|
||||
+
|
||||
+ def test_default_from_envvar(self):
|
||||
+ with self.rh_config_context() as (tempdir, env, tar):
|
||||
+ env['PYTHON_TARFILE_EXTRACTION_FILTER'] = 'fully_trusted'
|
||||
+ self.check_trusted_default(tar, tempdir)
|
||||
+
|
||||
+ def test_empty_envvar(self):
|
||||
+ """Empty env variable -> default behavior"""
|
||||
+ with self.rh_config_context() as (tempdir, env, tar):
|
||||
+ env['PYTHON_TARFILE_EXTRACTION_FILTER'] = ''
|
||||
+ self.check_rh_default_behavior(tar, tempdir)
|
||||
+
|
||||
+ def test_bad_envvar(self):
|
||||
+ with self.rh_config_context() as (tempdir, env, tar):
|
||||
+ env['PYTHON_TARFILE_EXTRACTION_FILTER'] = 'unknown!'
|
||||
+ with self.assertRaises(ValueError):
|
||||
+ tar.extractall(tempdir / 'outdir')
|
||||
+
|
||||
+ def test_envvar_overrides_file(self):
|
||||
+ lines = ['[tarfile]', 'PYTHON_TARFILE_EXTRACTION_FILTER=data']
|
||||
+ with self.rh_config_context(lines) as (tempdir, env, tar):
|
||||
+ env['PYTHON_TARFILE_EXTRACTION_FILTER'] = 'fully_trusted'
|
||||
+ self.check_trusted_default(tar, tempdir)
|
||||
+
|
||||
+ def test_monkeypatch_overrides_envvar(self):
|
||||
+ with self.rh_config_context(None) as (tempdir, env, tar):
|
||||
+ env['PYTHON_TARFILE_EXTRACTION_FILTER'] = 'data'
|
||||
+ with support.swap_attr(
|
||||
+ tarfile.TarFile, 'extraction_filter',
|
||||
+ staticmethod(tarfile.fully_trusted_filter)
|
||||
+ ):
|
||||
+ self.check_trusted_default(tar, tempdir)
|
||||
+
|
||||
+
|
||||
class OverwriteTests(archiver_tests.OverwriteTests, unittest.TestCase):
|
||||
testdir = os.path.join(TEMPDIR, "testoverwrite")
|
||||
|
||||
--
|
||||
2.43.0
|
||||
|
@@ -0,0 +1,483 @@
|
||||
From 0000000000000000000000000000000000000000 Mon Sep 17 00:00:00 2001
|
||||
From: Victor Stinner <vstinner@python.org>
|
||||
Date: Fri, 15 Dec 2023 16:10:40 +0100
|
||||
Subject: [PATCH] 00415: [CVE-2023-27043] gh-102988: Reject malformed addresses
|
||||
in email.parseaddr() (#111116)
|
||||
|
||||
Detect email address parsing errors and return empty tuple to
|
||||
indicate the parsing error (old API). Add an optional 'strict'
|
||||
parameter to getaddresses() and parseaddr() functions. Patch by
|
||||
Thomas Dwyer.
|
||||
|
||||
Co-Authored-By: Thomas Dwyer <github@tomd.tel>
|
||||
---
|
||||
Doc/library/email.utils.rst | 19 +-
|
||||
Lib/email/utils.py | 151 +++++++++++++-
|
||||
Lib/test/test_email/test_email.py | 187 +++++++++++++++++-
|
||||
...-10-20-15-28-08.gh-issue-102988.dStNO7.rst | 8 +
|
||||
4 files changed, 344 insertions(+), 21 deletions(-)
|
||||
create mode 100644 Misc/NEWS.d/next/Library/2023-10-20-15-28-08.gh-issue-102988.dStNO7.rst
|
||||
|
||||
diff --git a/Doc/library/email.utils.rst b/Doc/library/email.utils.rst
|
||||
index 345b64001c..d693a9bc39 100644
|
||||
--- a/Doc/library/email.utils.rst
|
||||
+++ b/Doc/library/email.utils.rst
|
||||
@@ -58,13 +58,18 @@ of the new API.
|
||||
begins with angle brackets, they are stripped off.
|
||||
|
||||
|
||||
-.. function:: parseaddr(address)
|
||||
+.. function:: parseaddr(address, *, strict=True)
|
||||
|
||||
Parse address -- which should be the value of some address-containing field such
|
||||
as :mailheader:`To` or :mailheader:`Cc` -- into its constituent *realname* and
|
||||
*email address* parts. Returns a tuple of that information, unless the parse
|
||||
fails, in which case a 2-tuple of ``('', '')`` is returned.
|
||||
|
||||
+ If *strict* is true, use a strict parser which rejects malformed inputs.
|
||||
+
|
||||
+ .. versionchanged:: 3.13
|
||||
+ Add *strict* optional parameter and reject malformed inputs by default.
|
||||
+
|
||||
|
||||
.. function:: formataddr(pair, charset='utf-8')
|
||||
|
||||
@@ -82,12 +87,15 @@ of the new API.
|
||||
Added the *charset* option.
|
||||
|
||||
|
||||
-.. function:: getaddresses(fieldvalues)
|
||||
+.. function:: getaddresses(fieldvalues, *, strict=True)
|
||||
|
||||
This method returns a list of 2-tuples of the form returned by ``parseaddr()``.
|
||||
*fieldvalues* is a sequence of header field values as might be returned by
|
||||
- :meth:`Message.get_all <email.message.Message.get_all>`. Here's a simple
|
||||
- example that gets all the recipients of a message::
|
||||
+ :meth:`Message.get_all <email.message.Message.get_all>`.
|
||||
+
|
||||
+ If *strict* is true, use a strict parser which rejects malformed inputs.
|
||||
+
|
||||
+ Here's a simple example that gets all the recipients of a message::
|
||||
|
||||
from email.utils import getaddresses
|
||||
|
||||
@@ -97,6 +105,9 @@ of the new API.
|
||||
resent_ccs = msg.get_all('resent-cc', [])
|
||||
all_recipients = getaddresses(tos + ccs + resent_tos + resent_ccs)
|
||||
|
||||
+ .. versionchanged:: 3.13
|
||||
+ Add *strict* optional parameter and reject malformed inputs by default.
|
||||
+
|
||||
|
||||
.. function:: parsedate(date)
|
||||
|
||||
diff --git a/Lib/email/utils.py b/Lib/email/utils.py
|
||||
index aa949aa933..af2fb14754 100644
|
||||
--- a/Lib/email/utils.py
|
||||
+++ b/Lib/email/utils.py
|
||||
@@ -48,6 +48,7 @@
|
||||
specialsre = re.compile(r'[][\\()<>@,:;".]')
|
||||
escapesre = re.compile(r'[\\"]')
|
||||
|
||||
+
|
||||
def _has_surrogates(s):
|
||||
"""Return True if s may contain surrogate-escaped binary data."""
|
||||
# This check is based on the fact that unless there are surrogates, utf8
|
||||
@@ -106,12 +107,127 @@ def formataddr(pair, charset='utf-8'):
|
||||
return address
|
||||
|
||||
|
||||
+def _iter_escaped_chars(addr):
|
||||
+ pos = 0
|
||||
+ escape = False
|
||||
+ for pos, ch in enumerate(addr):
|
||||
+ if escape:
|
||||
+ yield (pos, '\\' + ch)
|
||||
+ escape = False
|
||||
+ elif ch == '\\':
|
||||
+ escape = True
|
||||
+ else:
|
||||
+ yield (pos, ch)
|
||||
+ if escape:
|
||||
+ yield (pos, '\\')
|
||||
|
||||
-def getaddresses(fieldvalues):
|
||||
- """Return a list of (REALNAME, EMAIL) for each fieldvalue."""
|
||||
- all = COMMASPACE.join(str(v) for v in fieldvalues)
|
||||
- a = _AddressList(all)
|
||||
- return a.addresslist
|
||||
+
|
||||
+def _strip_quoted_realnames(addr):
|
||||
+ """Strip real names between quotes."""
|
||||
+ if '"' not in addr:
|
||||
+ # Fast path
|
||||
+ return addr
|
||||
+
|
||||
+ start = 0
|
||||
+ open_pos = None
|
||||
+ result = []
|
||||
+ for pos, ch in _iter_escaped_chars(addr):
|
||||
+ if ch == '"':
|
||||
+ if open_pos is None:
|
||||
+ open_pos = pos
|
||||
+ else:
|
||||
+ if start != open_pos:
|
||||
+ result.append(addr[start:open_pos])
|
||||
+ start = pos + 1
|
||||
+ open_pos = None
|
||||
+
|
||||
+ if start < len(addr):
|
||||
+ result.append(addr[start:])
|
||||
+
|
||||
+ return ''.join(result)
|
||||
+
|
||||
+
|
||||
+supports_strict_parsing = True
|
||||
+
|
||||
+def getaddresses(fieldvalues, *, strict=True):
|
||||
+ """Return a list of (REALNAME, EMAIL) or ('','') for each fieldvalue.
|
||||
+
|
||||
+ When parsing fails for a fieldvalue, a 2-tuple of ('', '') is returned in
|
||||
+ its place.
|
||||
+
|
||||
+ If strict is true, use a strict parser which rejects malformed inputs.
|
||||
+ """
|
||||
+
|
||||
+ # If strict is true, if the resulting list of parsed addresses is greater
|
||||
+ # than the number of fieldvalues in the input list, a parsing error has
|
||||
+ # occurred and consequently a list containing a single empty 2-tuple [('',
|
||||
+ # '')] is returned in its place. This is done to avoid invalid output.
|
||||
+ #
|
||||
+ # Malformed input: getaddresses(['alice@example.com <bob@example.com>'])
|
||||
+ # Invalid output: [('', 'alice@example.com'), ('', 'bob@example.com')]
|
||||
+ # Safe output: [('', '')]
|
||||
+
|
||||
+ if not strict:
|
||||
+ all = COMMASPACE.join(str(v) for v in fieldvalues)
|
||||
+ a = _AddressList(all)
|
||||
+ return a.addresslist
|
||||
+
|
||||
+ fieldvalues = [str(v) for v in fieldvalues]
|
||||
+ fieldvalues = _pre_parse_validation(fieldvalues)
|
||||
+ addr = COMMASPACE.join(fieldvalues)
|
||||
+ a = _AddressList(addr)
|
||||
+ result = _post_parse_validation(a.addresslist)
|
||||
+
|
||||
+ # Treat output as invalid if the number of addresses is not equal to the
|
||||
+ # expected number of addresses.
|
||||
+ n = 0
|
||||
+ for v in fieldvalues:
|
||||
+ # When a comma is used in the Real Name part it is not a deliminator.
|
||||
+ # So strip those out before counting the commas.
|
||||
+ v = _strip_quoted_realnames(v)
|
||||
+ # Expected number of addresses: 1 + number of commas
|
||||
+ n += 1 + v.count(',')
|
||||
+ if len(result) != n:
|
||||
+ return [('', '')]
|
||||
+
|
||||
+ return result
|
||||
+
|
||||
+
|
||||
+def _check_parenthesis(addr):
|
||||
+ # Ignore parenthesis in quoted real names.
|
||||
+ addr = _strip_quoted_realnames(addr)
|
||||
+
|
||||
+ opens = 0
|
||||
+ for pos, ch in _iter_escaped_chars(addr):
|
||||
+ if ch == '(':
|
||||
+ opens += 1
|
||||
+ elif ch == ')':
|
||||
+ opens -= 1
|
||||
+ if opens < 0:
|
||||
+ return False
|
||||
+ return (opens == 0)
|
||||
+
|
||||
+
|
||||
+def _pre_parse_validation(email_header_fields):
|
||||
+ accepted_values = []
|
||||
+ for v in email_header_fields:
|
||||
+ if not _check_parenthesis(v):
|
||||
+ v = "('', '')"
|
||||
+ accepted_values.append(v)
|
||||
+
|
||||
+ return accepted_values
|
||||
+
|
||||
+
|
||||
+def _post_parse_validation(parsed_email_header_tuples):
|
||||
+ accepted_values = []
|
||||
+ # The parser would have parsed a correctly formatted domain-literal
|
||||
+ # The existence of an [ after parsing indicates a parsing failure
|
||||
+ for v in parsed_email_header_tuples:
|
||||
+ if '[' in v[1]:
|
||||
+ v = ('', '')
|
||||
+ accepted_values.append(v)
|
||||
+
|
||||
+ return accepted_values
|
||||
|
||||
|
||||
def _format_timetuple_and_zone(timetuple, zone):
|
||||
@@ -205,16 +321,33 @@ def parsedate_to_datetime(data):
|
||||
tzinfo=datetime.timezone(datetime.timedelta(seconds=tz)))
|
||||
|
||||
|
||||
-def parseaddr(addr):
|
||||
+def parseaddr(addr, *, strict=True):
|
||||
"""
|
||||
Parse addr into its constituent realname and email address parts.
|
||||
|
||||
Return a tuple of realname and email address, unless the parse fails, in
|
||||
which case return a 2-tuple of ('', '').
|
||||
+
|
||||
+ If strict is True, use a strict parser which rejects malformed inputs.
|
||||
"""
|
||||
- addrs = _AddressList(addr).addresslist
|
||||
- if not addrs:
|
||||
- return '', ''
|
||||
+ if not strict:
|
||||
+ addrs = _AddressList(addr).addresslist
|
||||
+ if not addrs:
|
||||
+ return ('', '')
|
||||
+ return addrs[0]
|
||||
+
|
||||
+ if isinstance(addr, list):
|
||||
+ addr = addr[0]
|
||||
+
|
||||
+ if not isinstance(addr, str):
|
||||
+ return ('', '')
|
||||
+
|
||||
+ addr = _pre_parse_validation([addr])[0]
|
||||
+ addrs = _post_parse_validation(_AddressList(addr).addresslist)
|
||||
+
|
||||
+ if not addrs or len(addrs) > 1:
|
||||
+ return ('', '')
|
||||
+
|
||||
return addrs[0]
|
||||
|
||||
|
||||
diff --git a/Lib/test/test_email/test_email.py b/Lib/test/test_email/test_email.py
|
||||
index a373c53c7c..c616398eb1 100644
|
||||
--- a/Lib/test/test_email/test_email.py
|
||||
+++ b/Lib/test/test_email/test_email.py
|
||||
@@ -16,6 +16,7 @@
|
||||
|
||||
import email
|
||||
import email.policy
|
||||
+import email.utils
|
||||
|
||||
from email.charset import Charset
|
||||
from email.generator import Generator, DecodedGenerator, BytesGenerator
|
||||
@@ -3352,15 +3353,137 @@ def test_getaddresses_comma_in_name(self):
|
||||
],
|
||||
)
|
||||
|
||||
+ def test_parsing_errors(self):
|
||||
+ """Test for parsing errors from CVE-2023-27043 and CVE-2019-16056"""
|
||||
+ alice = 'alice@example.org'
|
||||
+ bob = 'bob@example.com'
|
||||
+ empty = ('', '')
|
||||
+
|
||||
+ # Test utils.getaddresses() and utils.parseaddr() on malformed email
|
||||
+ # addresses: default behavior (strict=True) rejects malformed address,
|
||||
+ # and strict=False which tolerates malformed address.
|
||||
+ for invalid_separator, expected_non_strict in (
|
||||
+ ('(', [(f'<{bob}>', alice)]),
|
||||
+ (')', [('', alice), empty, ('', bob)]),
|
||||
+ ('<', [('', alice), empty, ('', bob), empty]),
|
||||
+ ('>', [('', alice), empty, ('', bob)]),
|
||||
+ ('[', [('', f'{alice}[<{bob}>]')]),
|
||||
+ (']', [('', alice), empty, ('', bob)]),
|
||||
+ ('@', [empty, empty, ('', bob)]),
|
||||
+ (';', [('', alice), empty, ('', bob)]),
|
||||
+ (':', [('', alice), ('', bob)]),
|
||||
+ ('.', [('', alice + '.'), ('', bob)]),
|
||||
+ ('"', [('', alice), ('', f'<{bob}>')]),
|
||||
+ ):
|
||||
+ address = f'{alice}{invalid_separator}<{bob}>'
|
||||
+ with self.subTest(address=address):
|
||||
+ self.assertEqual(utils.getaddresses([address]),
|
||||
+ [empty])
|
||||
+ self.assertEqual(utils.getaddresses([address], strict=False),
|
||||
+ expected_non_strict)
|
||||
+
|
||||
+ self.assertEqual(utils.parseaddr([address]),
|
||||
+ empty)
|
||||
+ self.assertEqual(utils.parseaddr([address], strict=False),
|
||||
+ ('', address))
|
||||
+
|
||||
+ # Comma (',') is treated differently depending on strict parameter.
|
||||
+ # Comma without quotes.
|
||||
+ address = f'{alice},<{bob}>'
|
||||
+ self.assertEqual(utils.getaddresses([address]),
|
||||
+ [('', alice), ('', bob)])
|
||||
+ self.assertEqual(utils.getaddresses([address], strict=False),
|
||||
+ [('', alice), ('', bob)])
|
||||
+ self.assertEqual(utils.parseaddr([address]),
|
||||
+ empty)
|
||||
+ self.assertEqual(utils.parseaddr([address], strict=False),
|
||||
+ ('', address))
|
||||
+
|
||||
+ # Real name between quotes containing comma.
|
||||
+ address = '"Alice, alice@example.org" <bob@example.com>'
|
||||
+ expected_strict = ('Alice, alice@example.org', 'bob@example.com')
|
||||
+ self.assertEqual(utils.getaddresses([address]), [expected_strict])
|
||||
+ self.assertEqual(utils.getaddresses([address], strict=False), [expected_strict])
|
||||
+ self.assertEqual(utils.parseaddr([address]), expected_strict)
|
||||
+ self.assertEqual(utils.parseaddr([address], strict=False),
|
||||
+ ('', address))
|
||||
+
|
||||
+ # Valid parenthesis in comments.
|
||||
+ address = 'alice@example.org (Alice)'
|
||||
+ expected_strict = ('Alice', 'alice@example.org')
|
||||
+ self.assertEqual(utils.getaddresses([address]), [expected_strict])
|
||||
+ self.assertEqual(utils.getaddresses([address], strict=False), [expected_strict])
|
||||
+ self.assertEqual(utils.parseaddr([address]), expected_strict)
|
||||
+ self.assertEqual(utils.parseaddr([address], strict=False),
|
||||
+ ('', address))
|
||||
+
|
||||
+ # Invalid parenthesis in comments.
|
||||
+ address = 'alice@example.org )Alice('
|
||||
+ self.assertEqual(utils.getaddresses([address]), [empty])
|
||||
+ self.assertEqual(utils.getaddresses([address], strict=False),
|
||||
+ [('', 'alice@example.org'), ('', ''), ('', 'Alice')])
|
||||
+ self.assertEqual(utils.parseaddr([address]), empty)
|
||||
+ self.assertEqual(utils.parseaddr([address], strict=False),
|
||||
+ ('', address))
|
||||
+
|
||||
+ # Two addresses with quotes separated by comma.
|
||||
+ address = '"Jane Doe" <jane@example.net>, "John Doe" <john@example.net>'
|
||||
+ self.assertEqual(utils.getaddresses([address]),
|
||||
+ [('Jane Doe', 'jane@example.net'),
|
||||
+ ('John Doe', 'john@example.net')])
|
||||
+ self.assertEqual(utils.getaddresses([address], strict=False),
|
||||
+ [('Jane Doe', 'jane@example.net'),
|
||||
+ ('John Doe', 'john@example.net')])
|
||||
+ self.assertEqual(utils.parseaddr([address]), empty)
|
||||
+ self.assertEqual(utils.parseaddr([address], strict=False),
|
||||
+ ('', address))
|
||||
+
|
||||
+ # Test email.utils.supports_strict_parsing attribute
|
||||
+ self.assertEqual(email.utils.supports_strict_parsing, True)
|
||||
+
|
||||
def test_getaddresses_nasty(self):
|
||||
- eq = self.assertEqual
|
||||
- eq(utils.getaddresses(['foo: ;']), [('', '')])
|
||||
- eq(utils.getaddresses(
|
||||
- ['[]*-- =~$']),
|
||||
- [('', ''), ('', ''), ('', '*--')])
|
||||
- eq(utils.getaddresses(
|
||||
- ['foo: ;', '"Jason R. Mastaler" <jason@dom.ain>']),
|
||||
- [('', ''), ('Jason R. Mastaler', 'jason@dom.ain')])
|
||||
+ for addresses, expected in (
|
||||
+ (['"Sürname, Firstname" <to@example.com>'],
|
||||
+ [('Sürname, Firstname', 'to@example.com')]),
|
||||
+
|
||||
+ (['foo: ;'],
|
||||
+ [('', '')]),
|
||||
+
|
||||
+ (['foo: ;', '"Jason R. Mastaler" <jason@dom.ain>'],
|
||||
+ [('', ''), ('Jason R. Mastaler', 'jason@dom.ain')]),
|
||||
+
|
||||
+ ([r'Pete(A nice \) chap) <pete(his account)@silly.test(his host)>'],
|
||||
+ [('Pete (A nice ) chap his account his host)', 'pete@silly.test')]),
|
||||
+
|
||||
+ (['(Empty list)(start)Undisclosed recipients :(nobody(I know))'],
|
||||
+ [('', '')]),
|
||||
+
|
||||
+ (['Mary <@machine.tld:mary@example.net>, , jdoe@test . example'],
|
||||
+ [('Mary', 'mary@example.net'), ('', ''), ('', 'jdoe@test.example')]),
|
||||
+
|
||||
+ (['John Doe <jdoe@machine(comment). example>'],
|
||||
+ [('John Doe (comment)', 'jdoe@machine.example')]),
|
||||
+
|
||||
+ (['"Mary Smith: Personal Account" <smith@home.example>'],
|
||||
+ [('Mary Smith: Personal Account', 'smith@home.example')]),
|
||||
+
|
||||
+ (['Undisclosed recipients:;'],
|
||||
+ [('', '')]),
|
||||
+
|
||||
+ ([r'<boss@nil.test>, "Giant; \"Big\" Box" <bob@example.net>'],
|
||||
+ [('', 'boss@nil.test'), ('Giant; "Big" Box', 'bob@example.net')]),
|
||||
+ ):
|
||||
+ with self.subTest(addresses=addresses):
|
||||
+ self.assertEqual(utils.getaddresses(addresses),
|
||||
+ expected)
|
||||
+ self.assertEqual(utils.getaddresses(addresses, strict=False),
|
||||
+ expected)
|
||||
+
|
||||
+ addresses = ['[]*-- =~$']
|
||||
+ self.assertEqual(utils.getaddresses(addresses),
|
||||
+ [('', '')])
|
||||
+ self.assertEqual(utils.getaddresses(addresses, strict=False),
|
||||
+ [('', ''), ('', ''), ('', '*--')])
|
||||
|
||||
def test_getaddresses_embedded_comment(self):
|
||||
"""Test proper handling of a nested comment"""
|
||||
@@ -3551,6 +3674,54 @@ def test_mime_classes_policy_argument(self):
|
||||
m = cls(*constructor, policy=email.policy.default)
|
||||
self.assertIs(m.policy, email.policy.default)
|
||||
|
||||
+ def test_iter_escaped_chars(self):
|
||||
+ self.assertEqual(list(utils._iter_escaped_chars(r'a\\b\"c\\"d')),
|
||||
+ [(0, 'a'),
|
||||
+ (2, '\\\\'),
|
||||
+ (3, 'b'),
|
||||
+ (5, '\\"'),
|
||||
+ (6, 'c'),
|
||||
+ (8, '\\\\'),
|
||||
+ (9, '"'),
|
||||
+ (10, 'd')])
|
||||
+ self.assertEqual(list(utils._iter_escaped_chars('a\\')),
|
||||
+ [(0, 'a'), (1, '\\')])
|
||||
+
|
||||
+ def test_strip_quoted_realnames(self):
|
||||
+ def check(addr, expected):
|
||||
+ self.assertEqual(utils._strip_quoted_realnames(addr), expected)
|
||||
+
|
||||
+ check('"Jane Doe" <jane@example.net>, "John Doe" <john@example.net>',
|
||||
+ ' <jane@example.net>, <john@example.net>')
|
||||
+ check(r'"Jane \"Doe\"." <jane@example.net>',
|
||||
+ ' <jane@example.net>')
|
||||
+
|
||||
+ # special cases
|
||||
+ check(r'before"name"after', 'beforeafter')
|
||||
+ check(r'before"name"', 'before')
|
||||
+ check(r'b"name"', 'b') # single char
|
||||
+ check(r'"name"after', 'after')
|
||||
+ check(r'"name"a', 'a') # single char
|
||||
+ check(r'"name"', '')
|
||||
+
|
||||
+ # no change
|
||||
+ for addr in (
|
||||
+ 'Jane Doe <jane@example.net>, John Doe <john@example.net>',
|
||||
+ 'lone " quote',
|
||||
+ ):
|
||||
+ self.assertEqual(utils._strip_quoted_realnames(addr), addr)
|
||||
+
|
||||
+
|
||||
+ def test_check_parenthesis(self):
|
||||
+ addr = 'alice@example.net'
|
||||
+ self.assertTrue(utils._check_parenthesis(f'{addr} (Alice)'))
|
||||
+ self.assertFalse(utils._check_parenthesis(f'{addr} )Alice('))
|
||||
+ self.assertFalse(utils._check_parenthesis(f'{addr} (Alice))'))
|
||||
+ self.assertFalse(utils._check_parenthesis(f'{addr} ((Alice)'))
|
||||
+
|
||||
+ # Ignore real name between quotes
|
||||
+ self.assertTrue(utils._check_parenthesis(f'")Alice((" {addr}'))
|
||||
+
|
||||
|
||||
# Test the iterator/generators
|
||||
class TestIterators(TestEmailBase):
|
||||
diff --git a/Misc/NEWS.d/next/Library/2023-10-20-15-28-08.gh-issue-102988.dStNO7.rst b/Misc/NEWS.d/next/Library/2023-10-20-15-28-08.gh-issue-102988.dStNO7.rst
|
||||
new file mode 100644
|
||||
index 0000000000..3d0e9e4078
|
||||
--- /dev/null
|
||||
+++ b/Misc/NEWS.d/next/Library/2023-10-20-15-28-08.gh-issue-102988.dStNO7.rst
|
||||
@@ -0,0 +1,8 @@
|
||||
+:func:`email.utils.getaddresses` and :func:`email.utils.parseaddr` now
|
||||
+return ``('', '')`` 2-tuples in more situations where invalid email
|
||||
+addresses are encountered instead of potentially inaccurate values. Add
|
||||
+optional *strict* parameter to these two functions: use ``strict=False`` to
|
||||
+get the old behavior, accepting malformed inputs.
|
||||
+``getattr(email.utils, 'supports_strict_parsing', False)`` can be used to check
|
||||
+if the *strict* parameter is available. Patch by Thomas Dwyer and Victor
|
||||
+Stinner to improve the CVE-2023-27043 fix.
|
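A short sketch of the behavior this patch introduces, using only what the patch itself documents (the malformed example from the getaddresses() comment and the new supports_strict_parsing marker attribute):

from email import utils

if getattr(utils, 'supports_strict_parsing', False):
    malformed = 'alice@example.com <bob@example.com>'
    # Strict parsing (the new default) refuses to guess and returns the
    # safe placeholder instead of two half-parsed addresses.
    print(utils.getaddresses([malformed]))    # [('', '')]
    print(utils.parseaddr(malformed))         # ('', '')
    # The previous, lenient parser is still available on request.
    print(utils.getaddresses([malformed], strict=False))
    print(utils.parseaddr(malformed, strict=False))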
@ -0,0 +1,63 @@
|
||||
From 60d40d7095983e0bc23a103b2050adc519dc7fe3 Mon Sep 17 00:00:00 2001
|
||||
From: Lumir Balhar <lbalhar@redhat.com>
|
||||
Date: Fri, 3 May 2024 14:17:48 +0200
|
||||
Subject: [PATCH] Expect failures in tests not working properly with expat with
|
||||
a fixed CVE in RHEL
|
||||
|
||||
---
|
||||
Lib/test/test_pyexpat.py | 1 +
|
||||
Lib/test/test_sax.py | 1 +
|
||||
Lib/test/test_xml_etree.py | 3 +++
|
||||
3 files changed, 5 insertions(+)
|
||||
|
||||
diff --git a/Lib/test/test_pyexpat.py b/Lib/test/test_pyexpat.py
|
||||
index 43cbd27..27b1502 100644
|
||||
--- a/Lib/test/test_pyexpat.py
|
||||
+++ b/Lib/test/test_pyexpat.py
|
||||
@@ -793,6 +793,7 @@ class ReparseDeferralTest(unittest.TestCase):
|
||||
|
||||
self.assertEqual(started, ['doc'])
|
||||
|
||||
+ @unittest.expectedFailure
|
||||
def test_reparse_deferral_disabled(self):
|
||||
started = []
|
||||
|
||||
diff --git a/Lib/test/test_sax.py b/Lib/test/test_sax.py
|
||||
index 9b3014a..646c92d 100644
|
||||
--- a/Lib/test/test_sax.py
|
||||
+++ b/Lib/test/test_sax.py
|
||||
@@ -1240,6 +1240,7 @@ class ExpatReaderTest(XmlTestBase):
|
||||
|
||||
self.assertEqual(result.getvalue(), start + b"<doc></doc>")
|
||||
|
||||
+ @unittest.expectedFailure
|
||||
def test_flush_reparse_deferral_disabled(self):
|
||||
result = BytesIO()
|
||||
xmlgen = XMLGenerator(result)
|
||||
diff --git a/Lib/test/test_xml_etree.py b/Lib/test/test_xml_etree.py
|
||||
index 9c382d1..62f2871 100644
|
||||
--- a/Lib/test/test_xml_etree.py
|
||||
+++ b/Lib/test/test_xml_etree.py
|
||||
@@ -1424,9 +1424,11 @@ class XMLPullParserTest(unittest.TestCase):
|
||||
self.assert_event_tags(parser, [('end', 'root')])
|
||||
self.assertIsNone(parser.close())
|
||||
|
||||
+ @unittest.expectedFailure
|
||||
def test_simple_xml_chunk_1(self):
|
||||
self.test_simple_xml(chunk_size=1, flush=True)
|
||||
|
||||
+ @unittest.expectedFailure
|
||||
def test_simple_xml_chunk_5(self):
|
||||
self.test_simple_xml(chunk_size=5, flush=True)
|
||||
|
||||
@@ -1651,6 +1653,7 @@ class XMLPullParserTest(unittest.TestCase):
|
||||
|
||||
self.assert_event_tags(parser, [('end', 'doc')])
|
||||
|
||||
+ @unittest.expectedFailure
|
||||
def test_flush_reparse_deferral_disabled(self):
|
||||
parser = ET.XMLPullParser(events=('start', 'end'))
|
||||
|
||||
--
|
||||
2.44.0
|
||||
|
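For context, a minimal standalone illustration of the decorator used throughout this patch (not part of the patch itself): a test marked @unittest.expectedFailure keeps the run green when it fails and is reported as an unexpected success if it starts passing.

import unittest

class ExpectedFailureDemo(unittest.TestCase):
    @unittest.expectedFailure
    def test_known_breakage(self):
        # Fails by design; the suite still counts it as an expected failure.
        self.assertEqual(1, 2)

if __name__ == '__main__':
    unittest.main()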
@ -0,0 +1,28 @@
|
||||
From 0000000000000000000000000000000000000000 Mon Sep 17 00:00:00 2001
|
||||
From: Karolina Surma <ksurma@redhat.com>
|
||||
Date: Wed, 10 Apr 2024 15:35:04 +0200
|
||||
Subject: [PATCH] 00425: Only check for 'test/wheeldata' when it's actually
|
||||
used
|
||||
|
||||
We build Python in Fedora 39+ with option `--with-wheel-pkg-dir`
|
||||
pointing to a custom wheel directory and delete the contents of
|
||||
upstream's `test/wheeldata`. Don't include the directory in the test set
|
||||
if the wheels are used from a different location.
|
||||
---
|
||||
Lib/test/test_tools/test_makefile.py | 3 +++
|
||||
1 file changed, 3 insertions(+)
|
||||
|
||||
diff --git a/Lib/test/test_tools/test_makefile.py b/Lib/test/test_tools/test_makefile.py
|
||||
index 17a1a6d0d3..9ce874033d 100644
|
||||
--- a/Lib/test/test_tools/test_makefile.py
|
||||
+++ b/Lib/test/test_tools/test_makefile.py
|
||||
@@ -66,6 +66,9 @@ def test_makefile_test_folders(self):
|
||||
)
|
||||
used.append(relpath)
|
||||
|
||||
+ if sysconfig.get_config_var('WHEEL_PKG_DIR'):
|
||||
+ test_dirs.remove('test/wheeldata')
|
||||
+
|
||||
# Check that there are no extra entries:
|
||||
unique_test_dirs = set(test_dirs)
|
||||
self.assertSetEqual(unique_test_dirs, set(used))
|
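The condition added above keys off a build-time setting; a small way to inspect it (the value depends on how the interpreter was configured):

import sysconfig

# Set when CPython is configured with --with-wheel-pkg-dir=<path>;
# empty or None on builds without that option.
print(sysconfig.get_config_var('WHEEL_PKG_DIR'))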
@ -0,0 +1,18 @@
|
||||
-----BEGIN PGP SIGNATURE-----
|
||||
|
||||
iQKTBAABCgB9FiEEcWlgX2LHUTVtBUomqCHmgOX6YwUFAmYVDdNfFIAAAAAALgAo
|
||||
aXNzdWVyLWZwckBub3RhdGlvbnMub3BlbnBncC5maWZ0aGhvcnNlbWFuLm5ldDcx
|
||||
Njk2MDVGNjJDNzUxMzU2RDA1NEEyNkE4MjFFNjgwRTVGQTYzMDUACgkQqCHmgOX6
|
||||
YwU8Vg//aP8bxzPTDIM9Af1LLJj5LNLIuZOl5QysWQVbakoCpS8Z8ZiK3LyzGi7H
|
||||
pQ5uJEnRjhULnOi+va2TPBDqiYvY1CkVizYzmUe1dMtzHdJUBE1TzybfON02JzPD
|
||||
62oDHxUC1hvITyLE8tjnsgBuP9bbYYHnS+qqmDgBWS1M60i4bqcBiSdlWZp7ZTI4
|
||||
KIxIy9eyNujHnNQrQQ1oqIoj7ty1Hrtkfqia/3cVq7rkQT8HecBIW0K82WuIXizm
|
||||
/Ua/TQslTJsypslFYpoJBoIkWG2nk7RhJvfU5iLxQHen6cr7JOUo/u3jv0DIJyJs
|
||||
LdBWG6noTIiqKJb65UswLUxexM5f3Y7gLEZ4FCqlbAOAPG16xwwC8Xd7LIF33cHK
|
||||
133BvYCkwdl0MCpmsQuxi8i6Kql0MaEqJ9MEj6UN66ZJVpRx8hOm2FtZGhn5ZNxx
|
||||
r5C2zXGw/IjXeS01wgD8cSRVA0XJdN4bu88vmvhqMuezg3CDF5bX85isoFUaLUjS
|
||||
c5Lv1HNrqPiaWHOctnvzasy0djpwze+WCzsXFMI6VfejPpYwNlhmnxS7i3R9A4RK
|
||||
gBwViMd5q5rwx365tCfRfGcBW6OOvrHZalhSGYmUw13sBarFliW9CvN4ghN9kWbN
|
||||
YQwSggf5KD6v5mAAyReMrOJTyBG6B5hMlxKai5CzbRLlG25T2wI=
|
||||
=ZQxz
|
||||
-----END PGP SIGNATURE-----
|
@ -0,0 +1,136 @@
|
||||
-----BEGIN PGP PUBLIC KEY BLOCK-----
|
||||
|
||||
mQINBFUAInYBEACrmKcXagNRlo1VjznrJZMMUh0rxUn2iK2wy9H5qrCo4EgMYahZ
|
||||
ibBunSWB4RNeVQevzUm3eSyOixnt+BmGZbSYqKp8tJIXRRcnKhEtC62X+7NVMc7B
|
||||
9uPu/aJ3HNqXrsQwBJUzZxzLMLg6obCyarhhHAYbWmfaafU4yNk3J4dGNKoZtHvz
|
||||
bjnUtlsUAkCmuyt3MsUuSYz34BviRLSEZEKW6xNoyQmD9dUhQ5exBuTPjtmdTf+x
|
||||
gOKpBluRkJ4TADGlWf42lIkaI+8DYRj1R8eQdLFwS7sDTu/MMPceKU7nTWOoj8HF
|
||||
3xXRJ+bJbpOJXZFEzVKjXHKuMFkhKr562i0LD8pdl1+s+9LRovmAvGwggt04Drzb
|
||||
AK437QoyjPKiTnFlg4tOeIuN0Y+GGk2hXOdH7fNw79B9Tq5ENxth8NsnKVlz1zpF
|
||||
X+aV0zCvAjNWutAUpikqZT/ibpwmM+NJcz3pgzQOq+LfPFskyrv7zkVODEjH3SG3
|
||||
s4ROvyoWfLPWmX92kJMOkvzyQObZmU2zWJgJbjYRApZiTfbfnH1tE+wxH4ZR5dji
|
||||
FpEdUJn1yjpYp21Q10khIdsj6q9IvS3RDq0ygc5wfl5111byEsdP12y36lvPTclT
|
||||
33VHBR1vxr+js9d8FI4wwt/o+7TmAO39DYhLrtn+ZgyRgIBYY65lhEaUtwARAQAB
|
||||
tCJUaG9tYXMgV291dGVycyA8dGhvbWFzQHB5dGhvbi5vcmc+iQJXBBMBCgBBAhsD
|
||||
BQsJCAcDBRUKCQgLBRYCAwEAAh4BAheABQkVRkQLFiEEcWlgX2LHUTVtBUomqCHm
|
||||
gOX6YwUFAmM7V3UCGQEACgkQqCHmgOX6YwVFeRAAkXE+BC/8O7VVtNe3iCdcQtW3
|
||||
PiCINEJgOQbXSwjkIGjD/Noheu+2cdwznjUmAX3qgnOyxIvo1AzYXagRazKVl1A+
|
||||
AiMctMNUCuVAkPeTL3nUERzOzZP6fE9OB/XNyiFeNPGg3qGz/HEJH8OMzahfOpzM
|
||||
VC3bCcZrn3JmMp6X8gLgArcK20L7qu/USO/Ico9vT8n+IkZIyxv9GNzfr4QZtGQN
|
||||
DkcHXHbX7p6juffdF9PpQgeAHfP4F9ZuDC+Mc5AGQaxY0z+gNLQGbTEjBBxkrGqd
|
||||
3iOHWb+RLLRJkHkF95KegatrgRkK3d+WLsHwCWzySDAKsjcvM33+N5YB9vWiL/K+
|
||||
kRbgEiecQHwsV1WT+DLY4yoLEBDVbThSw90R2b4bDzCOWShYMX8hDu5HaP3vT1Ye
|
||||
lLSYT/1TxX1yvGeCuA8D+V9OZbSi7eKVT7W4pxqiCcDTpvMvx3o9NfiHEFGQfjlZ
|
||||
nQsIBt9YeBG2c/GL0h1v4X9kBHjxv58576L9olEuWViuCam3OmW31Ik8OjYUwHs0
|
||||
tVqc/ciKsot/3ci96wxnG0RajkXL4ybQI7QzJ3OJJyLMZUPx7UTkdYlD7ZKJyU/N
|
||||
kdcmEjtvBtWeCROZOdivvZeJnSe/vANbH9Oibongl9Zwlq0w/Sd8fHKJQZC7c4dA
|
||||
bTVfbTLXuaLUE86ZCdSJAhwEEAEKAAYFAlUAQF8ACgkQq90IJmKQVoiM5w/+OT6z
|
||||
14MgLePITzg+SCRH7H6QgMjLQXrIwEFzwJC8sG2OxIG/nr+M9R0VgsUqEdP2vwur
|
||||
XDiadjvh4yfXkQst96EUk4l4HCSPo9YtXmPoM88n+rhHSI3UrQx/mqMdf6V4Z8AM
|
||||
KZ6LEe9sctrVjyJdEixZVVh0F4BOluKVwOLFmZkQLzblj8nul5b/X2+r+B2Z3b9H
|
||||
beI4EN/vBno/riv6L/ZHq3g7xztm1ZQE55PLtTkVD6PhEBXnLxuAHu9psWmFLAn3
|
||||
piz7GQg683y5ncrzHGJs11CXtE7BJ8iHet7whQneF2DY2LcVWVNjQtWslhlhp2+o
|
||||
YE+aVSB5cJ6pjHnz7d0m9QyS1dzDkGfDbPxMES6qDymQpsrpDP3Kro+O7V6JB6To
|
||||
8Qj9gEumjaze7mkQdC4A3NOiOgB6+c3P/ng3HYAA9T7KkiePj+2B4F7YrMdWQqhR
|
||||
IB0uHLUblmw9BVbd0p5EVM7dxEziqHE6kvlka0M9kC8naF6XvHhZQlZhbNXgo4zH
|
||||
JLFVNqQw/c2E983iiBEkBu18t5kz/lOnO0Oh5ARVGiw9cxwx9pzuw1s/VT50698r
|
||||
l0g4IeWbrVbYKKXhFMgX6BRQZiOzbE9QTpWX2PFwd69FHs/mgqlFf9IF5lkBkXP4
|
||||
3qAShAqz6+rlQFesaliN3uF29AHZwB5NOY6lNh6JAj0EEwEKACcFAlUAP5cCGwMF
|
||||
CQlmAYAFCwkIBwMFFQoJCAsFFgIDAQACHgECF4AACgkQqCHmgOX6YwUS/hAAqg57
|
||||
TwZlu8qof1e/2BTCqUiKPQ7XuokYORTKpNrXCMpl/ygZMW8jc6Q+iVKaDDhJN9pz
|
||||
zh6ZBZQSNMwr4v8ndc858dId/WtAbMZ9eG96tIoFT4M+mSSW/YobRuvInuFyVvor
|
||||
Z4pT19/UDG7hRZRKofT6X22Nzumoy97veT3Weuu0jny+H4AiiTobSnRSsZpjCMUs
|
||||
0TzMk+v1+JdMmETGbJZrM4BYZFUmoOOOU1WOPoph4owmz8h2a+VxdnGB95sW9/04
|
||||
gKHld863oauFv01P33okRojin+Q2G97o14Lh+IxFjghiphz9MOzwytwTUNwwrSmf
|
||||
Hjv2VHEtWc5P+GC0Gys7+l2jqUZm/j3H2Vu40R47sLudtty5iMV/sVmBBDRdyuVh
|
||||
C7yJC0dWveFsuEuI/onjf1iKgICkM2fNSj8gCBLtnbK6m3ZURoAwdKfWZR+9vyrq
|
||||
waazxmC+76IOoSBnRFqvdB2z+8CbVHWQ4bOcAUyMDTkwtM1qAiNnRln3aFIdzDdQ
|
||||
GQh1j3p4kuda8XE+IMdcOY3Q9HSSQ76aVrh+zbyXIX2a8H5qN7Tn0a7G2PoThBMe
|
||||
5CNl7aXdFF+v7yoRY4vyt+8p3wxlOli/1iMn2X879BlA3TJzbpSSBTd9aR75fshp
|
||||
9cXSqsUb2ja93Ca6mWkJfDYQUIltTGgg4njduFq0IVRob21hcyBXb3V0ZXJzIDx0
|
||||
aG9tYXNAeHM0YWxsLm5sPokCVAQTAQoAPgIbAwULCQgHAwUVCgkICwUWAgMBAAIe
|
||||
AQIXgAUJFUZECxYhBHFpYF9ix1E1bQVKJqgh5oDl+mMFBQJjO1dvAAoJEKgh5oDl
|
||||
+mMFIlIQAKmkHcJbQ1ltBexoJSsoCi7+9IOCSJpD0fsP6210/hkcSdcbz4EuN9om
|
||||
f7BdCW9SOicBB8bxVid3uF0NnLjqyRusNbRVIXiKWzxb2+36cA9D6ugv1u8oV7Fq
|
||||
sD+zAEWJTNDjd4/rJjEMTMhUxN0EFNrQLDngDnx7AeJyGD2n4eFB6RCJ7qtJtCPq
|
||||
xqiW9jH6vH+YlAz8zbWKF7Z52CPVxAt/yoo6dwLFV6615Mo5n4VN6NiXQeKw5XmZ
|
||||
prXvxDQFkodpjBpoN3fcAX6UTX4yJOR9DhALorr2H1ldI9xdQ0pawlPTDT/gRMsY
|
||||
uHh3NVflUzoLny7TWqd7xLyocH2TqC3OAsF78oR+4W2P0QxuEq/W1WAf+LIpRjeI
|
||||
Q4Xt6TGDku694VHE0pfK5BjpHApyWlGRPVq89x6Z78pCrKiMMtoW30mCPWkSd63h
|
||||
3cPgQNAzo+BBoNYUdvQCAAMEFdBpUjVCQaInAqFuKw1N8IpahsKKSg4jMheLmocG
|
||||
KYbO5IIinjXxIz87skKD6xkukIwfcnhvRM/IkHuxuG+ltO17nbWQNvmvZtEZ47xN
|
||||
9hAVZkaK/5eBDmICH1N8o0gHGU61KfEaCRLuQkFRe72QnbxzUkIwYtC9TCAiYieA
|
||||
xsSRwY5boZsKEnzLmPfM1b96Rj7JKCiMDOBgNbUNcKXuAMqrOMZtiQIcBBABCgAG
|
||||
BQJVAEBfAAoJEKvdCCZikFaI8xEQAIl7VghcbWLaiIcS+SB4mlVuVpFyj6YmD7Y2
|
||||
GWAkiRMyxMdzETOA1dKzcdZG8+0ClATFgdCl7FxZ5M2qRhjOqxRpMK0KpSn7xuXB
|
||||
w+0LWo5gnEXk/+IrXOBNCCkaglyPGlW8GFE/hxGbBt4pDLyQ6nsYWSWLvPNOGdQg
|
||||
J2/P3pa8mCmiFeAt88rz7Xj9wg7OZ7O+3tV6AlYRklfQEBAB/NP1VHv3xc2GsCA1
|
||||
t83f8Echq+CjsYNmcVIOhEKSPvacbfF1dpBp4AQooy6KRA4G1ERi0cXcSOnAdKZW
|
||||
wG0fIl++ziz0iDIM9c7Lg7Jul1+ZpXM9nxCLu7KYGlXe05XK+xzQJdK02tJO96IX
|
||||
oNcZrtqnIV9D/BBX4HMVHlSjoRVCnpXsuCiobgOFahvp94s7I6D48ABc64HPMS/2
|
||||
Nkw0QKLsmpNQ++QfNbNNOhcjMJFmlAy8si0cpBSQjVoctvgoZMo63KjDjvUiEMni
|
||||
tvEq43xPLe4cAUgj3F6/Eq9NDXbS0Bvq2a2gPEfY+d0dSazUCpmPVe9eAGxCZ0QY
|
||||
sPctUvLRf4scwJdQ676DAj+VvUwRKjAlR2wlHFWS38PQ/irvKgu02yHW3K1j99QS
|
||||
2NBauBcKZlr/r3bIWbDxtgOBfvfKGFsEHesSAyNY06OgGONvqMncWiMT1dCKhbxQ
|
||||
GdoSEaNkiQI9BBMBCgAnBQJVACJ2AhsDBQkJZgGABQsJCAcDBRUKCQgLBRYCAwEA
|
||||
Ah4BAheAAAoJEKgh5oDl+mMFLxcP/17E5gEBa/ErPMnY+82r0jZ5rq0DGOotrjsY
|
||||
fSvIX0YvDc6BaqLLuSTr8Klo2MueVHZY78px6VC661KP5+aRBJZHTTUJ+9ZVrP+R
|
||||
pcNHQTKX70XrwYSnl2S7q0nc1K5CkCLYOuM05sAzvOBj/cAtpZuw9D009wxpQXd7
|
||||
kwSREBGcgZ8Ac7kqPbOojtmBkHFWFUg5Rx+fVsulcQkERs1j22tzTvPzsljGmrrY
|
||||
7o6P0K4fzVfeQx43sKKltQrNNZwBtleHD+jQPLThVf09Rhjtq/+cb2wwiAqpHskf
|
||||
dY1njeCh21Yl58IOMHQ6x0Mc6rF3A5wqrLVJ5GYVHl33unZLKcAWDmtV3MPvnvdF
|
||||
WvLDPqkykozU08d1ieyZXFWlwViYaVzh7cjgIM8keCbPk0zFtGZkVUS2um332Xiv
|
||||
pX02J8KqWhNsiioO/SAFOhS6+zR+mZghnr5eqD9SHmCKTJ1JWjG2wx2hibaaszQb
|
||||
qypBW1mwoGMDj4MG91iKSnwMgjp3P8xQIyHy/XTAJ1DD9F//2MtQ2ZK5MqiWTYLn
|
||||
iUzirq8M1E8rLHh8Met5xIOq5iIDcMn0HTBCwrWWPOyqTF2lWK3G6LOjTltQ01fJ
|
||||
JaICpTe1Eirt9v+kr1iPLH1+zIeWZ1O469I/nxRp/WW6Oavzb9WRg5gDBKZ2v+rJ
|
||||
jZLcaEBFtCRUaG9tYXMgV291dGVycyA8dHdvdXRlcnNAZ29vZ2xlLmNvbT6JAlQE
|
||||
EwEKAD4CGwMFCwkIBwMFFQoJCAsFFgIDAQACHgECF4AWIQRxaWBfYsdRNW0FSiao
|
||||
IeaA5fpjBQUCYOBlAQUJFUZECwAKCRCoIeaA5fpjBTvxEACfyEt5rN5QGmVgahD/
|
||||
83l7lQpZUzLSq5MnIfRjCz50seh+oWsOuecayHZ79IDVSkF2L2kE1rumcB7UKPez
|
||||
0kHVrTdh3mQIsfCzQZEMsWTDYotlZbrPPvT3lKGL+O7fU321q9GVotJAssYcQFIK
|
||||
9F2p3jhN2coOzguikVlSc4nswnq2KRIJ4BpSJ3fk1rWLr8oJxN2pSpskYtHdUyUx
|
||||
fZ+fOrMHLbW94JWsLYDad4wpr8etBneVAaUPfphhbIwfhRXlHuTreDtwr3LJYKp1
|
||||
VjUjzGVVT2CXkS9LbJ7aM2BYa/1MJyHxkglu8O9LIDGH2arlbtmBKMbCXPSX/42H
|
||||
sGpUgQYRwG4f+2CfPj4fNx5GK8LO/EJjaw2Qh542U0356RRVZquN6E6SS6Sndlf9
|
||||
sO4cKU/ptT8IsfWKKaLwvr0l71hgLRqqe3rSpTV54cKpJfYIG+Qf4Do69etJLxjY
|
||||
UsyCqzuFocxZa0DGkqDQ+f1cD1bdg7Twso041NZG6y9+E7kCf3jtKkiYAHBY902q
|
||||
Zi8FvtI2tDAqwlfJjdiH5rUtYZALO3KGT+l9p3FTYIdDD1iVC41CeF6loJk0gQZi
|
||||
NmJtyY1TTyNS5Chtr8fSV9yYuoB5XoYYpLu1NCks4Cwva1tE45VhFrl8lPaM3EAB
|
||||
OV+JeHYHX/DgooJRIwgpXCBmwYkCHAQQAQoABgUCVQBAXwAKCRCr3QgmYpBWiBLT
|
||||
D/92NtklPiu0xYCsqUYQzy2dS9y93UHCY0IY7ZPS8k0gmmrxUxwyCARVvLKVy8pC
|
||||
DTw1As8ziOPlwD4QrVxIGMVSKcjM6aEa1h87ezWcz9UCFFKG5NzP7ixBj0bfFlP3
|
||||
STisQXoa1jjT4x8gNw5QMnI4nnnqYRIUZYdsyOru9Kk4pJYMzXG7dHI2mPlGlZ67
|
||||
gsHVtN9w/cF2e+5MeejxJF5YkHwbXyWymFvEHB+yvtrqUKtnr+SKXpBiRaiA1UBS
|
||||
Atus1HE+iEaNpfXjyGPT1/kj2dLS+PyKS61I0B8HF7msoXmbWClwOLAg19C7Gla8
|
||||
CLF53LgIqVjVH5c1+0VjmSyVOJoH9mFMd2rf6iJct9XlLfbAb9+liM/5FSRHtZCO
|
||||
YR6tqfVy+9j5ZhCWfPOaEo4rGLwQ7vHP1qkwH3sZn/UTsQvT4gZX8eIZ3QjutzTD
|
||||
TWfIWvxM7aeYCmNjPZFOB8hmnf3CYL4j5tMwnP19w2HWrYQHOBiIoPwGQxFFZf78
|
||||
F3GlPNYGqDUMNOgF7QxN0+GEh97JjSJ0P4JUX+nMOcousmrvZsH4IM6KIqhk0I59
|
||||
IjUuPxk7qpXU4z4ofEJzxEC0qTtmrNz4+kdvDQuJZY09hAbukzQ8xjl7i519Er+s
|
||||
8iApBcxVZ1Kl4pl60OS6S5FkBS6JfyYDDA2a72R5EJbh/7kCDQRVACJ2ARAA4lpb
|
||||
W8WeDqyRFffqQzVUK6456CkM7Fd77n1FdY0KwNeAmULYeiQ1Kp2PDzxFOyoJNe8a
|
||||
QazB7jPqGth0+JgFCOxGlnAtBP7DQl2MrYAL+AcKJ0c5dXc96ObZ6xtd01n9gAoo
|
||||
uppJINaA2aEX8P6nhQGu9qNz8yMBC22w0MYJZ+38ZVeXGcBCS3AGggeROwNPyNSZ
|
||||
nW5TPVHi+Sea5bCE4eo5UYIAMqcToxieI3V4A2ciQV9nBERLF0bAadD1HEeCb6wM
|
||||
g6h8z6VIRPitk45Dw73dy1yC6OvhkyGQ1yGuOPxwVnG3w0CLSUmMQeqyNAufmtN2
|
||||
yeoSMV74K9kOpkxCzzSulXGhEgCXWE7EXKC2g8i6M4BwYm3AaBGqeo+z7DinffWs
|
||||
8W2UvQUN6JTAdGVgNUfacYbP8YR7fOO1EczJ/FYGxq+JnDUFRpKNsDouw6ZeRI1E
|
||||
iQT3FEKWI3meNmTPBmIcWLoYGNYdmaeb4pqHBb6SfV45H4QjTyIjNHiW/LkpuI7o
|
||||
No/vIlNF8OQwyUFtknXIx57A0VSdI+vfz1crneg/bg0qzBz5SoYZ0XZUfvmYLAoD
|
||||
Z0/KLaqZ1x1Z9wiLbe3iK6nE1mjmWf7rOfmWHuxH/gbChXMDDfOMwgOYFXNXImsN
|
||||
PWPX3XA2DrhFrlNWzA8kxi9hXJrgAfkRcx/84oUAEQEAAYkCPAQYAQoAJgIbDBYh
|
||||
BHFpYF9ix1E1bQVKJqgh5oDl+mMFBQJg4GUPBQkVRkQZAAoJEKgh5oDl+mMFhIcP
|
||||
/j3tJamzKpJGJAwcsoneFtYfmZnLA4UosffaPlsLGRVL1buyRuj2dFBr2WU4NAld
|
||||
YrQPK4T+ciSpfogJ9Dk8s1eUMhZi7gxKmeOHUDyefPXIp7v3PSG4xcnfXjyEK9zC
|
||||
714qFsI9ERjTg7uaw6qmFv8Xht8O8TLGMgqDijQIgrH2oGd6tEdYyOOCOPQ7d6PB
|
||||
Sm5Sw53LlCWlW5I9bc0NCjbnwWjh7Z9UXtLffzZyxgxggSw0vfg5PuhcprZ2Rd3M
|
||||
wzJmALI2BB7eWW1x+M0hXmtdqj7Opmajh+UMrFjLtAlEZfslJwzV9NkAFxDYzRi2
|
||||
jvsmJx78vOPB1XhXgTvlEOvA7qEYDXFaZJHlBDmFU9JqytGZ6PtiQENuLHIe4hO6
|
||||
aHbhJA4I9EqoG1U1COQAwrsHreV6+fpcFn4lXbu+gWPyUzKiQMQd9kI3EEiayObU
|
||||
ro21OFHS7z131kKbMec/oc2RfADCvEwY8oay7o0S9aTqvPSQODs8nYkbZchNFoC+
|
||||
oF9n8pBMNzhYBsTk1OXleD1yMucsuywr5i0meyvu6oQ4+pdPYD6wh7JatJh0hayK
|
||||
y33GGsXd278J1Ek1p6MEFnGLc/zH+NZZLIU7Qn1oFU+gK4cVeaLX2g0/BLKcQ/AE
|
||||
mYIwnecLr8A+Y4mZVwwsnSHtfELtoGSsawN26bzKbnRs
|
||||
=t995
|
||||
-----END PGP PUBLIC KEY BLOCK-----
|
@ -0,0 +1,56 @@
|
||||
"""Checks if all *.pyc files have later mtime than their *.py files."""
|
||||
|
||||
import os
|
||||
import sys
|
||||
from importlib.util import cache_from_source
|
||||
from pathlib import Path
|
||||
|
||||
|
||||
RPM_BUILD_ROOT = os.environ.get('RPM_BUILD_ROOT', '')
|
||||
|
||||
# ...cpython-3X.pyc
|
||||
# ...cpython-3X.opt-1.pyc
|
||||
# ...cpython-3X.opt-2.pyc
|
||||
LEVELS = (None, 1, 2)
|
||||
|
||||
# list of globs of test and other files that we expect not to have bytecode
|
||||
not_compiled = [
|
||||
'/usr/bin/*',
|
||||
'/usr/lib/rpm/redhat/*',
|
||||
'*/test/badsyntax_*.py',
|
||||
'*/tokenizedata/bad_coding.py',
|
||||
'*/tokenizedata/bad_coding2.py',
|
||||
'*/tokenizedata/badsyntax_*.py',
|
||||
'*/test_future_stmt/badsyntax_*.py',
|
||||
'*/test_lib2to3/data/*.py',
|
||||
'*/test_lib2to3/data/*/*.py',
|
||||
'*/test_lib2to3/data/*/*/*.py',
|
||||
'*.debug-gdb.py',
|
||||
]
|
||||
|
||||
|
||||
def bytecode_expected(path):
|
||||
path = Path(path[len(RPM_BUILD_ROOT):])
|
||||
for glob in not_compiled:
|
||||
if path.match(glob):
|
||||
return False
|
||||
return True
|
||||
|
||||
|
||||
failed = 0
|
||||
compiled = (path for path in sys.argv[1:] if bytecode_expected(path))
|
||||
for path in compiled:
|
||||
to_check = (cache_from_source(path, optimization=opt) for opt in LEVELS)
|
||||
f_mtime = os.path.getmtime(path)
|
||||
for pyc in to_check:
|
||||
c_mtime = os.path.getmtime(pyc)
|
||||
if c_mtime < f_mtime:
|
||||
print('Failed bytecompilation timestamps check: '
|
||||
f'Bytecode file {pyc} is older than source file {path}',
|
||||
file=sys.stderr)
|
||||
failed += 1
|
||||
|
||||
if failed:
|
||||
print(f'\n{failed} files failed bytecompilation timestamps check.',
|
||||
file=sys.stderr)
|
||||
sys.exit(1)
|
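The check above relies on importlib.util.cache_from_source() to map each source file to its three expected bytecode caches; a small illustration (the source path is only an example):

from importlib.util import cache_from_source

src = '/usr/lib/python3.12/os.py'              # example source path
print(cache_from_source(src))                  # .../__pycache__/os.cpython-312.pyc
print(cache_from_source(src, optimization=1))  # .../__pycache__/os.cpython-312.opt-1.pyc
print(cache_from_source(src, optimization=2))  # .../__pycache__/os.cpython-312.opt-2.pyc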
@ -0,0 +1,11 @@
|
||||
[Desktop Entry]
|
||||
Version=1.0
|
||||
Name=IDLE 3
|
||||
Comment=Python 3 Integrated Development and Learning Environment
|
||||
Exec=idle3 %F
|
||||
TryExec=idle3
|
||||
Terminal=false
|
||||
Type=Application
|
||||
Icon=idle3
|
||||
Categories=Development;IDE;
|
||||
MimeType=text/x-python;
|
@ -0,0 +1,171 @@
|
||||
'''Script to perform import of each module given to %%py_check_import
|
||||
'''
|
||||
import argparse
|
||||
import importlib
|
||||
import fnmatch
|
||||
import os
|
||||
import re
|
||||
import site
|
||||
import sys
|
||||
|
||||
from contextlib import contextmanager
|
||||
from pathlib import Path
|
||||
|
||||
|
||||
def read_modules_files(file_paths):
|
||||
'''Read module names from the files (modules must be newline separated).
|
||||
|
||||
Return the module names list or, if no files were provided, an empty list.
|
||||
'''
|
||||
|
||||
if not file_paths:
|
||||
return []
|
||||
|
||||
modules = []
|
||||
for file in file_paths:
|
||||
file_contents = file.read_text()
|
||||
modules.extend(file_contents.split())
|
||||
return modules
|
||||
|
||||
|
||||
def read_modules_from_cli(argv):
|
||||
'''Read module names from command-line arguments (space or comma separated).
|
||||
|
||||
Return the module names list.
|
||||
'''
|
||||
|
||||
if not argv:
|
||||
return []
|
||||
|
||||
    # %%py3_check_import allows separating the module list with commas or whitespace,
|
||||
# we need to unify the output to a list of particular elements
|
||||
modules_as_str = ' '.join(argv)
|
||||
modules = re.split(r'[\s,]+', modules_as_str)
|
||||
# Because of shell expansion in some less typical cases it may happen
|
||||
# that a trailing space will occur at the end of the list.
|
||||
# Remove the empty items from the list before passing it further
|
||||
modules = [m for m in modules if m]
|
||||
return modules
|
||||
|
||||
|
||||
def filter_top_level_modules_only(modules):
|
||||
    '''Filter out entries with nested modules (containing a dot), i.e. 'foo.bar'.
|
||||
|
||||
Return the list of top-level modules.
|
||||
'''
|
||||
|
||||
return [module for module in modules if '.' not in module]
|
||||
|
||||
|
||||
def any_match(text, globs):
|
||||
'''Return True if any of given globs fnmatchcase's the given text.'''
|
||||
|
||||
return any(fnmatch.fnmatchcase(text, g) for g in globs)
|
||||
|
||||
|
||||
def exclude_unwanted_module_globs(globs, modules):
|
||||
    '''Filter out entries which match any of the globs given as argv.
|
||||
|
||||
Return the list of filtered modules.
|
||||
'''
|
||||
|
||||
return [m for m in modules if not any_match(m, globs)]
|
||||
|
||||
|
||||
def read_modules_from_all_args(args):
|
||||
'''Return a joined list of modules from all given command-line arguments.
|
||||
'''
|
||||
|
||||
modules = read_modules_files(args.filename)
|
||||
modules.extend(read_modules_from_cli(args.modules))
|
||||
if args.exclude:
|
||||
modules = exclude_unwanted_module_globs(args.exclude, modules)
|
||||
|
||||
if args.top_level:
|
||||
modules = filter_top_level_modules_only(modules)
|
||||
|
||||
# Error when someone accidentally managed to filter out everything
|
||||
if len(modules) == 0:
|
||||
raise ValueError('No modules to check were left')
|
||||
|
||||
return modules
|
||||
|
||||
|
||||
def import_modules(modules):
|
||||
'''Procedure to perform import check for each module name from the given list of modules.
|
||||
'''
|
||||
|
||||
for module in modules:
|
||||
print('Check import:', module, file=sys.stderr)
|
||||
importlib.import_module(module)
|
||||
|
||||
|
||||
def argparser():
|
||||
parser = argparse.ArgumentParser(
|
||||
description='Generate list of all importable modules for import check.'
|
||||
)
|
||||
parser.add_argument(
|
||||
'modules', nargs='*',
|
||||
help=('Add modules to check the import (space or comma separated).'),
|
||||
)
|
||||
parser.add_argument(
|
||||
'-f', '--filename', action='append', type=Path,
|
||||
help='Add importable module names list from file.',
|
||||
)
|
||||
parser.add_argument(
|
||||
'-t', '--top-level', action='store_true',
|
||||
help='Check only top-level modules.',
|
||||
)
|
||||
parser.add_argument(
|
||||
'-e', '--exclude', action='append',
|
||||
help='Provide modules globs to be excluded from the check.',
|
||||
)
|
||||
return parser
|
||||
|
||||
|
||||
@contextmanager
|
||||
def remove_unwanteds_from_sys_path():
|
||||
'''Remove cwd and this script's parent from sys.path for the import test.
|
||||
Bring the original contents back after import is done (or failed)
|
||||
'''
|
||||
|
||||
cwd_absolute = Path.cwd().absolute()
|
||||
this_file_parent = Path(__file__).parent.absolute()
|
||||
old_sys_path = list(sys.path)
|
||||
for path in old_sys_path:
|
||||
if Path(path).absolute() in (cwd_absolute, this_file_parent):
|
||||
sys.path.remove(path)
|
||||
try:
|
||||
yield
|
||||
finally:
|
||||
sys.path = old_sys_path
|
||||
|
||||
|
||||
def addsitedirs_from_environ():
|
||||
'''Load directories from the _PYTHONSITE environment variable (separated by :)
|
||||
and load the ones already present in sys.path via site.addsitedir()
|
||||
to handle .pth files in them.
|
||||
|
||||
This is needed to properly import old-style namespace packages with nspkg.pth files.
|
||||
See https://bugzilla.redhat.com/2018551 for a more detailed rationale.'''
|
||||
for path in os.getenv('_PYTHONSITE', '').split(':'):
|
||||
if path in sys.path:
|
||||
site.addsitedir(path)
|
||||
|
||||
|
||||
def main(argv=None):
|
||||
|
||||
cli_args = argparser().parse_args(argv)
|
||||
|
||||
if not cli_args.modules and not cli_args.filename:
|
||||
raise ValueError('No modules to check were provided')
|
||||
|
||||
modules = read_modules_from_all_args(cli_args)
|
||||
|
||||
with remove_unwanteds_from_sys_path():
|
||||
addsitedirs_from_environ()
|
||||
import_modules(modules)
|
||||
|
||||
|
||||
if __name__ == '__main__':
|
||||
main()
|
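A sketch of driving the checker above directly, assuming the file is importable as a module named import_all_modules (in the RPM macro it is invoked by path instead) and with 'mypkg' standing in for a package installed in the buildroot:

import import_all_modules  # hypothetical module name for the script above

# Equivalent in spirit to: %py3_check_import -t mypkg mypkg.utils
# -t keeps only top-level names, so just 'mypkg' is imported here.
import_all_modules.main(['mypkg', 'mypkg.utils', '-t'])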
@ -0,0 +1,91 @@
|
||||
%__python3 /usr/bin/python3.12
|
||||
%python3_pkgversion 3.12
|
||||
|
||||
# The following are macros from macros.python3 in Fedora that are newer/different than those in the python3-rpm-macros package in RHEL 8.
|
||||
# These macros overwrite/supersede some of the macros in the python3-rpm-macros package in RHEL.
|
||||
|
||||
# nb: $RPM_BUILD_ROOT is not set when the macros are expanded (at spec parse time)
|
||||
# so we set it manually (to empty string), making our Python prefer the correct install scheme location
|
||||
# platbase/base is explicitly set to %%{_prefix} to support custom values, such as /app for flatpaks
|
||||
%python3_sitelib %(RPM_BUILD_ROOT= %{__python3} -Ic "import sysconfig; print(sysconfig.get_path('purelib', vars={'platbase': '%{_prefix}', 'base': '%{_prefix}'}))")
|
||||
%python3_sitearch %(RPM_BUILD_ROOT= %{__python3} -Ic "import sysconfig; print(sysconfig.get_path('platlib', vars={'platbase': '%{_prefix}', 'base': '%{_prefix}'}))")
|
||||
%python3_version %(RPM_BUILD_ROOT= %{__python3} -Ic "import sys; sys.stdout.write('{0.major}.{0.minor}'.format(sys.version_info))")
|
||||
%python3_version_nodots %(RPM_BUILD_ROOT= %{__python3} -Ic "import sys; sys.stdout.write('{0.major}{0.minor}'.format(sys.version_info))")
|
||||
%python3_platform %(RPM_BUILD_ROOT= %{__python3} -Ic "import sysconfig; print(sysconfig.get_platform())")
|
||||
%python3_platform_triplet %(RPM_BUILD_ROOT= %{__python3} -Ic "import sysconfig; print(sysconfig.get_config_var('MULTIARCH'))")
|
||||
%python3_ext_suffix %(RPM_BUILD_ROOT= %{__python3} -Ic "import sysconfig; print(sysconfig.get_config_var('EXT_SUFFIX'))")
|
||||
%python3_cache_tag %(RPM_BUILD_ROOT= %{__python3} -Ic "import sys; print(sys.implementation.cache_tag)")
|
||||
|
||||
%_py3_shebang_s s
|
||||
%_py3_shebang_P %(RPM_BUILD_ROOT= %{__python3} -Ic "import sys; print('P' if hasattr(sys.flags, 'safe_path') else '')")
|
||||
%py3_shbang_opts -%{?_py3_shebang_s}%{?_py3_shebang_P}
|
||||
|
||||
%py3_shebang_fix %{expand:\\\
|
||||
if [ -z "%{?py3_shebang_flags}" ]; then
|
||||
shebang_flags="-k"
|
||||
else
|
||||
shebang_flags="-ka%{py3_shebang_flags}"
|
||||
fi
|
||||
%{__python3} -B %{_rpmconfigdir}/redhat/pathfix_py3_12.py -pni %{__python3} $shebang_flags}
|
||||
|
||||
%py3_install() %{expand:\\\
|
||||
CFLAGS="${CFLAGS:-${RPM_OPT_FLAGS}}" LDFLAGS="${LDFLAGS:-${RPM_LD_FLAGS}}"\\\
|
||||
%{__python3} %{py_setup} %{?py_setup_args} install -O1 --skip-build --root %{buildroot} --prefix %{_prefix} %{?*}
|
||||
rm -rfv %{buildroot}%{_bindir}/__pycache__
|
||||
}
|
||||
|
||||
%py3_install_egg() %{expand:\\\
|
||||
mkdir -p %{buildroot}%{python3_sitelib}
|
||||
%{__python3} -m easy_install -m --prefix %{buildroot}%{_prefix} -Z dist/*-py%{python3_version}.egg %{?*}
|
||||
rm -rfv %{buildroot}%{_bindir}/__pycache__
|
||||
}
|
||||
|
||||
%py3_install_wheel() %{expand:\\\
|
||||
%{__python3} -m pip install -I dist/%{1} --root %{buildroot} --prefix %{_prefix} --no-deps --no-index --no-warn-script-location
|
||||
rm -rfv %{buildroot}%{_bindir}/__pycache__
|
||||
for distinfo in %{buildroot}%{python3_sitelib}/*.dist-info %{buildroot}%{python3_sitearch}/*.dist-info; do
|
||||
if [ -f ${distinfo}/direct_url.json ]; then
|
||||
rm -fv ${distinfo}/direct_url.json
|
||||
sed -i '/direct_url.json/d' ${distinfo}/RECORD
|
||||
fi
|
||||
done
|
||||
}
|
||||
|
||||
# With $PATH and $PYTHONPATH set to the %%buildroot,
|
||||
# try to import the Python 3 module(s) given as command-line args or read from file (-f).
|
||||
# Respect the custom values of %%py3_shebang_flags or set nothing if it's undefined.
|
||||
# Filter and check import on only top-level modules using -t flag.
|
||||
# Exclude unwanted modules by passing their globs to -e option.
|
||||
# Useful as a smoke test in %%check when running tests is not feasible.
|
||||
# Use spaces or commas as separators if providing list directly.
|
||||
# Use newlines as separators if providing list in a file.
|
||||
%py3_check_import(e:tf:) %{expand:\\\
|
||||
PATH="%{buildroot}%{_bindir}:$PATH"\\\
|
||||
PYTHONPATH="${PYTHONPATH:-%{buildroot}%{python3_sitearch}:%{buildroot}%{python3_sitelib}}"\\\
|
||||
_PYTHONSITE="%{buildroot}%{python3_sitearch}:%{buildroot}%{python3_sitelib}"\\\
|
||||
PYTHONDONTWRITEBYTECODE=1\\\
|
||||
%{lua:
|
||||
local command = "%{__python3} "
|
||||
if rpm.expand("%{?py3_shebang_flags}") ~= "" then
|
||||
command = command .. "-%{py3_shebang_flags}"
|
||||
end
|
||||
command = command .. " %{_rpmconfigdir}/redhat/import_all_modules_py3_12.py "
|
||||
-- handle multiline arguments correctly, see https://bugzilla.redhat.com/2018809
|
||||
local args=rpm.expand('%{?**}'):gsub("[%s\\\\]*%s+", " ")
|
||||
print(command .. args)
|
||||
}
|
||||
}
|
||||
|
||||
# Environment variables used by %%pytest, %%tox or standalone, e.g.:
|
||||
# %%{py3_test_envvars} %%{python3} -m unittest
|
||||
%py3_test_envvars %{expand:\\\
|
||||
CFLAGS="${CFLAGS:-${RPM_OPT_FLAGS}}" LDFLAGS="${LDFLAGS:-${RPM_LD_FLAGS}}"\\\
|
||||
PATH="%{buildroot}%{_bindir}:$PATH"\\\
|
||||
PYTHONPATH="${PYTHONPATH:-%{buildroot}%{python3_sitearch}:%{buildroot}%{python3_sitelib}}"\\\
|
||||
PYTHONDONTWRITEBYTECODE=1\\\
|
||||
%{?__pytest_addopts:PYTEST_ADDOPTS="${PYTEST_ADDOPTS:-} %{__pytest_addopts}"}\\\
|
||||
PYTEST_XDIST_AUTO_NUM_WORKERS=%{_smp_build_ncpus}}
|
||||
|
||||
# This is intended for Python 3 only, hence also no Python version in the name.
|
||||
%__pytest /usr/bin/pytest-%{python3_version}
|
||||
%pytest %py3_test_envvars %__pytest
|
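The %python3_sitelib and %python3_sitearch macros above are thin wrappers around sysconfig; written out as plain Python (with /usr standing in for %{_prefix}) they evaluate:

import sysconfig

prefix = '/usr'  # stand-in for %{_prefix}
print(sysconfig.get_path('purelib', vars={'platbase': prefix, 'base': prefix}))  # %python3_sitelib
print(sysconfig.get_path('platlib', vars={'platbase': prefix, 'base': prefix}))  # %python3_sitearch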
@ -0,0 +1,199 @@
|
||||
#!/usr/bin/env python3
|
||||
|
||||
import sys
|
||||
import os
|
||||
from stat import *
|
||||
import getopt
|
||||
|
||||
err = sys.stderr.write
|
||||
dbg = err
|
||||
rep = sys.stdout.write
|
||||
|
||||
new_interpreter = None
|
||||
preserve_timestamps = False
|
||||
create_backup = True
|
||||
keep_flags = False
|
||||
add_flags = b''
|
||||
|
||||
|
||||
def main():
|
||||
global new_interpreter
|
||||
global preserve_timestamps
|
||||
global create_backup
|
||||
global keep_flags
|
||||
global add_flags
|
||||
|
||||
usage = ('usage: %s -i /interpreter -p -n -k -a file-or-directory ...\n' %
|
||||
sys.argv[0])
|
||||
try:
|
||||
opts, args = getopt.getopt(sys.argv[1:], 'i:a:kpn')
|
||||
except getopt.error as msg:
|
||||
err(str(msg) + '\n')
|
||||
err(usage)
|
||||
sys.exit(2)
|
||||
for o, a in opts:
|
||||
if o == '-i':
|
||||
new_interpreter = a.encode()
|
||||
if o == '-p':
|
||||
preserve_timestamps = True
|
||||
if o == '-n':
|
||||
create_backup = False
|
||||
if o == '-k':
|
||||
keep_flags = True
|
||||
if o == '-a':
|
||||
add_flags = a.encode()
|
||||
if b' ' in add_flags:
|
||||
err("-a option doesn't support whitespaces")
|
||||
sys.exit(2)
|
||||
if not new_interpreter or not new_interpreter.startswith(b'/') or \
|
||||
not args:
|
||||
err('-i option or file-or-directory missing\n')
|
||||
err(usage)
|
||||
sys.exit(2)
|
||||
bad = 0
|
||||
for arg in args:
|
||||
if os.path.isdir(arg):
|
||||
if recursedown(arg): bad = 1
|
||||
elif os.path.islink(arg):
|
||||
err(arg + ': will not process symbolic links\n')
|
||||
bad = 1
|
||||
else:
|
||||
if fix(arg): bad = 1
|
||||
sys.exit(bad)
|
||||
|
||||
|
||||
def ispython(name):
|
||||
return name.endswith('.py')
|
||||
|
||||
|
||||
def recursedown(dirname):
|
||||
dbg('recursedown(%r)\n' % (dirname,))
|
||||
bad = 0
|
||||
try:
|
||||
names = os.listdir(dirname)
|
||||
except OSError as msg:
|
||||
err('%s: cannot list directory: %r\n' % (dirname, msg))
|
||||
return 1
|
||||
names.sort()
|
||||
subdirs = []
|
||||
for name in names:
|
||||
if name in (os.curdir, os.pardir): continue
|
||||
fullname = os.path.join(dirname, name)
|
||||
if os.path.islink(fullname): pass
|
||||
elif os.path.isdir(fullname):
|
||||
subdirs.append(fullname)
|
||||
elif ispython(name):
|
||||
if fix(fullname): bad = 1
|
||||
for fullname in subdirs:
|
||||
if recursedown(fullname): bad = 1
|
||||
return bad
|
||||
|
||||
|
||||
def fix(filename):
|
||||
## dbg('fix(%r)\n' % (filename,))
|
||||
try:
|
||||
f = open(filename, 'rb')
|
||||
except IOError as msg:
|
||||
err('%s: cannot open: %r\n' % (filename, msg))
|
||||
return 1
|
||||
with f:
|
||||
line = f.readline()
|
||||
fixed = fixline(line)
|
||||
if line == fixed:
|
||||
rep(filename+': no change\n')
|
||||
return
|
||||
head, tail = os.path.split(filename)
|
||||
tempname = os.path.join(head, '@' + tail)
|
||||
try:
|
||||
g = open(tempname, 'wb')
|
||||
except IOError as msg:
|
||||
err('%s: cannot create: %r\n' % (tempname, msg))
|
||||
return 1
|
||||
with g:
|
||||
rep(filename + ': updating\n')
|
||||
g.write(fixed)
|
||||
BUFSIZE = 8*1024
|
||||
while 1:
|
||||
buf = f.read(BUFSIZE)
|
||||
if not buf: break
|
||||
g.write(buf)
|
||||
|
||||
# Finishing touch -- move files
|
||||
|
||||
mtime = None
|
||||
atime = None
|
||||
# First copy the file's mode to the temp file
|
||||
try:
|
||||
statbuf = os.stat(filename)
|
||||
mtime = statbuf.st_mtime
|
||||
atime = statbuf.st_atime
|
||||
os.chmod(tempname, statbuf[ST_MODE] & 0o7777)
|
||||
except OSError as msg:
|
||||
err('%s: warning: chmod failed (%r)\n' % (tempname, msg))
|
||||
# Then make a backup of the original file as filename~
|
||||
if create_backup:
|
||||
try:
|
||||
os.rename(filename, filename + '~')
|
||||
except OSError as msg:
|
||||
err('%s: warning: backup failed (%r)\n' % (filename, msg))
|
||||
else:
|
||||
try:
|
||||
os.remove(filename)
|
||||
except OSError as msg:
|
||||
err('%s: warning: removing failed (%r)\n' % (filename, msg))
|
||||
# Now move the temp file to the original file
|
||||
try:
|
||||
os.rename(tempname, filename)
|
||||
except OSError as msg:
|
||||
err('%s: rename failed (%r)\n' % (filename, msg))
|
||||
return 1
|
||||
if preserve_timestamps:
|
||||
if atime and mtime:
|
||||
try:
|
||||
os.utime(filename, (atime, mtime))
|
||||
except OSError as msg:
|
||||
err('%s: reset of timestamp failed (%r)\n' % (filename, msg))
|
||||
return 1
|
||||
# Return success
|
||||
return 0
|
||||
|
||||
|
||||
def parse_shebang(shebangline):
|
||||
shebangline = shebangline.rstrip(b'\n')
|
||||
start = shebangline.find(b' -')
|
||||
if start == -1:
|
||||
return b''
|
||||
return shebangline[start:]
|
||||
|
||||
|
||||
def populate_flags(shebangline):
|
||||
old_flags = b''
|
||||
if keep_flags:
|
||||
old_flags = parse_shebang(shebangline)
|
||||
if old_flags:
|
||||
old_flags = old_flags[2:]
|
||||
if not (old_flags or add_flags):
|
||||
return b''
|
||||
# On Linux, the entire string following the interpreter name
|
||||
# is passed as a single argument to the interpreter.
|
||||
    # e.g. "#! /usr/bin/python3 -W Error -s" runs /usr/bin/python3 with the single argument "-W Error -s",
|
||||
    # so the shebang should have a single '-' where the flags are given, and a
|
||||
    # flag might need an argument; for that reason the new flags are added
|
||||
    # between the '-' and the original flags,
|
||||
# e.g. #! /usr/bin/python3 -sW Error
|
||||
return b' -' + add_flags + old_flags
|
||||
|
||||
|
||||
def fixline(line):
|
||||
if not line.startswith(b'#!'):
|
||||
return line
|
||||
|
||||
if b"python" not in line:
|
||||
return line
|
||||
|
||||
flags = populate_flags(line)
|
||||
return b'#! ' + new_interpreter + flags + b'\n'
|
||||
|
||||
|
||||
if __name__ == '__main__':
|
||||
main()
|
File diff suppressed because it is too large