commit ce4699a074
@@ -0,0 +1 @@
SOURCES/Python-3.12.1.tar.xz
@@ -0,0 +1 @@
5b11c58ea58cd6b8e1943c7e9b5f6e0997ca3632 SOURCES/Python-3.12.1.tar.xz
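
The 40-character digest above has the length of a SHA-1 checksum (an assumption based on length alone; dist-git lookaside metadata can also use longer digests). A minimal sketch for verifying a downloaded tarball against it, assuming Python 3.11+ for hashlib.file_digest():

import hashlib

EXPECTED = '5b11c58ea58cd6b8e1943c7e9b5f6e0997ca3632'
with open('SOURCES/Python-3.12.1.tar.xz', 'rb') as f:
    digest = hashlib.file_digest(f, 'sha1').hexdigest()
print('OK' if digest == EXPECTED else f'mismatch: {digest}')
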
@@ -0,0 +1,173 @@
From 0000000000000000000000000000000000000000 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Miro=20Hron=C4=8Dok?= <miro@hroncok.cz>
Date: Mon, 15 Feb 2021 12:19:27 +0100
Subject: [PATCH] 00251: Change user install location
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

Set the values of base and platbase in sysconfig from /usr
to /usr/local when an RPM build is not detected,
to make pip and similar tools install into a separate location.

Fedora Change: https://fedoraproject.org/wiki/Changes/Making_sudo_pip_safe

Downstream only.

We tried to rework this in Fedora 36/Python 3.10 to follow https://bugs.python.org/issue43976
but we identified serious problems with that approach,
see https://bugzilla.redhat.com/2026979 or https://bugzilla.redhat.com/2097183

pypa/distutils integration: https://github.com/pypa/distutils/pull/70

Co-authored-by: Petr Viktorin <encukou@gmail.com>
Co-authored-by: Miro Hrončok <miro@hroncok.cz>
Co-authored-by: Michal Cyprian <m.cyprian@gmail.com>
Co-authored-by: Lumír Balhar <frenzy.madness@gmail.com>
---
 Lib/site.py                |  9 ++++++-
 Lib/sysconfig.py           | 49 +++++++++++++++++++++++++++++++++++++-
 Lib/test/test_sysconfig.py | 17 +++++++++++--
 3 files changed, 71 insertions(+), 4 deletions(-)

diff --git a/Lib/site.py b/Lib/site.py
|
||||
index 672fa7b000..0a9c5be53e 100644
|
||||
--- a/Lib/site.py
|
||||
+++ b/Lib/site.py
|
||||
@@ -377,8 +377,15 @@ def getsitepackages(prefixes=None):
|
||||
return sitepackages
|
||||
|
||||
def addsitepackages(known_paths, prefixes=None):
|
||||
- """Add site-packages to sys.path"""
|
||||
+ """Add site-packages to sys.path
|
||||
+
|
||||
+ '/usr/local' is included in PREFIXES if RPM build is not detected
|
||||
+ to make packages installed into this location visible.
|
||||
+
|
||||
+ """
|
||||
_trace("Processing global site-packages")
|
||||
+ if ENABLE_USER_SITE and 'RPM_BUILD_ROOT' not in os.environ:
|
||||
+ PREFIXES.insert(0, "/usr/local")
|
||||
for sitedir in getsitepackages(prefixes):
|
||||
if os.path.isdir(sitedir):
|
||||
addsitedir(sitedir, known_paths)
|
||||
diff --git a/Lib/sysconfig.py b/Lib/sysconfig.py
|
||||
index 122d441bd1..2d354a11da 100644
|
||||
--- a/Lib/sysconfig.py
|
||||
+++ b/Lib/sysconfig.py
|
||||
@@ -104,6 +104,11 @@
|
||||
else:
|
||||
_INSTALL_SCHEMES['venv'] = _INSTALL_SCHEMES['posix_venv']
|
||||
|
||||
+# For a brief period of time in the Fedora 36 life cycle,
|
||||
+# this installation scheme existed and was documented in the release notes.
|
||||
+# For backwards compatibility, we keep it here (at least on 3.10 and 3.11).
|
||||
+_INSTALL_SCHEMES['rpm_prefix'] = _INSTALL_SCHEMES['posix_prefix']
|
||||
+
|
||||
|
||||
# NOTE: site.py has copy of this function.
|
||||
# Sync it when modify this function.
|
||||
@@ -163,6 +168,19 @@ def joinuser(*args):
|
||||
},
|
||||
}
|
||||
|
||||
+# This is used by distutils.command.install in the stdlib
|
||||
+# as well as pypa/distutils (e.g. bundled in setuptools).
|
||||
+# The self.prefix value is set to sys.prefix + /local/
|
||||
+# if neither RPM build nor virtual environment is
|
||||
+# detected to make distutils install packages
|
||||
+# into the separate location.
|
||||
+# https://fedoraproject.org/wiki/Changes/Making_sudo_pip_safe
|
||||
+if (not (hasattr(sys, 'real_prefix') or
|
||||
+ sys.prefix != sys.base_prefix) and
|
||||
+ 'RPM_BUILD_ROOT' not in os.environ):
|
||||
+ _prefix_addition = '/local'
|
||||
+
|
||||
+
|
||||
_SCHEME_KEYS = ('stdlib', 'platstdlib', 'purelib', 'platlib', 'include',
|
||||
'scripts', 'data')
|
||||
|
||||
@@ -263,11 +281,40 @@ def _extend_dict(target_dict, other_dict):
|
||||
target_dict[key] = value
|
||||
|
||||
|
||||
+_CONFIG_VARS_LOCAL = None
|
||||
+
|
||||
+
|
||||
+def _config_vars_local():
|
||||
+ # This function returns the config vars with prefixes amended to /usr/local
|
||||
+ # https://fedoraproject.org/wiki/Changes/Making_sudo_pip_safe
|
||||
+ global _CONFIG_VARS_LOCAL
|
||||
+ if _CONFIG_VARS_LOCAL is None:
|
||||
+ _CONFIG_VARS_LOCAL = dict(get_config_vars())
|
||||
+ _CONFIG_VARS_LOCAL['base'] = '/usr/local'
|
||||
+ _CONFIG_VARS_LOCAL['platbase'] = '/usr/local'
|
||||
+ return _CONFIG_VARS_LOCAL
|
||||
+
|
||||
+
|
||||
def _expand_vars(scheme, vars):
|
||||
res = {}
|
||||
if vars is None:
|
||||
vars = {}
|
||||
- _extend_dict(vars, get_config_vars())
|
||||
+
|
||||
+ # when we are not in a virtual environment or an RPM build
|
||||
+ # we change '/usr' to '/usr/local'
|
||||
+ # to avoid surprises, we explicitly check for the /usr/ prefix
|
||||
+ # Python virtual environments have different prefixes
|
||||
+ # we only do this for posix_prefix, not to mangle the venv scheme
|
||||
+ # posix_prefix is used by sudo pip install
|
||||
+ # we only change the defaults here, so explicit --prefix will take precedence
|
||||
+ # https://fedoraproject.org/wiki/Changes/Making_sudo_pip_safe
|
||||
+ if (scheme == 'posix_prefix' and
|
||||
+ _PREFIX == '/usr' and
|
||||
+ 'RPM_BUILD_ROOT' not in os.environ):
|
||||
+ _extend_dict(vars, _config_vars_local())
|
||||
+ else:
|
||||
+ _extend_dict(vars, get_config_vars())
|
||||
+
|
||||
if os.name == 'nt':
|
||||
# On Windows we want to substitute 'lib' for schemes rather
|
||||
# than the native value (without modifying vars, in case it
|
||||
diff --git a/Lib/test/test_sysconfig.py b/Lib/test/test_sysconfig.py
|
||||
index b6dbf3d52c..4f06a7673c 100644
|
||||
--- a/Lib/test/test_sysconfig.py
|
||||
+++ b/Lib/test/test_sysconfig.py
|
||||
@@ -110,8 +110,19 @@ def test_get_path(self):
|
||||
for scheme in _INSTALL_SCHEMES:
|
||||
for name in _INSTALL_SCHEMES[scheme]:
|
||||
expected = _INSTALL_SCHEMES[scheme][name].format(**config_vars)
|
||||
+ tested = get_path(name, scheme)
|
||||
+ # https://fedoraproject.org/wiki/Changes/Making_sudo_pip_safe
|
||||
+ if tested.startswith('/usr/local'):
|
||||
+ # /usr/local should only be used in posix_prefix
|
||||
+ self.assertEqual(scheme, 'posix_prefix')
|
||||
+ # Fedora CI runs tests for venv and virtualenv that check for other prefixes
|
||||
+ self.assertEqual(sys.prefix, '/usr')
|
||||
+ # When building the RPM of Python, %check runs this with RPM_BUILD_ROOT set
|
||||
+ # Fedora CI runs this with RPM_BUILD_ROOT unset
|
||||
+ self.assertNotIn('RPM_BUILD_ROOT', os.environ)
|
||||
+ tested = tested.replace('/usr/local', '/usr')
|
||||
self.assertEqual(
|
||||
- os.path.normpath(get_path(name, scheme)),
|
||||
+ os.path.normpath(tested),
|
||||
os.path.normpath(expected),
|
||||
)
|
||||
|
||||
@@ -335,7 +346,7 @@ def test_get_config_h_filename(self):
|
||||
self.assertTrue(os.path.isfile(config_h), config_h)
|
||||
|
||||
def test_get_scheme_names(self):
|
||||
- wanted = ['nt', 'posix_home', 'posix_prefix', 'posix_venv', 'nt_venv', 'venv']
|
||||
+ wanted = ['nt', 'posix_home', 'posix_prefix', 'posix_venv', 'nt_venv', 'venv', 'rpm_prefix']
|
||||
if HAS_USER_BASE:
|
||||
wanted.extend(['nt_user', 'osx_framework_user', 'posix_user'])
|
||||
self.assertEqual(get_scheme_names(), tuple(sorted(wanted)))
|
||||
@@ -347,6 +358,8 @@ def test_symlink(self): # Issue 7880
|
||||
cmd = "-c", "import sysconfig; print(sysconfig.get_platform())"
|
||||
self.assertEqual(py.call_real(*cmd), py.call_link(*cmd))
|
||||
|
||||
+ @unittest.skipIf('RPM_BUILD_ROOT' not in os.environ,
|
||||
+ "Test doesn't expect Fedora's paths")
|
||||
def test_user_similar(self):
|
||||
# Issue #8759: make sure the posix scheme for the users
|
||||
# is similar to the global posix_prefix one
|
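
The effect of patch 00251 can be checked from an interactive interpreter. A minimal sketch, assuming a Fedora/RHEL interpreter carrying this patch, sys.prefix == '/usr', and no virtual environment:

import os
import sysconfig

# Outside an RPM build, the posix_prefix scheme is expanded with base/platbase
# set to /usr/local, so 'sudo pip install' lands in a separate location.
print(sysconfig.get_path('purelib'))   # expected: /usr/local/lib/python3.12/site-packages

# When RPM_BUILD_ROOT is set (as during an RPM %check), the stock /usr paths are kept.
os.environ['RPM_BUILD_ROOT'] = '/tmp/buildroot'   # illustrative value
print(sysconfig.get_path('purelib'))   # expected: /usr/lib/python3.12/site-packages
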
File diff suppressed because it is too large
@@ -0,0 +1,103 @@
From 0000000000000000000000000000000000000000 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Tom=C3=A1=C5=A1=20Hrn=C4=8Diar?= <thrnciar@redhat.com>
Date: Tue, 25 Oct 2022 12:02:33 +0200
Subject: [PATCH] 00371: Revert "bpo-1596321: Fix threading._shutdown() for the
 main thread (GH-28549) (GH-28589)"

This reverts commit 38c67738c64304928c68d5c2bd78bbb01d979b94. It
introduced a regression causing FreeIPA's tests to fail.

For more info see:
https://bodhi.fedoraproject.org/updates/FEDORA-2021-e152ce5f31
https://github.com/GrahamDumpleton/mod_wsgi/issues/730
---
 Lib/test/test_threading.py | 33 ---------------------------------
 Lib/threading.py           | 25 ++++++++-----------------
 2 files changed, 8 insertions(+), 50 deletions(-)

diff --git a/Lib/test/test_threading.py b/Lib/test/test_threading.py
|
||||
index 756d5e329f..5d09775efc 100644
|
||||
--- a/Lib/test/test_threading.py
|
||||
+++ b/Lib/test/test_threading.py
|
||||
@@ -1007,39 +1007,6 @@ def noop(): pass
|
||||
threading.Thread(target=noop).start()
|
||||
# Thread.join() is not called
|
||||
|
||||
- def test_import_from_another_thread(self):
|
||||
- # bpo-1596321: If the threading module is first import from a thread
|
||||
- # different than the main thread, threading._shutdown() must handle
|
||||
- # this case without logging an error at Python exit.
|
||||
- code = textwrap.dedent('''
|
||||
- import _thread
|
||||
- import sys
|
||||
-
|
||||
- event = _thread.allocate_lock()
|
||||
- event.acquire()
|
||||
-
|
||||
- def import_threading():
|
||||
- import threading
|
||||
- event.release()
|
||||
-
|
||||
- if 'threading' in sys.modules:
|
||||
- raise Exception('threading is already imported')
|
||||
-
|
||||
- _thread.start_new_thread(import_threading, ())
|
||||
-
|
||||
- # wait until the threading module is imported
|
||||
- event.acquire()
|
||||
- event.release()
|
||||
-
|
||||
- if 'threading' not in sys.modules:
|
||||
- raise Exception('threading is not imported')
|
||||
-
|
||||
- # don't wait until the thread completes
|
||||
- ''')
|
||||
- rc, out, err = assert_python_ok("-c", code)
|
||||
- self.assertEqual(out, b'')
|
||||
- self.assertEqual(err, b'')
|
||||
-
|
||||
def test_start_new_thread_at_exit(self):
|
||||
code = """if 1:
|
||||
import atexit
|
||||
diff --git a/Lib/threading.py b/Lib/threading.py
|
||||
index 8dcaf8ca6a..ed0b0f4632 100644
|
||||
--- a/Lib/threading.py
|
||||
+++ b/Lib/threading.py
|
||||
@@ -1586,29 +1586,20 @@ def _shutdown():
|
||||
|
||||
global _SHUTTING_DOWN
|
||||
_SHUTTING_DOWN = True
|
||||
+ # Main thread
|
||||
+ tlock = _main_thread._tstate_lock
|
||||
+ # The main thread isn't finished yet, so its thread state lock can't have
|
||||
+ # been released.
|
||||
+ assert tlock is not None
|
||||
+ assert tlock.locked()
|
||||
+ tlock.release()
|
||||
+ _main_thread._stop()
|
||||
|
||||
# Call registered threading atexit functions before threads are joined.
|
||||
# Order is reversed, similar to atexit.
|
||||
for atexit_call in reversed(_threading_atexits):
|
||||
atexit_call()
|
||||
|
||||
- # Main thread
|
||||
- if _main_thread.ident == get_ident():
|
||||
- tlock = _main_thread._tstate_lock
|
||||
- # The main thread isn't finished yet, so its thread state lock can't
|
||||
- # have been released.
|
||||
- assert tlock is not None
|
||||
- assert tlock.locked()
|
||||
- tlock.release()
|
||||
- _main_thread._stop()
|
||||
- else:
|
||||
- # bpo-1596321: _shutdown() must be called in the main thread.
|
||||
- # If the threading module was not imported by the main thread,
|
||||
- # _main_thread is the thread which imported the threading module.
|
||||
- # In this case, ignore _main_thread, similar behavior than for threads
|
||||
- # spawned by C libraries or using _thread.start_new_thread().
|
||||
- pass
|
||||
-
|
||||
# Join all non-deamon threads
|
||||
while True:
|
||||
with _shutdown_locks_lock:
|
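
Whether this downstream revert is present in a given interpreter can be checked with a small sketch; it only looks for the get_ident() branch that upstream GH-28549 added and this patch removes from threading._shutdown():

import inspect
import threading

source = inspect.getsource(threading._shutdown)
# With the revert applied, the main thread's tstate lock is released
# unconditionally and no get_ident() comparison remains in _shutdown().
print('revert applied' if 'get_ident()' not in source else 'upstream behaviour')
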
@@ -0,0 +1,243 @@
From 73d2995223c725638d53b9cb8e1d26b82daf0874 Mon Sep 17 00:00:00 2001
From: Petr Viktorin <encukou@gmail.com>
Date: Mon, 6 Mar 2023 17:24:24 +0100
Subject: [PATCH] CVE-2007-4559, PEP-706: Add filters for tarfile extraction
 (downstream)

Add and test RHEL-specific ways of configuring the default behavior: environment
variable and config file.
---
 Lib/tarfile.py           |  47 +++++++++++++--
 Lib/test/test_shutil.py  |   2 +-
 Lib/test/test_tarfile.py | 123 ++++++++++++++++++++++++++++++++++++++-
 3 files changed, 163 insertions(+), 9 deletions(-)

diff --git a/Lib/tarfile.py b/Lib/tarfile.py
|
||||
index 02f5e3b..f7109f3 100755
|
||||
--- a/Lib/tarfile.py
|
||||
+++ b/Lib/tarfile.py
|
||||
@@ -71,6 +71,13 @@ __all__ = ["TarFile", "TarInfo", "is_tarfile", "TarError", "ReadError",
|
||||
"OutsideDestinationError", "SpecialFileError", "AbsolutePathError",
|
||||
"LinkOutsideDestinationError"]
|
||||
|
||||
+# If true, use the safer (but backwards-incompatible) 'tar' extraction filter,
|
||||
+# rather than 'fully_trusted', by default.
|
||||
+# The emitted warning is changed to match.
|
||||
+_RH_SAFER_DEFAULT = True
|
||||
+
|
||||
+# System-wide configuration file
|
||||
+_CONFIG_FILENAME = '/etc/python/tarfile.cfg'
|
||||
|
||||
#---------------------------------------------------------
|
||||
# tar constants
|
||||
@@ -2217,11 +2224,41 @@ class TarFile(object):
|
||||
if filter is None:
|
||||
filter = self.extraction_filter
|
||||
if filter is None:
|
||||
- warnings.warn(
|
||||
- 'Python 3.14 will, by default, filter extracted tar '
|
||||
- + 'archives and reject files or modify their metadata. '
|
||||
- + 'Use the filter argument to control this behavior.',
|
||||
- DeprecationWarning)
|
||||
+ name = os.environ.get('PYTHON_TARFILE_EXTRACTION_FILTER')
|
||||
+ if name is None:
|
||||
+ try:
|
||||
+ file = bltn_open(_CONFIG_FILENAME)
|
||||
+ except FileNotFoundError:
|
||||
+ pass
|
||||
+ else:
|
||||
+ import configparser
|
||||
+ conf = configparser.ConfigParser(
|
||||
+ interpolation=None,
|
||||
+ comment_prefixes=('#', ),
|
||||
+ )
|
||||
+ with file:
|
||||
+ conf.read_file(file)
|
||||
+ name = conf.get('tarfile',
|
||||
+ 'PYTHON_TARFILE_EXTRACTION_FILTER',
|
||||
+ fallback='')
|
||||
+ if name:
|
||||
+ try:
|
||||
+ filter = _NAMED_FILTERS[name]
|
||||
+ except KeyError:
|
||||
+ raise ValueError(f"filter {filter!r} not found") from None
|
||||
+ self.extraction_filter = filter
|
||||
+ return filter
|
||||
+ if _RH_SAFER_DEFAULT:
|
||||
+ warnings.warn(
|
||||
+ 'The default behavior of tarfile extraction has been '
|
||||
+ + 'changed to disallow common exploits '
|
||||
+ + '(including CVE-2007-4559). '
|
||||
+ + 'By default, absolute/parent paths are disallowed '
|
||||
+ + 'and some mode bits are cleared. '
|
||||
+ + 'See https://access.redhat.com/articles/7004769 '
|
||||
+ + 'for more details.',
|
||||
+ RuntimeWarning)
|
||||
+ return tar_filter
|
||||
return fully_trusted_filter
|
||||
if isinstance(filter, str):
|
||||
raise TypeError(
|
||||
diff --git a/Lib/test/test_shutil.py b/Lib/test/test_shutil.py
|
||||
index 5fd8fb4..501da8f 100644
|
||||
--- a/Lib/test/test_shutil.py
|
||||
+++ b/Lib/test/test_shutil.py
|
||||
@@ -1950,7 +1950,7 @@ class TestArchives(BaseTest, unittest.TestCase):
|
||||
self.check_unpack_archive(format, filter='fully_trusted')
|
||||
self.check_unpack_archive(format, filter='data')
|
||||
with warnings_helper.check_warnings(
|
||||
- ('Python 3.14', DeprecationWarning)):
|
||||
+ ('.*CVE-2007-4559', RuntimeWarning)):
|
||||
self.check_unpack_archive(format)
|
||||
|
||||
def test_unpack_archive_tar(self):
|
||||
diff --git a/Lib/test/test_tarfile.py b/Lib/test/test_tarfile.py
|
||||
index c5fc76d..397e334 100644
|
||||
--- a/Lib/test/test_tarfile.py
|
||||
+++ b/Lib/test/test_tarfile.py
|
||||
@@ -3097,8 +3097,8 @@ class NoneInfoExtractTests(ReadTest):
|
||||
tar.errorlevel = 0
|
||||
with ExitStack() as cm:
|
||||
if cls.extraction_filter is None:
|
||||
- cm.enter_context(warnings.catch_warnings(
|
||||
- action="ignore", category=DeprecationWarning))
|
||||
+ cm.enter_context(warnings.catch_warnings())
|
||||
+ warnings.simplefilter(action="ignore", category=RuntimeWarning)
|
||||
tar.extractall(cls.control_dir, filter=cls.extraction_filter)
|
||||
tar.close()
|
||||
cls.control_paths = set(
|
||||
@@ -3919,7 +3919,7 @@ class TestExtractionFilters(unittest.TestCase):
|
||||
with ArchiveMaker() as arc:
|
||||
arc.add('foo')
|
||||
with warnings_helper.check_warnings(
|
||||
- ('Python 3.14', DeprecationWarning)):
|
||||
+ ('.*CVE-2007-4559', RuntimeWarning)):
|
||||
with self.check_context(arc.open(), None):
|
||||
self.expect_file('foo')
|
||||
|
||||
@@ -4089,6 +4089,123 @@ class TestExtractionFilters(unittest.TestCase):
|
||||
self.expect_exception(TypeError) # errorlevel is not int
|
||||
|
||||
|
||||
+ @contextmanager
|
||||
+ def rh_config_context(self, config_lines=None):
|
||||
+ """Set up for testing various ways of overriding the default filter
|
||||
+
|
||||
+ return a triple with:
|
||||
+ - temporary directory
|
||||
+ - EnvironmentVarGuard()
|
||||
+ - a test archive for use with check_* methods below
|
||||
+
|
||||
+ If config_lines is given, write them to the config file. Otherwise
|
||||
+ the config file is missing.
|
||||
+ """
|
||||
+ tempdir = pathlib.Path(TEMPDIR) / 'tmp'
|
||||
+ configfile = tempdir / 'tarfile.cfg'
|
||||
+ with ArchiveMaker() as arc:
|
||||
+ arc.add('good')
|
||||
+ arc.add('ugly', symlink_to='/etc/passwd')
|
||||
+ arc.add('../bad')
|
||||
+ with (
|
||||
+ os_helper.temp_dir(tempdir),
|
||||
+ support.swap_attr(tarfile, '_CONFIG_FILENAME', str(configfile)),
|
||||
+ os_helper.EnvironmentVarGuard() as env,
|
||||
+ arc.open() as tar,
|
||||
+ ):
|
||||
+ if config_lines is not None:
|
||||
+ with configfile.open('w') as f:
|
||||
+ for line in config_lines:
|
||||
+ print(line, file=f)
|
||||
+ yield tempdir, env, tar
|
||||
+
|
||||
+ def check_rh_default_behavior(self, tar, tempdir):
|
||||
+ """Check RH default: warn and refuse to extract dangerous files."""
|
||||
+ with (
|
||||
+ warnings_helper.check_warnings(
|
||||
+ ('.*CVE-2007-4559', RuntimeWarning)),
|
||||
+ self.assertRaises(tarfile.OutsideDestinationError),
|
||||
+ ):
|
||||
+ tar.extractall(tempdir / 'outdir')
|
||||
+
|
||||
+ def check_trusted_default(self, tar, tempdir):
|
||||
+ """Check 'fully_trusted' is configured as the default filter."""
|
||||
+ with (
|
||||
+ warnings_helper.check_no_warnings(self),
|
||||
+ ):
|
||||
+ tar.extractall(tempdir / 'outdir')
|
||||
+ self.assertTrue((tempdir / 'outdir/good').exists())
|
||||
+ self.assertEqual((tempdir / 'outdir/ugly').readlink(),
|
||||
+ pathlib.Path('/etc/passwd'))
|
||||
+ self.assertTrue((tempdir / 'bad').exists())
|
||||
+
|
||||
+ def test_rh_default_no_conf(self):
|
||||
+ with self.rh_config_context() as (tempdir, env, tar):
|
||||
+ self.check_rh_default_behavior(tar, tempdir)
|
||||
+
|
||||
+ def test_rh_default_from_file(self):
|
||||
+ lines = ['[tarfile]', 'PYTHON_TARFILE_EXTRACTION_FILTER=fully_trusted']
|
||||
+ with self.rh_config_context(lines) as (tempdir, env, tar):
|
||||
+ self.check_trusted_default(tar, tempdir)
|
||||
+
|
||||
+ def test_rh_empty_config_file(self):
|
||||
+ """Empty config file -> default behavior"""
|
||||
+ lines = []
|
||||
+ with self.rh_config_context(lines) as (tempdir, env, tar):
|
||||
+ self.check_rh_default_behavior(tar, tempdir)
|
||||
+
|
||||
+ def test_empty_config_section(self):
|
||||
+ """Empty section in config file -> default behavior"""
|
||||
+ lines = ['[tarfile]']
|
||||
+ with self.rh_config_context(lines) as (tempdir, env, tar):
|
||||
+ self.check_rh_default_behavior(tar, tempdir)
|
||||
+
|
||||
+ def test_rh_default_empty_config_option(self):
|
||||
+ """Empty option value in config file -> default behavior"""
|
||||
+ lines = ['[tarfile]', 'PYTHON_TARFILE_EXTRACTION_FILTER=']
|
||||
+ with self.rh_config_context(lines) as (tempdir, env, tar):
|
||||
+ self.check_rh_default_behavior(tar, tempdir)
|
||||
+
|
||||
+ def test_bad_config_option(self):
|
||||
+ """Bad option value in config file -> ValueError"""
|
||||
+ lines = ['[tarfile]', 'PYTHON_TARFILE_EXTRACTION_FILTER=unknown!']
|
||||
+ with self.rh_config_context(lines) as (tempdir, env, tar):
|
||||
+ with self.assertRaises(ValueError):
|
||||
+ tar.extractall(tempdir / 'outdir')
|
||||
+
|
||||
+ def test_default_from_envvar(self):
|
||||
+ with self.rh_config_context() as (tempdir, env, tar):
|
||||
+ env['PYTHON_TARFILE_EXTRACTION_FILTER'] = 'fully_trusted'
|
||||
+ self.check_trusted_default(tar, tempdir)
|
||||
+
|
||||
+ def test_empty_envvar(self):
|
||||
+ """Empty env variable -> default behavior"""
|
||||
+ with self.rh_config_context() as (tempdir, env, tar):
|
||||
+ env['PYTHON_TARFILE_EXTRACTION_FILTER'] = ''
|
||||
+ self.check_rh_default_behavior(tar, tempdir)
|
||||
+
|
||||
+ def test_bad_envvar(self):
|
||||
+ with self.rh_config_context() as (tempdir, env, tar):
|
||||
+ env['PYTHON_TARFILE_EXTRACTION_FILTER'] = 'unknown!'
|
||||
+ with self.assertRaises(ValueError):
|
||||
+ tar.extractall(tempdir / 'outdir')
|
||||
+
|
||||
+ def test_envvar_overrides_file(self):
|
||||
+ lines = ['[tarfile]', 'PYTHON_TARFILE_EXTRACTION_FILTER=data']
|
||||
+ with self.rh_config_context(lines) as (tempdir, env, tar):
|
||||
+ env['PYTHON_TARFILE_EXTRACTION_FILTER'] = 'fully_trusted'
|
||||
+ self.check_trusted_default(tar, tempdir)
|
||||
+
|
||||
+ def test_monkeypatch_overrides_envvar(self):
|
||||
+ with self.rh_config_context(None) as (tempdir, env, tar):
|
||||
+ env['PYTHON_TARFILE_EXTRACTION_FILTER'] = 'data'
|
||||
+ with support.swap_attr(
|
||||
+ tarfile.TarFile, 'extraction_filter',
|
||||
+ staticmethod(tarfile.fully_trusted_filter)
|
||||
+ ):
|
||||
+ self.check_trusted_default(tar, tempdir)
|
||||
+
|
||||
+
|
||||
def setUpModule():
|
||||
os_helper.unlink(TEMPDIR)
|
||||
os.makedirs(TEMPDIR)
|
||||
--
|
||||
2.43.0
|
||||
|
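
The two RHEL-specific configuration hooks added by this patch can be exercised as follows. A sketch, assuming an interpreter carrying the patch; 'example.tar' and 'outdir' are placeholders, and the accepted names are tarfile's named filters ('fully_trusted', 'tar', 'data'):

import os
import tarfile

# Per-process override via the environment variable the patch reads first.
os.environ['PYTHON_TARFILE_EXTRACTION_FILTER'] = 'data'
with tarfile.open('example.tar') as tf:
    tf.extractall('outdir')   # extracted with the 'data' filter, no RuntimeWarning

# System-wide default via /etc/python/tarfile.cfg, consulted only when the
# environment variable is unset:
#
#   [tarfile]
#   PYTHON_TARFILE_EXTRACTION_FILTER = fully_trusted
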
@@ -0,0 +1,483 @@
From 0000000000000000000000000000000000000000 Mon Sep 17 00:00:00 2001
From: Victor Stinner <vstinner@python.org>
Date: Fri, 15 Dec 2023 16:10:40 +0100
Subject: [PATCH] 00415: [CVE-2023-27043] gh-102988: Reject malformed addresses
 in email.parseaddr() (#111116)

Detect email address parsing errors and return empty tuple to
indicate the parsing error (old API). Add an optional 'strict'
parameter to getaddresses() and parseaddr() functions. Patch by
Thomas Dwyer.

Co-Authored-By: Thomas Dwyer <github@tomd.tel>
---
 Doc/library/email.utils.rst                   |  19 +-
 Lib/email/utils.py                            | 151 +++++++++++++-
 Lib/test/test_email/test_email.py             | 187 +++++++++++++++++-
 ...-10-20-15-28-08.gh-issue-102988.dStNO7.rst |   8 +
 4 files changed, 344 insertions(+), 21 deletions(-)
 create mode 100644 Misc/NEWS.d/next/Library/2023-10-20-15-28-08.gh-issue-102988.dStNO7.rst

diff --git a/Doc/library/email.utils.rst b/Doc/library/email.utils.rst
|
||||
index 345b64001c..d693a9bc39 100644
|
||||
--- a/Doc/library/email.utils.rst
|
||||
+++ b/Doc/library/email.utils.rst
|
||||
@@ -58,13 +58,18 @@ of the new API.
|
||||
begins with angle brackets, they are stripped off.
|
||||
|
||||
|
||||
-.. function:: parseaddr(address)
|
||||
+.. function:: parseaddr(address, *, strict=True)
|
||||
|
||||
Parse address -- which should be the value of some address-containing field such
|
||||
as :mailheader:`To` or :mailheader:`Cc` -- into its constituent *realname* and
|
||||
*email address* parts. Returns a tuple of that information, unless the parse
|
||||
fails, in which case a 2-tuple of ``('', '')`` is returned.
|
||||
|
||||
+ If *strict* is true, use a strict parser which rejects malformed inputs.
|
||||
+
|
||||
+ .. versionchanged:: 3.13
|
||||
+ Add *strict* optional parameter and reject malformed inputs by default.
|
||||
+
|
||||
|
||||
.. function:: formataddr(pair, charset='utf-8')
|
||||
|
||||
@@ -82,12 +87,15 @@ of the new API.
|
||||
Added the *charset* option.
|
||||
|
||||
|
||||
-.. function:: getaddresses(fieldvalues)
|
||||
+.. function:: getaddresses(fieldvalues, *, strict=True)
|
||||
|
||||
This method returns a list of 2-tuples of the form returned by ``parseaddr()``.
|
||||
*fieldvalues* is a sequence of header field values as might be returned by
|
||||
- :meth:`Message.get_all <email.message.Message.get_all>`. Here's a simple
|
||||
- example that gets all the recipients of a message::
|
||||
+ :meth:`Message.get_all <email.message.Message.get_all>`.
|
||||
+
|
||||
+ If *strict* is true, use a strict parser which rejects malformed inputs.
|
||||
+
|
||||
+ Here's a simple example that gets all the recipients of a message::
|
||||
|
||||
from email.utils import getaddresses
|
||||
|
||||
@@ -97,6 +105,9 @@ of the new API.
|
||||
resent_ccs = msg.get_all('resent-cc', [])
|
||||
all_recipients = getaddresses(tos + ccs + resent_tos + resent_ccs)
|
||||
|
||||
+ .. versionchanged:: 3.13
|
||||
+ Add *strict* optional parameter and reject malformed inputs by default.
|
||||
+
|
||||
|
||||
.. function:: parsedate(date)
|
||||
|
||||
diff --git a/Lib/email/utils.py b/Lib/email/utils.py
|
||||
index 81da5394ea..43c3627fca 100644
|
||||
--- a/Lib/email/utils.py
|
||||
+++ b/Lib/email/utils.py
|
||||
@@ -48,6 +48,7 @@
|
||||
specialsre = re.compile(r'[][\\()<>@,:;".]')
|
||||
escapesre = re.compile(r'[\\"]')
|
||||
|
||||
+
|
||||
def _has_surrogates(s):
|
||||
"""Return True if s contains surrogate-escaped binary data."""
|
||||
# This check is based on the fact that unless there are surrogates, utf8
|
||||
@@ -106,12 +107,127 @@ def formataddr(pair, charset='utf-8'):
|
||||
return address
|
||||
|
||||
|
||||
+def _iter_escaped_chars(addr):
|
||||
+ pos = 0
|
||||
+ escape = False
|
||||
+ for pos, ch in enumerate(addr):
|
||||
+ if escape:
|
||||
+ yield (pos, '\\' + ch)
|
||||
+ escape = False
|
||||
+ elif ch == '\\':
|
||||
+ escape = True
|
||||
+ else:
|
||||
+ yield (pos, ch)
|
||||
+ if escape:
|
||||
+ yield (pos, '\\')
|
||||
|
||||
-def getaddresses(fieldvalues):
|
||||
- """Return a list of (REALNAME, EMAIL) for each fieldvalue."""
|
||||
- all = COMMASPACE.join(str(v) for v in fieldvalues)
|
||||
- a = _AddressList(all)
|
||||
- return a.addresslist
|
||||
+
|
||||
+def _strip_quoted_realnames(addr):
|
||||
+ """Strip real names between quotes."""
|
||||
+ if '"' not in addr:
|
||||
+ # Fast path
|
||||
+ return addr
|
||||
+
|
||||
+ start = 0
|
||||
+ open_pos = None
|
||||
+ result = []
|
||||
+ for pos, ch in _iter_escaped_chars(addr):
|
||||
+ if ch == '"':
|
||||
+ if open_pos is None:
|
||||
+ open_pos = pos
|
||||
+ else:
|
||||
+ if start != open_pos:
|
||||
+ result.append(addr[start:open_pos])
|
||||
+ start = pos + 1
|
||||
+ open_pos = None
|
||||
+
|
||||
+ if start < len(addr):
|
||||
+ result.append(addr[start:])
|
||||
+
|
||||
+ return ''.join(result)
|
||||
+
|
||||
+
|
||||
+supports_strict_parsing = True
|
||||
+
|
||||
+def getaddresses(fieldvalues, *, strict=True):
|
||||
+ """Return a list of (REALNAME, EMAIL) or ('','') for each fieldvalue.
|
||||
+
|
||||
+ When parsing fails for a fieldvalue, a 2-tuple of ('', '') is returned in
|
||||
+ its place.
|
||||
+
|
||||
+ If strict is true, use a strict parser which rejects malformed inputs.
|
||||
+ """
|
||||
+
|
||||
+ # If strict is true, if the resulting list of parsed addresses is greater
|
||||
+ # than the number of fieldvalues in the input list, a parsing error has
|
||||
+ # occurred and consequently a list containing a single empty 2-tuple [('',
|
||||
+ # '')] is returned in its place. This is done to avoid invalid output.
|
||||
+ #
|
||||
+ # Malformed input: getaddresses(['alice@example.com <bob@example.com>'])
|
||||
+ # Invalid output: [('', 'alice@example.com'), ('', 'bob@example.com')]
|
||||
+ # Safe output: [('', '')]
|
||||
+
|
||||
+ if not strict:
|
||||
+ all = COMMASPACE.join(str(v) for v in fieldvalues)
|
||||
+ a = _AddressList(all)
|
||||
+ return a.addresslist
|
||||
+
|
||||
+ fieldvalues = [str(v) for v in fieldvalues]
|
||||
+ fieldvalues = _pre_parse_validation(fieldvalues)
|
||||
+ addr = COMMASPACE.join(fieldvalues)
|
||||
+ a = _AddressList(addr)
|
||||
+ result = _post_parse_validation(a.addresslist)
|
||||
+
|
||||
+ # Treat output as invalid if the number of addresses is not equal to the
|
||||
+ # expected number of addresses.
|
||||
+ n = 0
|
||||
+ for v in fieldvalues:
|
||||
+        # When a comma is used in the Real Name part it is not a delimiter.
|
||||
+ # So strip those out before counting the commas.
|
||||
+ v = _strip_quoted_realnames(v)
|
||||
+ # Expected number of addresses: 1 + number of commas
|
||||
+ n += 1 + v.count(',')
|
||||
+ if len(result) != n:
|
||||
+ return [('', '')]
|
||||
+
|
||||
+ return result
|
||||
+
|
||||
+
|
||||
+def _check_parenthesis(addr):
|
||||
+ # Ignore parenthesis in quoted real names.
|
||||
+ addr = _strip_quoted_realnames(addr)
|
||||
+
|
||||
+ opens = 0
|
||||
+ for pos, ch in _iter_escaped_chars(addr):
|
||||
+ if ch == '(':
|
||||
+ opens += 1
|
||||
+ elif ch == ')':
|
||||
+ opens -= 1
|
||||
+ if opens < 0:
|
||||
+ return False
|
||||
+ return (opens == 0)
|
||||
+
|
||||
+
|
||||
+def _pre_parse_validation(email_header_fields):
|
||||
+ accepted_values = []
|
||||
+ for v in email_header_fields:
|
||||
+ if not _check_parenthesis(v):
|
||||
+ v = "('', '')"
|
||||
+ accepted_values.append(v)
|
||||
+
|
||||
+ return accepted_values
|
||||
+
|
||||
+
|
||||
+def _post_parse_validation(parsed_email_header_tuples):
|
||||
+ accepted_values = []
|
||||
+ # The parser would have parsed a correctly formatted domain-literal
|
||||
+ # The existence of an [ after parsing indicates a parsing failure
|
||||
+ for v in parsed_email_header_tuples:
|
||||
+ if '[' in v[1]:
|
||||
+ v = ('', '')
|
||||
+ accepted_values.append(v)
|
||||
+
|
||||
+ return accepted_values
|
||||
|
||||
|
||||
def _format_timetuple_and_zone(timetuple, zone):
|
||||
@@ -205,16 +321,33 @@ def parsedate_to_datetime(data):
|
||||
tzinfo=datetime.timezone(datetime.timedelta(seconds=tz)))
|
||||
|
||||
|
||||
-def parseaddr(addr):
|
||||
+def parseaddr(addr, *, strict=True):
|
||||
"""
|
||||
Parse addr into its constituent realname and email address parts.
|
||||
|
||||
Return a tuple of realname and email address, unless the parse fails, in
|
||||
which case return a 2-tuple of ('', '').
|
||||
+
|
||||
+ If strict is True, use a strict parser which rejects malformed inputs.
|
||||
"""
|
||||
- addrs = _AddressList(addr).addresslist
|
||||
- if not addrs:
|
||||
- return '', ''
|
||||
+ if not strict:
|
||||
+ addrs = _AddressList(addr).addresslist
|
||||
+ if not addrs:
|
||||
+ return ('', '')
|
||||
+ return addrs[0]
|
||||
+
|
||||
+ if isinstance(addr, list):
|
||||
+ addr = addr[0]
|
||||
+
|
||||
+ if not isinstance(addr, str):
|
||||
+ return ('', '')
|
||||
+
|
||||
+ addr = _pre_parse_validation([addr])[0]
|
||||
+ addrs = _post_parse_validation(_AddressList(addr).addresslist)
|
||||
+
|
||||
+ if not addrs or len(addrs) > 1:
|
||||
+ return ('', '')
|
||||
+
|
||||
return addrs[0]
|
||||
|
||||
|
||||
diff --git a/Lib/test/test_email/test_email.py b/Lib/test/test_email/test_email.py
|
||||
index 2a237095b9..4672b790d8 100644
|
||||
--- a/Lib/test/test_email/test_email.py
|
||||
+++ b/Lib/test/test_email/test_email.py
|
||||
@@ -16,6 +16,7 @@
|
||||
|
||||
import email
|
||||
import email.policy
|
||||
+import email.utils
|
||||
|
||||
from email.charset import Charset
|
||||
from email.generator import Generator, DecodedGenerator, BytesGenerator
|
||||
@@ -3337,15 +3338,137 @@ def test_getaddresses_comma_in_name(self):
|
||||
],
|
||||
)
|
||||
|
||||
+ def test_parsing_errors(self):
|
||||
+ """Test for parsing errors from CVE-2023-27043 and CVE-2019-16056"""
|
||||
+ alice = 'alice@example.org'
|
||||
+ bob = 'bob@example.com'
|
||||
+ empty = ('', '')
|
||||
+
|
||||
+ # Test utils.getaddresses() and utils.parseaddr() on malformed email
|
||||
+ # addresses: default behavior (strict=True) rejects malformed address,
|
||||
+ # and strict=False which tolerates malformed address.
|
||||
+ for invalid_separator, expected_non_strict in (
|
||||
+ ('(', [(f'<{bob}>', alice)]),
|
||||
+ (')', [('', alice), empty, ('', bob)]),
|
||||
+ ('<', [('', alice), empty, ('', bob), empty]),
|
||||
+ ('>', [('', alice), empty, ('', bob)]),
|
||||
+ ('[', [('', f'{alice}[<{bob}>]')]),
|
||||
+ (']', [('', alice), empty, ('', bob)]),
|
||||
+ ('@', [empty, empty, ('', bob)]),
|
||||
+ (';', [('', alice), empty, ('', bob)]),
|
||||
+ (':', [('', alice), ('', bob)]),
|
||||
+ ('.', [('', alice + '.'), ('', bob)]),
|
||||
+ ('"', [('', alice), ('', f'<{bob}>')]),
|
||||
+ ):
|
||||
+ address = f'{alice}{invalid_separator}<{bob}>'
|
||||
+ with self.subTest(address=address):
|
||||
+ self.assertEqual(utils.getaddresses([address]),
|
||||
+ [empty])
|
||||
+ self.assertEqual(utils.getaddresses([address], strict=False),
|
||||
+ expected_non_strict)
|
||||
+
|
||||
+ self.assertEqual(utils.parseaddr([address]),
|
||||
+ empty)
|
||||
+ self.assertEqual(utils.parseaddr([address], strict=False),
|
||||
+ ('', address))
|
||||
+
|
||||
+ # Comma (',') is treated differently depending on strict parameter.
|
||||
+ # Comma without quotes.
|
||||
+ address = f'{alice},<{bob}>'
|
||||
+ self.assertEqual(utils.getaddresses([address]),
|
||||
+ [('', alice), ('', bob)])
|
||||
+ self.assertEqual(utils.getaddresses([address], strict=False),
|
||||
+ [('', alice), ('', bob)])
|
||||
+ self.assertEqual(utils.parseaddr([address]),
|
||||
+ empty)
|
||||
+ self.assertEqual(utils.parseaddr([address], strict=False),
|
||||
+ ('', address))
|
||||
+
|
||||
+ # Real name between quotes containing comma.
|
||||
+ address = '"Alice, alice@example.org" <bob@example.com>'
|
||||
+ expected_strict = ('Alice, alice@example.org', 'bob@example.com')
|
||||
+ self.assertEqual(utils.getaddresses([address]), [expected_strict])
|
||||
+ self.assertEqual(utils.getaddresses([address], strict=False), [expected_strict])
|
||||
+ self.assertEqual(utils.parseaddr([address]), expected_strict)
|
||||
+ self.assertEqual(utils.parseaddr([address], strict=False),
|
||||
+ ('', address))
|
||||
+
|
||||
+ # Valid parenthesis in comments.
|
||||
+ address = 'alice@example.org (Alice)'
|
||||
+ expected_strict = ('Alice', 'alice@example.org')
|
||||
+ self.assertEqual(utils.getaddresses([address]), [expected_strict])
|
||||
+ self.assertEqual(utils.getaddresses([address], strict=False), [expected_strict])
|
||||
+ self.assertEqual(utils.parseaddr([address]), expected_strict)
|
||||
+ self.assertEqual(utils.parseaddr([address], strict=False),
|
||||
+ ('', address))
|
||||
+
|
||||
+ # Invalid parenthesis in comments.
|
||||
+ address = 'alice@example.org )Alice('
|
||||
+ self.assertEqual(utils.getaddresses([address]), [empty])
|
||||
+ self.assertEqual(utils.getaddresses([address], strict=False),
|
||||
+ [('', 'alice@example.org'), ('', ''), ('', 'Alice')])
|
||||
+ self.assertEqual(utils.parseaddr([address]), empty)
|
||||
+ self.assertEqual(utils.parseaddr([address], strict=False),
|
||||
+ ('', address))
|
||||
+
|
||||
+ # Two addresses with quotes separated by comma.
|
||||
+ address = '"Jane Doe" <jane@example.net>, "John Doe" <john@example.net>'
|
||||
+ self.assertEqual(utils.getaddresses([address]),
|
||||
+ [('Jane Doe', 'jane@example.net'),
|
||||
+ ('John Doe', 'john@example.net')])
|
||||
+ self.assertEqual(utils.getaddresses([address], strict=False),
|
||||
+ [('Jane Doe', 'jane@example.net'),
|
||||
+ ('John Doe', 'john@example.net')])
|
||||
+ self.assertEqual(utils.parseaddr([address]), empty)
|
||||
+ self.assertEqual(utils.parseaddr([address], strict=False),
|
||||
+ ('', address))
|
||||
+
|
||||
+ # Test email.utils.supports_strict_parsing attribute
|
||||
+ self.assertEqual(email.utils.supports_strict_parsing, True)
|
||||
+
|
||||
def test_getaddresses_nasty(self):
|
||||
- eq = self.assertEqual
|
||||
- eq(utils.getaddresses(['foo: ;']), [('', '')])
|
||||
- eq(utils.getaddresses(
|
||||
- ['[]*-- =~$']),
|
||||
- [('', ''), ('', ''), ('', '*--')])
|
||||
- eq(utils.getaddresses(
|
||||
- ['foo: ;', '"Jason R. Mastaler" <jason@dom.ain>']),
|
||||
- [('', ''), ('Jason R. Mastaler', 'jason@dom.ain')])
|
||||
+ for addresses, expected in (
|
||||
+ (['"Sürname, Firstname" <to@example.com>'],
|
||||
+ [('Sürname, Firstname', 'to@example.com')]),
|
||||
+
|
||||
+ (['foo: ;'],
|
||||
+ [('', '')]),
|
||||
+
|
||||
+ (['foo: ;', '"Jason R. Mastaler" <jason@dom.ain>'],
|
||||
+ [('', ''), ('Jason R. Mastaler', 'jason@dom.ain')]),
|
||||
+
|
||||
+ ([r'Pete(A nice \) chap) <pete(his account)@silly.test(his host)>'],
|
||||
+ [('Pete (A nice ) chap his account his host)', 'pete@silly.test')]),
|
||||
+
|
||||
+ (['(Empty list)(start)Undisclosed recipients :(nobody(I know))'],
|
||||
+ [('', '')]),
|
||||
+
|
||||
+ (['Mary <@machine.tld:mary@example.net>, , jdoe@test . example'],
|
||||
+ [('Mary', 'mary@example.net'), ('', ''), ('', 'jdoe@test.example')]),
|
||||
+
|
||||
+ (['John Doe <jdoe@machine(comment). example>'],
|
||||
+ [('John Doe (comment)', 'jdoe@machine.example')]),
|
||||
+
|
||||
+ (['"Mary Smith: Personal Account" <smith@home.example>'],
|
||||
+ [('Mary Smith: Personal Account', 'smith@home.example')]),
|
||||
+
|
||||
+ (['Undisclosed recipients:;'],
|
||||
+ [('', '')]),
|
||||
+
|
||||
+ ([r'<boss@nil.test>, "Giant; \"Big\" Box" <bob@example.net>'],
|
||||
+ [('', 'boss@nil.test'), ('Giant; "Big" Box', 'bob@example.net')]),
|
||||
+ ):
|
||||
+ with self.subTest(addresses=addresses):
|
||||
+ self.assertEqual(utils.getaddresses(addresses),
|
||||
+ expected)
|
||||
+ self.assertEqual(utils.getaddresses(addresses, strict=False),
|
||||
+ expected)
|
||||
+
|
||||
+ addresses = ['[]*-- =~$']
|
||||
+ self.assertEqual(utils.getaddresses(addresses),
|
||||
+ [('', '')])
|
||||
+ self.assertEqual(utils.getaddresses(addresses, strict=False),
|
||||
+ [('', ''), ('', ''), ('', '*--')])
|
||||
|
||||
def test_getaddresses_embedded_comment(self):
|
||||
"""Test proper handling of a nested comment"""
|
||||
@@ -3536,6 +3659,54 @@ def test_mime_classes_policy_argument(self):
|
||||
m = cls(*constructor, policy=email.policy.default)
|
||||
self.assertIs(m.policy, email.policy.default)
|
||||
|
||||
+ def test_iter_escaped_chars(self):
|
||||
+ self.assertEqual(list(utils._iter_escaped_chars(r'a\\b\"c\\"d')),
|
||||
+ [(0, 'a'),
|
||||
+ (2, '\\\\'),
|
||||
+ (3, 'b'),
|
||||
+ (5, '\\"'),
|
||||
+ (6, 'c'),
|
||||
+ (8, '\\\\'),
|
||||
+ (9, '"'),
|
||||
+ (10, 'd')])
|
||||
+ self.assertEqual(list(utils._iter_escaped_chars('a\\')),
|
||||
+ [(0, 'a'), (1, '\\')])
|
||||
+
|
||||
+ def test_strip_quoted_realnames(self):
|
||||
+ def check(addr, expected):
|
||||
+ self.assertEqual(utils._strip_quoted_realnames(addr), expected)
|
||||
+
|
||||
+ check('"Jane Doe" <jane@example.net>, "John Doe" <john@example.net>',
|
||||
+ ' <jane@example.net>, <john@example.net>')
|
||||
+ check(r'"Jane \"Doe\"." <jane@example.net>',
|
||||
+ ' <jane@example.net>')
|
||||
+
|
||||
+ # special cases
|
||||
+ check(r'before"name"after', 'beforeafter')
|
||||
+ check(r'before"name"', 'before')
|
||||
+ check(r'b"name"', 'b') # single char
|
||||
+ check(r'"name"after', 'after')
|
||||
+ check(r'"name"a', 'a') # single char
|
||||
+ check(r'"name"', '')
|
||||
+
|
||||
+ # no change
|
||||
+ for addr in (
|
||||
+ 'Jane Doe <jane@example.net>, John Doe <john@example.net>',
|
||||
+ 'lone " quote',
|
||||
+ ):
|
||||
+ self.assertEqual(utils._strip_quoted_realnames(addr), addr)
|
||||
+
|
||||
+
|
||||
+ def test_check_parenthesis(self):
|
||||
+ addr = 'alice@example.net'
|
||||
+ self.assertTrue(utils._check_parenthesis(f'{addr} (Alice)'))
|
||||
+ self.assertFalse(utils._check_parenthesis(f'{addr} )Alice('))
|
||||
+ self.assertFalse(utils._check_parenthesis(f'{addr} (Alice))'))
|
||||
+ self.assertFalse(utils._check_parenthesis(f'{addr} ((Alice)'))
|
||||
+
|
||||
+ # Ignore real name between quotes
|
||||
+ self.assertTrue(utils._check_parenthesis(f'")Alice((" {addr}'))
|
||||
+
|
||||
|
||||
# Test the iterator/generators
|
||||
class TestIterators(TestEmailBase):
|
||||
diff --git a/Misc/NEWS.d/next/Library/2023-10-20-15-28-08.gh-issue-102988.dStNO7.rst b/Misc/NEWS.d/next/Library/2023-10-20-15-28-08.gh-issue-102988.dStNO7.rst
|
||||
new file mode 100644
|
||||
index 0000000000..3d0e9e4078
|
||||
--- /dev/null
|
||||
+++ b/Misc/NEWS.d/next/Library/2023-10-20-15-28-08.gh-issue-102988.dStNO7.rst
|
||||
@@ -0,0 +1,8 @@
|
||||
+:func:`email.utils.getaddresses` and :func:`email.utils.parseaddr` now
|
||||
+return ``('', '')`` 2-tuples in more situations where invalid email
|
||||
+addresses are encountered instead of potentially inaccurate values. Add
|
||||
+optional *strict* parameter to these two functions: use ``strict=False`` to
|
||||
+get the old behavior, accept malformed inputs.
|
||||
+``getattr(email.utils, 'supports_strict_parsing', False)`` can be used to check
|
||||
+if the *strict* parameter is available. Patch by Thomas Dwyer and Victor
|
||||
+Stinner to improve the CVE-2023-27043 fix.
|
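
The behaviour change is easiest to see directly. A sketch using the same kind of malformed input as the tests above:

from email.utils import getaddresses, parseaddr

malformed = 'alice@example.org(<bob@example.com>'   # unbalanced parenthesis

# New strict default: parsing errors collapse to ('', '') rather than
# silently yielding an address that was never written.
print(getaddresses([malformed]))    # [('', '')]
print(parseaddr(malformed))         # ('', '')

# The old, lenient parser is still available explicitly.
print(getaddresses([malformed], strict=False))
print(parseaddr(malformed, strict=False))
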
@@ -0,0 +1,18 @@
|
||||
-----BEGIN PGP SIGNATURE-----
|
||||
|
||||
iQKTBAABCgB9FiEEcWlgX2LHUTVtBUomqCHmgOX6YwUFAmVyMspfFIAAAAAALgAo
|
||||
aXNzdWVyLWZwckBub3RhdGlvbnMub3BlbnBncC5maWZ0aGhvcnNlbWFuLm5ldDcx
|
||||
Njk2MDVGNjJDNzUxMzU2RDA1NEEyNkE4MjFFNjgwRTVGQTYzMDUACgkQqCHmgOX6
|
||||
YwWv5w/+JlGtfy+x+6mtauH1uOkt7n9PMQou1LcthDs5s41wuwjO7RbwnmJD6aDk
|
||||
DqwLHheoq6Kjbl6PF1kG2T8ZbHkMudhnc5yH4eQG52IGNQ6evilxoC6AyhVg8ANi
|
||||
+u6Juh9r2Hjz/LDWFB4hzwcOBKy0jYw98+A0uMvpPd2bmdFMBLQE0GTZCdrRsGYs
|
||||
q0oysUX7uCJBfINp7XwiVGAK/6ma0nrr0A1ho6LCau+VGkDnJZdKZgIMyyxp6qL1
|
||||
7tMjb3LUpV3FWp57L2za59TaayApNf5BlanC+de6oKEhEJ8oEFyWxOx2GmXHZwch
|
||||
ucj7Z1dxuI7fjNVkEvZ+JuheLGtB9mAmUZslXgUJf5wo49bCo9E4/ZlIFQk7VJR3
|
||||
Bm9VlQb5mMydB8QJbMy/BpgNjgKmEvBTnir37prJpUV/TL1YZT0eZ5JxCnlUIL/F
|
||||
6cOzAE3zHPnvHcyHhKV3q5CoONdBtB3RWgS66m4eMneuWoNKaoEbO5IDxtKvCd1J
|
||||
AKLmzCB0/KCWVUIYBTfJ8ytBVQA0Z2w8CZ7SC8asX4DocDCvxim1sQg5s8c4mzh+
|
||||
1JVbyqqEmf9m74Mqby0vICC6UVvgaPyiOxTphtRXLIYHUscLVn5+586RMYnM9nP4
|
||||
nEK+H/fq6Rcp1XEtIPzCG4IPUAYnuDLjbGQegltpKV/SAYn+DGg=
|
||||
=dCpy
|
||||
-----END PGP SIGNATURE-----
|
@@ -0,0 +1,136 @@
|
||||
-----BEGIN PGP PUBLIC KEY BLOCK-----
|
||||
|
||||
mQINBFUAInYBEACrmKcXagNRlo1VjznrJZMMUh0rxUn2iK2wy9H5qrCo4EgMYahZ
|
||||
ibBunSWB4RNeVQevzUm3eSyOixnt+BmGZbSYqKp8tJIXRRcnKhEtC62X+7NVMc7B
|
||||
9uPu/aJ3HNqXrsQwBJUzZxzLMLg6obCyarhhHAYbWmfaafU4yNk3J4dGNKoZtHvz
|
||||
bjnUtlsUAkCmuyt3MsUuSYz34BviRLSEZEKW6xNoyQmD9dUhQ5exBuTPjtmdTf+x
|
||||
gOKpBluRkJ4TADGlWf42lIkaI+8DYRj1R8eQdLFwS7sDTu/MMPceKU7nTWOoj8HF
|
||||
3xXRJ+bJbpOJXZFEzVKjXHKuMFkhKr562i0LD8pdl1+s+9LRovmAvGwggt04Drzb
|
||||
AK437QoyjPKiTnFlg4tOeIuN0Y+GGk2hXOdH7fNw79B9Tq5ENxth8NsnKVlz1zpF
|
||||
X+aV0zCvAjNWutAUpikqZT/ibpwmM+NJcz3pgzQOq+LfPFskyrv7zkVODEjH3SG3
|
||||
s4ROvyoWfLPWmX92kJMOkvzyQObZmU2zWJgJbjYRApZiTfbfnH1tE+wxH4ZR5dji
|
||||
FpEdUJn1yjpYp21Q10khIdsj6q9IvS3RDq0ygc5wfl5111byEsdP12y36lvPTclT
|
||||
33VHBR1vxr+js9d8FI4wwt/o+7TmAO39DYhLrtn+ZgyRgIBYY65lhEaUtwARAQAB
|
||||
tCJUaG9tYXMgV291dGVycyA8dGhvbWFzQHB5dGhvbi5vcmc+iQJXBBMBCgBBAhsD
|
||||
BQsJCAcDBRUKCQgLBRYCAwEAAh4BAheABQkVRkQLFiEEcWlgX2LHUTVtBUomqCHm
|
||||
gOX6YwUFAmM7V3UCGQEACgkQqCHmgOX6YwVFeRAAkXE+BC/8O7VVtNe3iCdcQtW3
|
||||
PiCINEJgOQbXSwjkIGjD/Noheu+2cdwznjUmAX3qgnOyxIvo1AzYXagRazKVl1A+
|
||||
AiMctMNUCuVAkPeTL3nUERzOzZP6fE9OB/XNyiFeNPGg3qGz/HEJH8OMzahfOpzM
|
||||
VC3bCcZrn3JmMp6X8gLgArcK20L7qu/USO/Ico9vT8n+IkZIyxv9GNzfr4QZtGQN
|
||||
DkcHXHbX7p6juffdF9PpQgeAHfP4F9ZuDC+Mc5AGQaxY0z+gNLQGbTEjBBxkrGqd
|
||||
3iOHWb+RLLRJkHkF95KegatrgRkK3d+WLsHwCWzySDAKsjcvM33+N5YB9vWiL/K+
|
||||
kRbgEiecQHwsV1WT+DLY4yoLEBDVbThSw90R2b4bDzCOWShYMX8hDu5HaP3vT1Ye
|
||||
lLSYT/1TxX1yvGeCuA8D+V9OZbSi7eKVT7W4pxqiCcDTpvMvx3o9NfiHEFGQfjlZ
|
||||
nQsIBt9YeBG2c/GL0h1v4X9kBHjxv58576L9olEuWViuCam3OmW31Ik8OjYUwHs0
|
||||
tVqc/ciKsot/3ci96wxnG0RajkXL4ybQI7QzJ3OJJyLMZUPx7UTkdYlD7ZKJyU/N
|
||||
kdcmEjtvBtWeCROZOdivvZeJnSe/vANbH9Oibongl9Zwlq0w/Sd8fHKJQZC7c4dA
|
||||
bTVfbTLXuaLUE86ZCdSJAhwEEAEKAAYFAlUAQF8ACgkQq90IJmKQVoiM5w/+OT6z
|
||||
14MgLePITzg+SCRH7H6QgMjLQXrIwEFzwJC8sG2OxIG/nr+M9R0VgsUqEdP2vwur
|
||||
XDiadjvh4yfXkQst96EUk4l4HCSPo9YtXmPoM88n+rhHSI3UrQx/mqMdf6V4Z8AM
|
||||
KZ6LEe9sctrVjyJdEixZVVh0F4BOluKVwOLFmZkQLzblj8nul5b/X2+r+B2Z3b9H
|
||||
beI4EN/vBno/riv6L/ZHq3g7xztm1ZQE55PLtTkVD6PhEBXnLxuAHu9psWmFLAn3
|
||||
piz7GQg683y5ncrzHGJs11CXtE7BJ8iHet7whQneF2DY2LcVWVNjQtWslhlhp2+o
|
||||
YE+aVSB5cJ6pjHnz7d0m9QyS1dzDkGfDbPxMES6qDymQpsrpDP3Kro+O7V6JB6To
|
||||
8Qj9gEumjaze7mkQdC4A3NOiOgB6+c3P/ng3HYAA9T7KkiePj+2B4F7YrMdWQqhR
|
||||
IB0uHLUblmw9BVbd0p5EVM7dxEziqHE6kvlka0M9kC8naF6XvHhZQlZhbNXgo4zH
|
||||
JLFVNqQw/c2E983iiBEkBu18t5kz/lOnO0Oh5ARVGiw9cxwx9pzuw1s/VT50698r
|
||||
l0g4IeWbrVbYKKXhFMgX6BRQZiOzbE9QTpWX2PFwd69FHs/mgqlFf9IF5lkBkXP4
|
||||
3qAShAqz6+rlQFesaliN3uF29AHZwB5NOY6lNh6JAj0EEwEKACcFAlUAP5cCGwMF
|
||||
CQlmAYAFCwkIBwMFFQoJCAsFFgIDAQACHgECF4AACgkQqCHmgOX6YwUS/hAAqg57
|
||||
TwZlu8qof1e/2BTCqUiKPQ7XuokYORTKpNrXCMpl/ygZMW8jc6Q+iVKaDDhJN9pz
|
||||
zh6ZBZQSNMwr4v8ndc858dId/WtAbMZ9eG96tIoFT4M+mSSW/YobRuvInuFyVvor
|
||||
Z4pT19/UDG7hRZRKofT6X22Nzumoy97veT3Weuu0jny+H4AiiTobSnRSsZpjCMUs
|
||||
0TzMk+v1+JdMmETGbJZrM4BYZFUmoOOOU1WOPoph4owmz8h2a+VxdnGB95sW9/04
|
||||
gKHld863oauFv01P33okRojin+Q2G97o14Lh+IxFjghiphz9MOzwytwTUNwwrSmf
|
||||
Hjv2VHEtWc5P+GC0Gys7+l2jqUZm/j3H2Vu40R47sLudtty5iMV/sVmBBDRdyuVh
|
||||
C7yJC0dWveFsuEuI/onjf1iKgICkM2fNSj8gCBLtnbK6m3ZURoAwdKfWZR+9vyrq
|
||||
waazxmC+76IOoSBnRFqvdB2z+8CbVHWQ4bOcAUyMDTkwtM1qAiNnRln3aFIdzDdQ
|
||||
GQh1j3p4kuda8XE+IMdcOY3Q9HSSQ76aVrh+zbyXIX2a8H5qN7Tn0a7G2PoThBMe
|
||||
5CNl7aXdFF+v7yoRY4vyt+8p3wxlOli/1iMn2X879BlA3TJzbpSSBTd9aR75fshp
|
||||
9cXSqsUb2ja93Ca6mWkJfDYQUIltTGgg4njduFq0IVRob21hcyBXb3V0ZXJzIDx0
|
||||
aG9tYXNAeHM0YWxsLm5sPokCVAQTAQoAPgIbAwULCQgHAwUVCgkICwUWAgMBAAIe
|
||||
AQIXgAUJFUZECxYhBHFpYF9ix1E1bQVKJqgh5oDl+mMFBQJjO1dvAAoJEKgh5oDl
|
||||
+mMFIlIQAKmkHcJbQ1ltBexoJSsoCi7+9IOCSJpD0fsP6210/hkcSdcbz4EuN9om
|
||||
f7BdCW9SOicBB8bxVid3uF0NnLjqyRusNbRVIXiKWzxb2+36cA9D6ugv1u8oV7Fq
|
||||
sD+zAEWJTNDjd4/rJjEMTMhUxN0EFNrQLDngDnx7AeJyGD2n4eFB6RCJ7qtJtCPq
|
||||
xqiW9jH6vH+YlAz8zbWKF7Z52CPVxAt/yoo6dwLFV6615Mo5n4VN6NiXQeKw5XmZ
|
||||
prXvxDQFkodpjBpoN3fcAX6UTX4yJOR9DhALorr2H1ldI9xdQ0pawlPTDT/gRMsY
|
||||
uHh3NVflUzoLny7TWqd7xLyocH2TqC3OAsF78oR+4W2P0QxuEq/W1WAf+LIpRjeI
|
||||
Q4Xt6TGDku694VHE0pfK5BjpHApyWlGRPVq89x6Z78pCrKiMMtoW30mCPWkSd63h
|
||||
3cPgQNAzo+BBoNYUdvQCAAMEFdBpUjVCQaInAqFuKw1N8IpahsKKSg4jMheLmocG
|
||||
KYbO5IIinjXxIz87skKD6xkukIwfcnhvRM/IkHuxuG+ltO17nbWQNvmvZtEZ47xN
|
||||
9hAVZkaK/5eBDmICH1N8o0gHGU61KfEaCRLuQkFRe72QnbxzUkIwYtC9TCAiYieA
|
||||
xsSRwY5boZsKEnzLmPfM1b96Rj7JKCiMDOBgNbUNcKXuAMqrOMZtiQIcBBABCgAG
|
||||
BQJVAEBfAAoJEKvdCCZikFaI8xEQAIl7VghcbWLaiIcS+SB4mlVuVpFyj6YmD7Y2
|
||||
GWAkiRMyxMdzETOA1dKzcdZG8+0ClATFgdCl7FxZ5M2qRhjOqxRpMK0KpSn7xuXB
|
||||
w+0LWo5gnEXk/+IrXOBNCCkaglyPGlW8GFE/hxGbBt4pDLyQ6nsYWSWLvPNOGdQg
|
||||
J2/P3pa8mCmiFeAt88rz7Xj9wg7OZ7O+3tV6AlYRklfQEBAB/NP1VHv3xc2GsCA1
|
||||
t83f8Echq+CjsYNmcVIOhEKSPvacbfF1dpBp4AQooy6KRA4G1ERi0cXcSOnAdKZW
|
||||
wG0fIl++ziz0iDIM9c7Lg7Jul1+ZpXM9nxCLu7KYGlXe05XK+xzQJdK02tJO96IX
|
||||
oNcZrtqnIV9D/BBX4HMVHlSjoRVCnpXsuCiobgOFahvp94s7I6D48ABc64HPMS/2
|
||||
Nkw0QKLsmpNQ++QfNbNNOhcjMJFmlAy8si0cpBSQjVoctvgoZMo63KjDjvUiEMni
|
||||
tvEq43xPLe4cAUgj3F6/Eq9NDXbS0Bvq2a2gPEfY+d0dSazUCpmPVe9eAGxCZ0QY
|
||||
sPctUvLRf4scwJdQ676DAj+VvUwRKjAlR2wlHFWS38PQ/irvKgu02yHW3K1j99QS
|
||||
2NBauBcKZlr/r3bIWbDxtgOBfvfKGFsEHesSAyNY06OgGONvqMncWiMT1dCKhbxQ
|
||||
GdoSEaNkiQI9BBMBCgAnBQJVACJ2AhsDBQkJZgGABQsJCAcDBRUKCQgLBRYCAwEA
|
||||
Ah4BAheAAAoJEKgh5oDl+mMFLxcP/17E5gEBa/ErPMnY+82r0jZ5rq0DGOotrjsY
|
||||
fSvIX0YvDc6BaqLLuSTr8Klo2MueVHZY78px6VC661KP5+aRBJZHTTUJ+9ZVrP+R
|
||||
pcNHQTKX70XrwYSnl2S7q0nc1K5CkCLYOuM05sAzvOBj/cAtpZuw9D009wxpQXd7
|
||||
kwSREBGcgZ8Ac7kqPbOojtmBkHFWFUg5Rx+fVsulcQkERs1j22tzTvPzsljGmrrY
|
||||
7o6P0K4fzVfeQx43sKKltQrNNZwBtleHD+jQPLThVf09Rhjtq/+cb2wwiAqpHskf
|
||||
dY1njeCh21Yl58IOMHQ6x0Mc6rF3A5wqrLVJ5GYVHl33unZLKcAWDmtV3MPvnvdF
|
||||
WvLDPqkykozU08d1ieyZXFWlwViYaVzh7cjgIM8keCbPk0zFtGZkVUS2um332Xiv
|
||||
pX02J8KqWhNsiioO/SAFOhS6+zR+mZghnr5eqD9SHmCKTJ1JWjG2wx2hibaaszQb
|
||||
qypBW1mwoGMDj4MG91iKSnwMgjp3P8xQIyHy/XTAJ1DD9F//2MtQ2ZK5MqiWTYLn
|
||||
iUzirq8M1E8rLHh8Met5xIOq5iIDcMn0HTBCwrWWPOyqTF2lWK3G6LOjTltQ01fJ
|
||||
JaICpTe1Eirt9v+kr1iPLH1+zIeWZ1O469I/nxRp/WW6Oavzb9WRg5gDBKZ2v+rJ
|
||||
jZLcaEBFtCRUaG9tYXMgV291dGVycyA8dHdvdXRlcnNAZ29vZ2xlLmNvbT6JAlQE
|
||||
EwEKAD4CGwMFCwkIBwMFFQoJCAsFFgIDAQACHgECF4AWIQRxaWBfYsdRNW0FSiao
|
||||
IeaA5fpjBQUCYOBlAQUJFUZECwAKCRCoIeaA5fpjBTvxEACfyEt5rN5QGmVgahD/
|
||||
83l7lQpZUzLSq5MnIfRjCz50seh+oWsOuecayHZ79IDVSkF2L2kE1rumcB7UKPez
|
||||
0kHVrTdh3mQIsfCzQZEMsWTDYotlZbrPPvT3lKGL+O7fU321q9GVotJAssYcQFIK
|
||||
9F2p3jhN2coOzguikVlSc4nswnq2KRIJ4BpSJ3fk1rWLr8oJxN2pSpskYtHdUyUx
|
||||
fZ+fOrMHLbW94JWsLYDad4wpr8etBneVAaUPfphhbIwfhRXlHuTreDtwr3LJYKp1
|
||||
VjUjzGVVT2CXkS9LbJ7aM2BYa/1MJyHxkglu8O9LIDGH2arlbtmBKMbCXPSX/42H
|
||||
sGpUgQYRwG4f+2CfPj4fNx5GK8LO/EJjaw2Qh542U0356RRVZquN6E6SS6Sndlf9
|
||||
sO4cKU/ptT8IsfWKKaLwvr0l71hgLRqqe3rSpTV54cKpJfYIG+Qf4Do69etJLxjY
|
||||
UsyCqzuFocxZa0DGkqDQ+f1cD1bdg7Twso041NZG6y9+E7kCf3jtKkiYAHBY902q
|
||||
Zi8FvtI2tDAqwlfJjdiH5rUtYZALO3KGT+l9p3FTYIdDD1iVC41CeF6loJk0gQZi
|
||||
NmJtyY1TTyNS5Chtr8fSV9yYuoB5XoYYpLu1NCks4Cwva1tE45VhFrl8lPaM3EAB
|
||||
OV+JeHYHX/DgooJRIwgpXCBmwYkCHAQQAQoABgUCVQBAXwAKCRCr3QgmYpBWiBLT
|
||||
D/92NtklPiu0xYCsqUYQzy2dS9y93UHCY0IY7ZPS8k0gmmrxUxwyCARVvLKVy8pC
|
||||
DTw1As8ziOPlwD4QrVxIGMVSKcjM6aEa1h87ezWcz9UCFFKG5NzP7ixBj0bfFlP3
|
||||
STisQXoa1jjT4x8gNw5QMnI4nnnqYRIUZYdsyOru9Kk4pJYMzXG7dHI2mPlGlZ67
|
||||
gsHVtN9w/cF2e+5MeejxJF5YkHwbXyWymFvEHB+yvtrqUKtnr+SKXpBiRaiA1UBS
|
||||
Atus1HE+iEaNpfXjyGPT1/kj2dLS+PyKS61I0B8HF7msoXmbWClwOLAg19C7Gla8
|
||||
CLF53LgIqVjVH5c1+0VjmSyVOJoH9mFMd2rf6iJct9XlLfbAb9+liM/5FSRHtZCO
|
||||
YR6tqfVy+9j5ZhCWfPOaEo4rGLwQ7vHP1qkwH3sZn/UTsQvT4gZX8eIZ3QjutzTD
|
||||
TWfIWvxM7aeYCmNjPZFOB8hmnf3CYL4j5tMwnP19w2HWrYQHOBiIoPwGQxFFZf78
|
||||
F3GlPNYGqDUMNOgF7QxN0+GEh97JjSJ0P4JUX+nMOcousmrvZsH4IM6KIqhk0I59
|
||||
IjUuPxk7qpXU4z4ofEJzxEC0qTtmrNz4+kdvDQuJZY09hAbukzQ8xjl7i519Er+s
|
||||
8iApBcxVZ1Kl4pl60OS6S5FkBS6JfyYDDA2a72R5EJbh/7kCDQRVACJ2ARAA4lpb
|
||||
W8WeDqyRFffqQzVUK6456CkM7Fd77n1FdY0KwNeAmULYeiQ1Kp2PDzxFOyoJNe8a
|
||||
QazB7jPqGth0+JgFCOxGlnAtBP7DQl2MrYAL+AcKJ0c5dXc96ObZ6xtd01n9gAoo
|
||||
uppJINaA2aEX8P6nhQGu9qNz8yMBC22w0MYJZ+38ZVeXGcBCS3AGggeROwNPyNSZ
|
||||
nW5TPVHi+Sea5bCE4eo5UYIAMqcToxieI3V4A2ciQV9nBERLF0bAadD1HEeCb6wM
|
||||
g6h8z6VIRPitk45Dw73dy1yC6OvhkyGQ1yGuOPxwVnG3w0CLSUmMQeqyNAufmtN2
|
||||
yeoSMV74K9kOpkxCzzSulXGhEgCXWE7EXKC2g8i6M4BwYm3AaBGqeo+z7DinffWs
|
||||
8W2UvQUN6JTAdGVgNUfacYbP8YR7fOO1EczJ/FYGxq+JnDUFRpKNsDouw6ZeRI1E
|
||||
iQT3FEKWI3meNmTPBmIcWLoYGNYdmaeb4pqHBb6SfV45H4QjTyIjNHiW/LkpuI7o
|
||||
No/vIlNF8OQwyUFtknXIx57A0VSdI+vfz1crneg/bg0qzBz5SoYZ0XZUfvmYLAoD
|
||||
Z0/KLaqZ1x1Z9wiLbe3iK6nE1mjmWf7rOfmWHuxH/gbChXMDDfOMwgOYFXNXImsN
|
||||
PWPX3XA2DrhFrlNWzA8kxi9hXJrgAfkRcx/84oUAEQEAAYkCPAQYAQoAJgIbDBYh
|
||||
BHFpYF9ix1E1bQVKJqgh5oDl+mMFBQJg4GUPBQkVRkQZAAoJEKgh5oDl+mMFhIcP
|
||||
/j3tJamzKpJGJAwcsoneFtYfmZnLA4UosffaPlsLGRVL1buyRuj2dFBr2WU4NAld
|
||||
YrQPK4T+ciSpfogJ9Dk8s1eUMhZi7gxKmeOHUDyefPXIp7v3PSG4xcnfXjyEK9zC
|
||||
714qFsI9ERjTg7uaw6qmFv8Xht8O8TLGMgqDijQIgrH2oGd6tEdYyOOCOPQ7d6PB
|
||||
Sm5Sw53LlCWlW5I9bc0NCjbnwWjh7Z9UXtLffzZyxgxggSw0vfg5PuhcprZ2Rd3M
|
||||
wzJmALI2BB7eWW1x+M0hXmtdqj7Opmajh+UMrFjLtAlEZfslJwzV9NkAFxDYzRi2
|
||||
jvsmJx78vOPB1XhXgTvlEOvA7qEYDXFaZJHlBDmFU9JqytGZ6PtiQENuLHIe4hO6
|
||||
aHbhJA4I9EqoG1U1COQAwrsHreV6+fpcFn4lXbu+gWPyUzKiQMQd9kI3EEiayObU
|
||||
ro21OFHS7z131kKbMec/oc2RfADCvEwY8oay7o0S9aTqvPSQODs8nYkbZchNFoC+
|
||||
oF9n8pBMNzhYBsTk1OXleD1yMucsuywr5i0meyvu6oQ4+pdPYD6wh7JatJh0hayK
|
||||
y33GGsXd278J1Ek1p6MEFnGLc/zH+NZZLIU7Qn1oFU+gK4cVeaLX2g0/BLKcQ/AE
|
||||
mYIwnecLr8A+Y4mZVwwsnSHtfELtoGSsawN26bzKbnRs
|
||||
=t995
|
||||
-----END PGP PUBLIC KEY BLOCK-----
|
@@ -0,0 +1,56 @@
"""Checks if all *.pyc files have later mtime than their *.py files."""

import os
import sys
from importlib.util import cache_from_source
from pathlib import Path


RPM_BUILD_ROOT = os.environ.get('RPM_BUILD_ROOT', '')

# ...cpython-3X.pyc
# ...cpython-3X.opt-1.pyc
# ...cpython-3X.opt-2.pyc
LEVELS = (None, 1, 2)

# list of globs of test and other files that we expect not to have bytecode
not_compiled = [
    '/usr/bin/*',
    '/usr/lib/rpm/redhat/*',
    '*/test/badsyntax_*.py',
    '*/tokenizedata/bad_coding.py',
    '*/tokenizedata/bad_coding2.py',
    '*/tokenizedata/badsyntax_*.py',
    '*/test_future_stmt/badsyntax_*.py',
    '*/test_lib2to3/data/*.py',
    '*/test_lib2to3/data/*/*.py',
    '*/test_lib2to3/data/*/*/*.py',
    '*.debug-gdb.py',
]


def bytecode_expected(path):
    path = Path(path[len(RPM_BUILD_ROOT):])
    for glob in not_compiled:
        if path.match(glob):
            return False
    return True


failed = 0
compiled = (path for path in sys.argv[1:] if bytecode_expected(path))
for path in compiled:
    to_check = (cache_from_source(path, optimization=opt) for opt in LEVELS)
    f_mtime = os.path.getmtime(path)
    for pyc in to_check:
        c_mtime = os.path.getmtime(pyc)
        if c_mtime < f_mtime:
            print('Failed bytecompilation timestamps check: '
                  f'Bytecode file {pyc} is older than source file {path}',
                  file=sys.stderr)
            failed += 1

if failed:
    print(f'\n{failed} files failed bytecompilation timestamps check.',
          file=sys.stderr)
    sys.exit(1)
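
For reference, the three optimization levels in LEVELS map to bytecode paths like the following (a sketch; the module path is illustrative, and the first line assumes the checking interpreter itself runs without -O):

from importlib.util import cache_from_source

for opt in (None, 1, 2):
    print(cache_from_source('/usr/lib64/python3.12/os.py', optimization=opt))
# /usr/lib64/python3.12/__pycache__/os.cpython-312.pyc
# /usr/lib64/python3.12/__pycache__/os.cpython-312.opt-1.pyc
# /usr/lib64/python3.12/__pycache__/os.cpython-312.opt-2.pyc
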
@@ -0,0 +1,11 @@
[Desktop Entry]
Version=1.0
Name=IDLE 3
Comment=Python 3 Integrated Development and Learning Environment
Exec=idle3 %F
TryExec=idle3
Terminal=false
Type=Application
Icon=idle3
Categories=Development;IDE;
MimeType=text/x-python;
@@ -0,0 +1,171 @@
|
||||
'''Script to perform import of each module given to %%py_check_import
|
||||
'''
|
||||
import argparse
|
||||
import importlib
|
||||
import fnmatch
|
||||
import os
|
||||
import re
|
||||
import site
|
||||
import sys
|
||||
|
||||
from contextlib import contextmanager
|
||||
from pathlib import Path
|
||||
|
||||
|
||||
def read_modules_files(file_paths):
|
||||
'''Read module names from the files (modules must be newline separated).
|
||||
|
||||
Return the module names list or, if no files were provided, an empty list.
|
||||
'''
|
||||
|
||||
if not file_paths:
|
||||
return []
|
||||
|
||||
modules = []
|
||||
for file in file_paths:
|
||||
file_contents = file.read_text()
|
||||
modules.extend(file_contents.split())
|
||||
return modules
|
||||
|
||||
|
||||
def read_modules_from_cli(argv):
|
||||
'''Read module names from command-line arguments (space or comma separated).
|
||||
|
||||
Return the module names list.
|
||||
'''
|
||||
|
||||
if not argv:
|
||||
return []
|
||||
|
||||
    # %%py3_check_import allows the module list to be separated by commas or whitespace,
|
||||
# we need to unify the output to a list of particular elements
|
||||
modules_as_str = ' '.join(argv)
|
||||
modules = re.split(r'[\s,]+', modules_as_str)
|
||||
# Because of shell expansion in some less typical cases it may happen
|
||||
# that a trailing space will occur at the end of the list.
|
||||
# Remove the empty items from the list before passing it further
|
||||
modules = [m for m in modules if m]
|
||||
return modules
|
||||
|
||||
|
||||
def filter_top_level_modules_only(modules):
|
||||
    '''Filter out entries with nested modules (containing a dot), i.e. 'foo.bar'.
|
||||
|
||||
Return the list of top-level modules.
|
||||
'''
|
||||
|
||||
return [module for module in modules if '.' not in module]
|
||||
|
||||
|
||||
def any_match(text, globs):
|
||||
'''Return True if any of given globs fnmatchcase's the given text.'''
|
||||
|
||||
return any(fnmatch.fnmatchcase(text, g) for g in globs)
|
||||
|
||||
|
||||
def exclude_unwanted_module_globs(globs, modules):
|
||||
    '''Filter out entries which match any of the globs given as argv.
|
||||
|
||||
Return the list of filtered modules.
|
||||
'''
|
||||
|
||||
return [m for m in modules if not any_match(m, globs)]
|
||||


def read_modules_from_all_args(args):
    '''Return a joined list of modules from all given command-line arguments.
    '''

    modules = read_modules_files(args.filename)
    modules.extend(read_modules_from_cli(args.modules))
    if args.exclude:
        modules = exclude_unwanted_module_globs(args.exclude, modules)

    if args.top_level:
        modules = filter_top_level_modules_only(modules)

    # Error when someone accidentally managed to filter out everything
    if len(modules) == 0:
        raise ValueError('No modules to check were left')

    return modules


def import_modules(modules):
    '''Procedure to perform import check for each module name from the given list of modules.
    '''

    for module in modules:
        print('Check import:', module, file=sys.stderr)
        importlib.import_module(module)


def argparser():
    parser = argparse.ArgumentParser(
        description='Generate list of all importable modules for import check.'
    )
    parser.add_argument(
        'modules', nargs='*',
        help=('Add modules to check the import (space or comma separated).'),
    )
    parser.add_argument(
        '-f', '--filename', action='append', type=Path,
        help='Add importable module names list from file.',
    )
    parser.add_argument(
        '-t', '--top-level', action='store_true',
        help='Check only top-level modules.',
    )
    parser.add_argument(
        '-e', '--exclude', action='append',
        help='Provide modules globs to be excluded from the check.',
    )
    return parser


@contextmanager
def remove_unwanteds_from_sys_path():
    '''Remove cwd and this script's parent from sys.path for the import test.
    Bring the original contents back after the import is done (or has failed).
    '''

    cwd_absolute = Path.cwd().absolute()
    this_file_parent = Path(__file__).parent.absolute()
    old_sys_path = list(sys.path)
    for path in old_sys_path:
        if Path(path).absolute() in (cwd_absolute, this_file_parent):
            sys.path.remove(path)
    try:
        yield
    finally:
        sys.path = old_sys_path


def addsitedirs_from_environ():
    '''Load directories from the _PYTHONSITE environment variable (separated by :)
    and, for those already present in sys.path, call site.addsitedir()
    to handle .pth files in them.

    This is needed to properly import old-style namespace packages with nspkg.pth files.
    See https://bugzilla.redhat.com/2018551 for a more detailed rationale.'''
    for path in os.getenv('_PYTHONSITE', '').split(':'):
        if path in sys.path:
            site.addsitedir(path)
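# Editorial illustration (not part of the original script): the %%py3_check_import
# macro sets _PYTHONSITE to the buildroot site-packages directories, e.g.
#     _PYTHONSITE=%{buildroot}%{python3_sitearch}:%{buildroot}%{python3_sitelib}
# and each listed directory that is already on sys.path is handed to
# site.addsitedir(), which processes any *.pth files it contains.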


def main(argv=None):

    cli_args = argparser().parse_args(argv)

    if not cli_args.modules and not cli_args.filename:
        raise ValueError('No modules to check were provided')

    modules = read_modules_from_all_args(cli_args)

    with remove_unwanteds_from_sys_path():
        addsitedirs_from_environ()
        import_modules(modules)


if __name__ == '__main__':
    main()
@ -0,0 +1,91 @@
%__python3 /usr/bin/python3.12
%python3_pkgversion 3.12

# The following are macros from macros.python3 in Fedora that are newer/different than those in the python3-rpm-macros package in RHEL 8.
# These macros overwrite/supersede some of the macros in the python3-rpm-macros package in RHEL.

# nb: $RPM_BUILD_ROOT is not set when the macros are expanded (at spec parse time),
# so we set it manually (to an empty string), making our Python prefer the correct install scheme location.
# platbase/base is explicitly set to %%{_prefix} to support custom values, such as /app for flatpaks.
%python3_sitelib %(RPM_BUILD_ROOT= %{__python3} -Ic "import sysconfig; print(sysconfig.get_path('purelib', vars={'platbase': '%{_prefix}', 'base': '%{_prefix}'}))")
%python3_sitearch %(RPM_BUILD_ROOT= %{__python3} -Ic "import sysconfig; print(sysconfig.get_path('platlib', vars={'platbase': '%{_prefix}', 'base': '%{_prefix}'}))")
%python3_version %(RPM_BUILD_ROOT= %{__python3} -Ic "import sys; sys.stdout.write('{0.major}.{0.minor}'.format(sys.version_info))")
%python3_version_nodots %(RPM_BUILD_ROOT= %{__python3} -Ic "import sys; sys.stdout.write('{0.major}{0.minor}'.format(sys.version_info))")
%python3_platform %(RPM_BUILD_ROOT= %{__python3} -Ic "import sysconfig; print(sysconfig.get_platform())")
%python3_platform_triplet %(RPM_BUILD_ROOT= %{__python3} -Ic "import sysconfig; print(sysconfig.get_config_var('MULTIARCH'))")
%python3_ext_suffix %(RPM_BUILD_ROOT= %{__python3} -Ic "import sysconfig; print(sysconfig.get_config_var('EXT_SUFFIX'))")
%python3_cache_tag %(RPM_BUILD_ROOT= %{__python3} -Ic "import sys; print(sys.implementation.cache_tag)")
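# Editorial illustration (not part of the original macros): %%python3_sitelib expands
# a Python call equivalent to the following, here with %%{_prefix} assumed to be /usr:
#   import sysconfig
#   print(sysconfig.get_path('purelib', vars={'platbase': '/usr', 'base': '/usr'}))
#   # -> /usr/lib/python3.12/site-packages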

%_py3_shebang_s s
%_py3_shebang_P %(RPM_BUILD_ROOT= %{__python3} -Ic "import sys; print('P' if hasattr(sys.flags, 'safe_path') else '')")
%py3_shbang_opts -%{?_py3_shebang_s}%{?_py3_shebang_P}

%py3_shebang_fix %{expand:\\\
  if [ -z "%{?py3_shebang_flags}" ]; then
    shebang_flags="-k"
  else
    shebang_flags="-ka%{py3_shebang_flags}"
  fi
  %{__python3} -B %{_rpmconfigdir}/redhat/pathfix_py3_12.py -pni %{__python3} $shebang_flags}

%py3_install() %{expand:\\\
  CFLAGS="${CFLAGS:-${RPM_OPT_FLAGS}}" LDFLAGS="${LDFLAGS:-${RPM_LD_FLAGS}}"\\\
  %{__python3} %{py_setup} %{?py_setup_args} install -O1 --skip-build --root %{buildroot} --prefix %{_prefix} %{?*}
  rm -rfv %{buildroot}%{_bindir}/__pycache__
}

%py3_install_egg() %{expand:\\\
  mkdir -p %{buildroot}%{python3_sitelib}
  %{__python3} -m easy_install -m --prefix %{buildroot}%{_prefix} -Z dist/*-py%{python3_version}.egg %{?*}
  rm -rfv %{buildroot}%{_bindir}/__pycache__
}

%py3_install_wheel() %{expand:\\\
  %{__python3} -m pip install -I dist/%{1} --root %{buildroot} --prefix %{_prefix} --no-deps --no-index --no-warn-script-location
  rm -rfv %{buildroot}%{_bindir}/__pycache__
  for distinfo in %{buildroot}%{python3_sitelib}/*.dist-info %{buildroot}%{python3_sitearch}/*.dist-info; do
    if [ -f ${distinfo}/direct_url.json ]; then
      rm -fv ${distinfo}/direct_url.json
      sed -i '/direct_url.json/d' ${distinfo}/RECORD
    fi
  done
}

# With $PATH and $PYTHONPATH set to the %%buildroot,
# try to import the Python 3 module(s) given as command-line args or read from file (-f).
# Respect the custom values of %%py3_shebang_flags or set nothing if it's undefined.
# Filter and check import of only top-level modules using the -t flag.
# Exclude unwanted modules by passing their globs to the -e option.
# Useful as a smoke test in %%check when running tests is not feasible.
# Use spaces or commas as separators if providing the list directly.
# Use newlines as separators if providing the list in a file.
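# For example, a spec file might call (editorial illustration):
# %%py3_check_import -t mypackage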
%py3_check_import(e:tf:) %{expand:\\\
  PATH="%{buildroot}%{_bindir}:$PATH"\\\
  PYTHONPATH="${PYTHONPATH:-%{buildroot}%{python3_sitearch}:%{buildroot}%{python3_sitelib}}"\\\
  _PYTHONSITE="%{buildroot}%{python3_sitearch}:%{buildroot}%{python3_sitelib}"\\\
  PYTHONDONTWRITEBYTECODE=1\\\
  %{lua:
  local command = "%{__python3} "
  if rpm.expand("%{?py3_shebang_flags}") ~= "" then
    command = command .. "-%{py3_shebang_flags}"
  end
  command = command .. " %{_rpmconfigdir}/redhat/import_all_modules_py3_12.py "
  -- handle multiline arguments correctly, see https://bugzilla.redhat.com/2018809
  local args=rpm.expand('%{?**}'):gsub("[%s\\\\]*%s+", " ")
  print(command .. args)
  }
}

# Environment variables used by %%pytest, %%tox or standalone, e.g.:
# %%{py3_test_envvars} %%{python3} -m unittest
%py3_test_envvars %{expand:\\\
  CFLAGS="${CFLAGS:-${RPM_OPT_FLAGS}}" LDFLAGS="${LDFLAGS:-${RPM_LD_FLAGS}}"\\\
  PATH="%{buildroot}%{_bindir}:$PATH"\\\
  PYTHONPATH="${PYTHONPATH:-%{buildroot}%{python3_sitearch}:%{buildroot}%{python3_sitelib}}"\\\
  PYTHONDONTWRITEBYTECODE=1\\\
  %{?__pytest_addopts:PYTEST_ADDOPTS="${PYTEST_ADDOPTS:-} %{__pytest_addopts}"}\\\
  PYTEST_XDIST_AUTO_NUM_WORKERS=%{_smp_build_ncpus}}

# This is intended for Python 3 only, hence also no Python version in the name.
%__pytest /usr/bin/pytest-%{python3_version}
%pytest %py3_test_envvars %__pytest
@ -0,0 +1,199 @@
#!/usr/bin/env python3

import sys
import os
from stat import *
import getopt

err = sys.stderr.write
dbg = err
rep = sys.stdout.write

new_interpreter = None
preserve_timestamps = False
create_backup = True
keep_flags = False
add_flags = b''


def main():
    global new_interpreter
    global preserve_timestamps
    global create_backup
    global keep_flags
    global add_flags

    usage = ('usage: %s -i /interpreter -p -n -k -a file-or-directory ...\n' %
             sys.argv[0])
    try:
        opts, args = getopt.getopt(sys.argv[1:], 'i:a:kpn')
    except getopt.error as msg:
        err(str(msg) + '\n')
        err(usage)
        sys.exit(2)
    for o, a in opts:
        if o == '-i':
            new_interpreter = a.encode()
        if o == '-p':
            preserve_timestamps = True
        if o == '-n':
            create_backup = False
        if o == '-k':
            keep_flags = True
        if o == '-a':
            add_flags = a.encode()
            if b' ' in add_flags:
                err("-a option doesn't support whitespaces")
                sys.exit(2)
    if not new_interpreter or not new_interpreter.startswith(b'/') or \
       not args:
        err('-i option or file-or-directory missing\n')
        err(usage)
        sys.exit(2)
    bad = 0
    for arg in args:
        if os.path.isdir(arg):
            if recursedown(arg): bad = 1
        elif os.path.islink(arg):
            err(arg + ': will not process symbolic links\n')
            bad = 1
        else:
            if fix(arg): bad = 1
    sys.exit(bad)


def ispython(name):
    return name.endswith('.py')


def recursedown(dirname):
    dbg('recursedown(%r)\n' % (dirname,))
    bad = 0
    try:
        names = os.listdir(dirname)
    except OSError as msg:
        err('%s: cannot list directory: %r\n' % (dirname, msg))
        return 1
    names.sort()
    subdirs = []
    for name in names:
        if name in (os.curdir, os.pardir): continue
        fullname = os.path.join(dirname, name)
        if os.path.islink(fullname): pass
        elif os.path.isdir(fullname):
            subdirs.append(fullname)
        elif ispython(name):
            if fix(fullname): bad = 1
    for fullname in subdirs:
        if recursedown(fullname): bad = 1
    return bad


def fix(filename):
    ## dbg('fix(%r)\n' % (filename,))
    try:
        f = open(filename, 'rb')
    except IOError as msg:
        err('%s: cannot open: %r\n' % (filename, msg))
        return 1
    with f:
        line = f.readline()
        fixed = fixline(line)
        if line == fixed:
            rep(filename+': no change\n')
            return
        head, tail = os.path.split(filename)
        tempname = os.path.join(head, '@' + tail)
        try:
            g = open(tempname, 'wb')
        except IOError as msg:
            err('%s: cannot create: %r\n' % (tempname, msg))
            return 1
        with g:
            rep(filename + ': updating\n')
            g.write(fixed)
            BUFSIZE = 8*1024
            while 1:
                buf = f.read(BUFSIZE)
                if not buf: break
                g.write(buf)

    # Finishing touch -- move files

    mtime = None
    atime = None
    # First copy the file's mode to the temp file
    try:
        statbuf = os.stat(filename)
        mtime = statbuf.st_mtime
        atime = statbuf.st_atime
        os.chmod(tempname, statbuf[ST_MODE] & 0o7777)
    except OSError as msg:
        err('%s: warning: chmod failed (%r)\n' % (tempname, msg))
    # Then make a backup of the original file as filename~
    if create_backup:
        try:
            os.rename(filename, filename + '~')
        except OSError as msg:
            err('%s: warning: backup failed (%r)\n' % (filename, msg))
    else:
        try:
            os.remove(filename)
        except OSError as msg:
            err('%s: warning: removing failed (%r)\n' % (filename, msg))
    # Now move the temp file to the original file
    try:
        os.rename(tempname, filename)
    except OSError as msg:
        err('%s: rename failed (%r)\n' % (filename, msg))
        return 1
    if preserve_timestamps:
        if atime and mtime:
            try:
                os.utime(filename, (atime, mtime))
            except OSError as msg:
                err('%s: reset of timestamp failed (%r)\n' % (filename, msg))
                return 1
    # Return success
    return 0


def parse_shebang(shebangline):
    shebangline = shebangline.rstrip(b'\n')
    start = shebangline.find(b' -')
    if start == -1:
        return b''
    return shebangline[start:]


def populate_flags(shebangline):
    old_flags = b''
    if keep_flags:
        old_flags = parse_shebang(shebangline)
        if old_flags:
            old_flags = old_flags[2:]
    if not (old_flags or add_flags):
        return b''
    # On Linux, the entire string following the interpreter name
    # is passed as a single argument to the interpreter,
    # e.g. "#! /usr/bin/python3 -W Error -s" runs "/usr/bin/python3" "-W Error -s".
    # The shebang therefore needs a single '-' where flags are given, and
    # a flag might need an argument, so the new flags are added
    # between the '-' and the original flags,
    # e.g. #! /usr/bin/python3 -sW Error
    return b' -' + add_flags + old_flags


def fixline(line):
    if not line.startswith(b'#!'):
        return line

    if b"python" not in line:
        return line

    flags = populate_flags(line)
    return b'#! ' + new_interpreter + flags + b'\n'
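# Editorial illustration (not part of the original script): with
# -i /usr/bin/python3.12 -k -a s, fixline() rewrites
#     b'#!/usr/bin/env python3 -W error\n'
# into
#     b'#! /usr/bin/python3.12 -sW error\n'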


if __name__ == '__main__':
    main()