import python3.9-3.9.18-3.el9_4.1

c9 imports/c9/python3.9-3.9.18-3.el9_4.1
MSVSphere Packaging Team 5 months ago
parent bccc3ae7f2
commit 756fd0c7ad

@@ -0,0 +1,88 @@
From e5be32d6eb880c9563fde2f23cc31b7e449719ec Mon Sep 17 00:00:00 2001
From: Victor Stinner <vstinner@python.org>
Date: Wed, 24 Jan 2024 18:14:14 +0100
Subject: [PATCH] bpo-46623: Skip two test_zlib tests on s390x (GH-31096)
Skip test_pair() and test_speech128() of test_zlib on s390x since
they fail if zlib uses the s390x hardware accelerator.
---
Lib/test/test_zlib.py | 32 +++++++++++++++++++
.../2022-02-03-09-45-26.bpo-46623.vxzuhV.rst | 2 ++
2 files changed, 34 insertions(+)
create mode 100644 Misc/NEWS.d/next/Tests/2022-02-03-09-45-26.bpo-46623.vxzuhV.rst
diff --git a/Lib/test/test_zlib.py b/Lib/test/test_zlib.py
index 02509cd..f3654c9 100644
--- a/Lib/test/test_zlib.py
+++ b/Lib/test/test_zlib.py
@@ -2,6 +2,7 @@ import unittest
from test import support
import binascii
import copy
+import os
import pickle
import random
import sys
@@ -16,6 +17,35 @@ requires_Decompress_copy = unittest.skipUnless(
hasattr(zlib.decompressobj(), "copy"),
'requires Decompress.copy()')
+# bpo-46623: On s390x, when a hardware accelerator is used, using different
+# ways to compress data with zlib can produce different compressed data.
+# Simplified test_pair() code:
+#
+# def func1(data):
+# return zlib.compress(data)
+#
+# def func2(data):
+# co = zlib.compressobj()
+# x1 = co.compress(data)
+# x2 = co.flush()
+# return x1 + x2
+#
+# On s390x if zlib uses a hardware accelerator, func1() creates a single
+# "final" compressed block whereas func2() produces 3 compressed blocks (the
+# last one is a final block). On other platforms with no accelerator, func1()
+# and func2() produce the same compressed data made of a single (final)
+# compressed block.
+#
+# Only the compressed data is different, the decompression returns the original
+# data:
+#
+# zlib.decompress(func1(data)) == zlib.decompress(func2(data)) == data
+#
+# Make the assumption that s390x always has an accelerator to simplify the skip
+# condition. Windows doesn't have os.uname() but it doesn't support s390x.
+skip_on_s390x = unittest.skipIf(hasattr(os, 'uname') and os.uname().machine == 's390x',
+ 'skipped on s390x')
+
class VersionTestCase(unittest.TestCase):
@@ -174,6 +204,7 @@ class CompressTestCase(BaseCompressTestCase, unittest.TestCase):
bufsize=zlib.DEF_BUF_SIZE),
HAMLET_SCENE)
+ @skip_on_s390x
def test_speech128(self):
# compress more data
data = HAMLET_SCENE * 128
@@ -225,6 +256,7 @@ class CompressTestCase(BaseCompressTestCase, unittest.TestCase):
class CompressObjectTestCase(BaseCompressTestCase, unittest.TestCase):
# Test compression object
+ @skip_on_s390x
def test_pair(self):
# straightforward compress/decompress objects
datasrc = HAMLET_SCENE * 128
diff --git a/Misc/NEWS.d/next/Tests/2022-02-03-09-45-26.bpo-46623.vxzuhV.rst b/Misc/NEWS.d/next/Tests/2022-02-03-09-45-26.bpo-46623.vxzuhV.rst
new file mode 100644
index 0000000..be085c0
--- /dev/null
+++ b/Misc/NEWS.d/next/Tests/2022-02-03-09-45-26.bpo-46623.vxzuhV.rst
@@ -0,0 +1,2 @@
+Skip test_pair() and test_speech128() of test_zlib on s390x since they fail
+if zlib uses the s390x hardware accelerator. Patch by Victor Stinner.
--
2.43.0
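For context, the divergence the patch comment describes can be reproduced with a short script along these lines (a sketch only; compress_oneshot and compress_incremental are illustrative names, not part of the patch or of CPython):

import zlib

def compress_oneshot(data):
    # One-shot API: zlib frames the deflate stream as it sees fit.
    return zlib.compress(data)

def compress_incremental(data):
    # Streaming API: compress() followed by flush(), as in test_pair().
    co = zlib.compressobj()
    return co.compress(data) + co.flush()

data = b"spam and eggs " * 1024
one = compress_oneshot(data)
two = compress_incremental(data)
# On s390x with the zlib hardware accelerator the two byte strings may differ;
# elsewhere they are typically identical. Decompression round-trips either way.
assert zlib.decompress(one) == zlib.decompress(two) == data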

@@ -0,0 +1,77 @@
From c9364e8727ea2426519a74593ab03ebcb0da72b8 Mon Sep 17 00:00:00 2001
From: Lumir Balhar <lbalhar@redhat.com>
Date: Fri, 3 May 2024 14:17:48 +0200
Subject: [PATCH] Expect failures in tests not working properly with expat with
a fixed CVE in RHEL
---
Lib/test/test_xml_etree.py | 53 ++++++++++++++++++++++----------------
1 file changed, 31 insertions(+), 22 deletions(-)
diff --git a/Lib/test/test_xml_etree.py b/Lib/test/test_xml_etree.py
index 7c346f2..24e0bb8 100644
--- a/Lib/test/test_xml_etree.py
+++ b/Lib/test/test_xml_etree.py
@@ -1391,28 +1391,37 @@ class XMLPullParserTest(unittest.TestCase):
self.assertEqual([(action, elem.tag) for action, elem in events],
expected)
- def test_simple_xml(self):
- for chunk_size in (None, 1, 5):
- with self.subTest(chunk_size=chunk_size):
- parser = ET.XMLPullParser()
- self.assert_event_tags(parser, [])
- self._feed(parser, "<!-- comment -->\n", chunk_size)
- self.assert_event_tags(parser, [])
- self._feed(parser,
- "<root>\n <element key='value'>text</element",
- chunk_size)
- self.assert_event_tags(parser, [])
- self._feed(parser, ">\n", chunk_size)
- self.assert_event_tags(parser, [('end', 'element')])
- self._feed(parser, "<element>text</element>tail\n", chunk_size)
- self._feed(parser, "<empty-element/>\n", chunk_size)
- self.assert_event_tags(parser, [
- ('end', 'element'),
- ('end', 'empty-element'),
- ])
- self._feed(parser, "</root>\n", chunk_size)
- self.assert_event_tags(parser, [('end', 'root')])
- self.assertIsNone(parser.close())
+ def test_simple_xml(self, chunk_size=None):
+ parser = ET.XMLPullParser()
+ self.assert_event_tags(parser, [])
+ self._feed(parser, "<!-- comment -->\n", chunk_size)
+ self.assert_event_tags(parser, [])
+ self._feed(parser,
+ "<root>\n <element key='value'>text</element",
+ chunk_size)
+ self.assert_event_tags(parser, [])
+ self._feed(parser, ">\n", chunk_size)
+ self.assert_event_tags(parser, [('end', 'element')])
+ self._feed(parser, "<element>text</element>tail\n", chunk_size)
+ self._feed(parser, "<empty-element/>\n", chunk_size)
+ self.assert_event_tags(parser, [
+ ('end', 'element'),
+ ('end', 'empty-element'),
+ ])
+ self._feed(parser, "</root>\n", chunk_size)
+ self.assert_event_tags(parser, [('end', 'root')])
+ self.assertIsNone(parser.close())
+
+ @unittest.expectedFailure
+ def test_simple_xml_chunk_1(self):
+ self.test_simple_xml(chunk_size=1)
+
+ @unittest.expectedFailure
+ def test_simple_xml_chunk_5(self):
+ self.test_simple_xml(chunk_size=5)
+
+ def test_simple_xml_chunk_22(self):
+ self.test_simple_xml(chunk_size=22)
def test_feed_while_iterating(self):
parser = ET.XMLPullParser()
--
2.45.0
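For context, the chunk-size sensitivity behind these expectedFailure markers can be observed directly with XMLPullParser (a sketch only; feed_in_chunks is an illustrative helper, not part of the patch). With Expat 2.6.0 and newer, feeding very small chunks may defer parsing as a mitigation for CVE-2023-52425, so intermediate read_events() calls can come back empty where the old test expected events:

from xml.etree import ElementTree as ET

def feed_in_chunks(xml_text, chunk_size):
    parser = ET.XMLPullParser(events=("start", "end"))
    events = []
    for i in range(0, len(xml_text), chunk_size):
        parser.feed(xml_text[i:i + chunk_size])
        events.extend((action, elem.tag) for action, elem in parser.read_events())
    parser.close()
    # Any events deferred by the parser are delivered after close().
    events.extend((action, elem.tag) for action, elem in parser.read_events())
    return events

doc = "<root><element key='value'>text</element></root>"
# Large chunks deliver events promptly; chunk_size=1 or 5 may only deliver
# them after close() when Expat carries the CVE-2023-52425 fix.
print(feed_in_chunks(doc, chunk_size=len(doc)))
print(feed_in_chunks(doc, chunk_size=1))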

@@ -0,0 +1,211 @@
From d54e22a669ae6e987199bb5d2c69bb5a46b0083b Mon Sep 17 00:00:00 2001
From: Serhiy Storchaka <storchaka@gmail.com>
Date: Wed, 17 Jan 2024 15:47:47 +0200
Subject: [PATCH] [3.9] gh-91133: tempfile.TemporaryDirectory: fix symlink bug
in cleanup (GH-99930) (GH-112842)
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit
(cherry picked from commit 81c16cd94ec38d61aa478b9a452436dc3b1b524d)
Co-authored-by: Søren Løvborg <sorenl@unity3d.com>
---
Lib/tempfile.py | 27 ++--
Lib/test/test_tempfile.py | 117 +++++++++++++++++-
...2-12-01-16-57-44.gh-issue-91133.LKMVCV.rst | 2 +
3 files changed, 136 insertions(+), 10 deletions(-)
create mode 100644 Misc/NEWS.d/next/Library/2022-12-01-16-57-44.gh-issue-91133.LKMVCV.rst
diff --git a/Lib/tempfile.py b/Lib/tempfile.py
index eafce6f25b6fb2..59a628a1744685 100644
--- a/Lib/tempfile.py
+++ b/Lib/tempfile.py
@@ -268,6 +268,22 @@ def _mkstemp_inner(dir, pre, suf, flags, output_type):
raise FileExistsError(_errno.EEXIST,
"No usable temporary file name found")
+def _dont_follow_symlinks(func, path, *args):
+ # Pass follow_symlinks=False, unless not supported on this platform.
+ if func in _os.supports_follow_symlinks:
+ func(path, *args, follow_symlinks=False)
+ elif _os.name == 'nt' or not _os.path.islink(path):
+ func(path, *args)
+
+def _resetperms(path):
+ try:
+ chflags = _os.chflags
+ except AttributeError:
+ pass
+ else:
+ _dont_follow_symlinks(chflags, path, 0)
+ _dont_follow_symlinks(_os.chmod, path, 0o700)
+
# User visible interfaces.
@@ -789,17 +805,10 @@ def __init__(self, suffix=None, prefix=None, dir=None):
def _rmtree(cls, name):
def onerror(func, path, exc_info):
if issubclass(exc_info[0], PermissionError):
- def resetperms(path):
- try:
- _os.chflags(path, 0)
- except AttributeError:
- pass
- _os.chmod(path, 0o700)
-
try:
if path != name:
- resetperms(_os.path.dirname(path))
- resetperms(path)
+ _resetperms(_os.path.dirname(path))
+ _resetperms(path)
try:
_os.unlink(path)
diff --git a/Lib/test/test_tempfile.py b/Lib/test/test_tempfile.py
index 8ad1bb98e8e899..571263d9c957d7 100644
--- a/Lib/test/test_tempfile.py
+++ b/Lib/test/test_tempfile.py
@@ -1394,6 +1394,103 @@ def test_cleanup_with_symlink_to_a_directory(self):
"were deleted")
d2.cleanup()
+ @support.skip_unless_symlink
+ def test_cleanup_with_symlink_modes(self):
+ # cleanup() should not follow symlinks when fixing mode bits (#91133)
+ with self.do_create(recurse=0) as d2:
+ file1 = os.path.join(d2, 'file1')
+ open(file1, 'wb').close()
+ dir1 = os.path.join(d2, 'dir1')
+ os.mkdir(dir1)
+ for mode in range(8):
+ mode <<= 6
+ with self.subTest(mode=format(mode, '03o')):
+ def test(target, target_is_directory):
+ d1 = self.do_create(recurse=0)
+ symlink = os.path.join(d1.name, 'symlink')
+ os.symlink(target, symlink,
+ target_is_directory=target_is_directory)
+ try:
+ os.chmod(symlink, mode, follow_symlinks=False)
+ except NotImplementedError:
+ pass
+ try:
+ os.chmod(symlink, mode)
+ except FileNotFoundError:
+ pass
+ os.chmod(d1.name, mode)
+ d1.cleanup()
+ self.assertFalse(os.path.exists(d1.name))
+
+ with self.subTest('nonexisting file'):
+ test('nonexisting', target_is_directory=False)
+ with self.subTest('nonexisting dir'):
+ test('nonexisting', target_is_directory=True)
+
+ with self.subTest('existing file'):
+ os.chmod(file1, mode)
+ old_mode = os.stat(file1).st_mode
+ test(file1, target_is_directory=False)
+ new_mode = os.stat(file1).st_mode
+ self.assertEqual(new_mode, old_mode,
+ '%03o != %03o' % (new_mode, old_mode))
+
+ with self.subTest('existing dir'):
+ os.chmod(dir1, mode)
+ old_mode = os.stat(dir1).st_mode
+ test(dir1, target_is_directory=True)
+ new_mode = os.stat(dir1).st_mode
+ self.assertEqual(new_mode, old_mode,
+ '%03o != %03o' % (new_mode, old_mode))
+
+ @unittest.skipUnless(hasattr(os, 'chflags'), 'requires os.chflags')
+ @support.skip_unless_symlink
+ def test_cleanup_with_symlink_flags(self):
+ # cleanup() should not follow symlinks when fixing flags (#91133)
+ flags = stat.UF_IMMUTABLE | stat.UF_NOUNLINK
+ self.check_flags(flags)
+
+ with self.do_create(recurse=0) as d2:
+ file1 = os.path.join(d2, 'file1')
+ open(file1, 'wb').close()
+ dir1 = os.path.join(d2, 'dir1')
+ os.mkdir(dir1)
+ def test(target, target_is_directory):
+ d1 = self.do_create(recurse=0)
+ symlink = os.path.join(d1.name, 'symlink')
+ os.symlink(target, symlink,
+ target_is_directory=target_is_directory)
+ try:
+ os.chflags(symlink, flags, follow_symlinks=False)
+ except NotImplementedError:
+ pass
+ try:
+ os.chflags(symlink, flags)
+ except FileNotFoundError:
+ pass
+ os.chflags(d1.name, flags)
+ d1.cleanup()
+ self.assertFalse(os.path.exists(d1.name))
+
+ with self.subTest('nonexisting file'):
+ test('nonexisting', target_is_directory=False)
+ with self.subTest('nonexisting dir'):
+ test('nonexisting', target_is_directory=True)
+
+ with self.subTest('existing file'):
+ os.chflags(file1, flags)
+ old_flags = os.stat(file1).st_flags
+ test(file1, target_is_directory=False)
+ new_flags = os.stat(file1).st_flags
+ self.assertEqual(new_flags, old_flags)
+
+ with self.subTest('existing dir'):
+ os.chflags(dir1, flags)
+ old_flags = os.stat(dir1).st_flags
+ test(dir1, target_is_directory=True)
+ new_flags = os.stat(dir1).st_flags
+ self.assertEqual(new_flags, old_flags)
+
@support.cpython_only
def test_del_on_collection(self):
# A TemporaryDirectory is deleted when garbage collected
@@ -1506,9 +1603,27 @@ def test_modes(self):
d.cleanup()
self.assertFalse(os.path.exists(d.name))
- @unittest.skipUnless(hasattr(os, 'chflags'), 'requires os.lchflags')
+ def check_flags(self, flags):
+ # skip the test if these flags are not supported (ex: FreeBSD 13)
+ filename = support.TESTFN
+ try:
+ open(filename, "w").close()
+ try:
+ os.chflags(filename, flags)
+ except OSError as exc:
+ # "OSError: [Errno 45] Operation not supported"
+ self.skipTest(f"chflags() doesn't support flags "
+ f"{flags:#b}: {exc}")
+ else:
+ os.chflags(filename, 0)
+ finally:
+ support.unlink(filename)
+
+ @unittest.skipUnless(hasattr(os, 'chflags'), 'requires os.chflags')
def test_flags(self):
flags = stat.UF_IMMUTABLE | stat.UF_NOUNLINK
+ self.check_flags(flags)
+
d = self.do_create(recurse=3, dirs=2, files=2)
with d:
# Change files and directories flags recursively.
diff --git a/Misc/NEWS.d/next/Library/2022-12-01-16-57-44.gh-issue-91133.LKMVCV.rst b/Misc/NEWS.d/next/Library/2022-12-01-16-57-44.gh-issue-91133.LKMVCV.rst
new file mode 100644
index 00000000000000..7991048fc48e03
--- /dev/null
+++ b/Misc/NEWS.d/next/Library/2022-12-01-16-57-44.gh-issue-91133.LKMVCV.rst
@@ -0,0 +1,2 @@
+Fix a bug in :class:`tempfile.TemporaryDirectory` cleanup, which now no longer
+dereferences symlinks when working around file system permission errors.
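For context, the follow_symlinks handling that the new _dont_follow_symlinks() helper introduces can be sketched in isolation like this (an illustration only; reset_permissions is a hypothetical name, not the stdlib helper). The point is that permissions are restored on the symlink itself, never on its target, which is what closes the CVE-2023-6597 traversal:

import os

def reset_permissions(path):
    # Hypothetical helper mirroring tempfile's new behaviour (not stdlib code).
    if os.chmod in os.supports_follow_symlinks:
        # Operate on the symlink itself where the platform allows it.
        os.chmod(path, 0o700, follow_symlinks=False)
    elif os.name == "nt" or not os.path.islink(path):
        # Windows lacks follow_symlinks=False for chmod but does not need it
        # here; elsewhere, skip symlinks entirely so the target's permissions
        # are never touched.
        os.chmod(path, 0o700)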

@@ -0,0 +1,143 @@
From a2c59992e9e8d35baba9695eb186ad6c6ff85c51 Mon Sep 17 00:00:00 2001
From: "Miss Islington (bot)"
<31488909+miss-islington@users.noreply.github.com>
Date: Wed, 17 Jan 2024 14:48:06 +0100
Subject: [PATCH] [3.9] gh-109858: Protect zipfile from "quoted-overlap"
zipbomb (GH-110016) (GH-113915)
Raise BadZipFile when trying to read an entry that overlaps with another
entry or the central directory.
(cherry picked from commit 66363b9a7b9fe7c99eba3a185b74c5fdbf842eba)
Co-authored-by: Serhiy Storchaka <storchaka@gmail.com>
---
Lib/test/test_zipfile.py | 60 +++++++++++++++++++
Lib/zipfile.py | 12 ++++
...-09-28-13-15-51.gh-issue-109858.43e2dg.rst | 3 +
3 files changed, 75 insertions(+)
create mode 100644 Misc/NEWS.d/next/Library/2023-09-28-13-15-51.gh-issue-109858.43e2dg.rst
diff --git a/Lib/test/test_zipfile.py b/Lib/test/test_zipfile.py
index bd383d3f68552b..17e95eb86239a5 100644
--- a/Lib/test/test_zipfile.py
+++ b/Lib/test/test_zipfile.py
@@ -2045,6 +2045,66 @@ def test_decompress_without_3rd_party_library(self):
with zipfile.ZipFile(zip_file) as zf:
self.assertRaises(RuntimeError, zf.extract, 'a.txt')
+ @requires_zlib()
+ def test_full_overlap(self):
+ data = (
+ b'PK\x03\x04\x14\x00\x00\x00\x08\x00\xa0lH\x05\xe2\x1e'
+ b'8\xbb\x10\x00\x00\x00\t\x04\x00\x00\x01\x00\x00\x00a\xed'
+ b'\xc0\x81\x08\x00\x00\x00\xc00\xd6\xfbK\\d\x0b`P'
+ b'K\x01\x02\x14\x00\x14\x00\x00\x00\x08\x00\xa0lH\x05\xe2'
+ b'\x1e8\xbb\x10\x00\x00\x00\t\x04\x00\x00\x01\x00\x00\x00\x00'
+ b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00aPK'
+ b'\x01\x02\x14\x00\x14\x00\x00\x00\x08\x00\xa0lH\x05\xe2\x1e'
+ b'8\xbb\x10\x00\x00\x00\t\x04\x00\x00\x01\x00\x00\x00\x00\x00'
+ b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00bPK\x05'
+ b'\x06\x00\x00\x00\x00\x02\x00\x02\x00^\x00\x00\x00/\x00\x00'
+ b'\x00\x00\x00'
+ )
+ with zipfile.ZipFile(io.BytesIO(data), 'r') as zipf:
+ self.assertEqual(zipf.namelist(), ['a', 'b'])
+ zi = zipf.getinfo('a')
+ self.assertEqual(zi.header_offset, 0)
+ self.assertEqual(zi.compress_size, 16)
+ self.assertEqual(zi.file_size, 1033)
+ zi = zipf.getinfo('b')
+ self.assertEqual(zi.header_offset, 0)
+ self.assertEqual(zi.compress_size, 16)
+ self.assertEqual(zi.file_size, 1033)
+ self.assertEqual(len(zipf.read('a')), 1033)
+ with self.assertRaisesRegex(zipfile.BadZipFile, 'File name.*differ'):
+ zipf.read('b')
+
+ @requires_zlib()
+ def test_quoted_overlap(self):
+ data = (
+ b'PK\x03\x04\x14\x00\x00\x00\x08\x00\xa0lH\x05Y\xfc'
+ b'8\x044\x00\x00\x00(\x04\x00\x00\x01\x00\x00\x00a\x00'
+ b'\x1f\x00\xe0\xffPK\x03\x04\x14\x00\x00\x00\x08\x00\xa0l'
+ b'H\x05\xe2\x1e8\xbb\x10\x00\x00\x00\t\x04\x00\x00\x01\x00'
+ b'\x00\x00b\xed\xc0\x81\x08\x00\x00\x00\xc00\xd6\xfbK\\'
+ b'd\x0b`PK\x01\x02\x14\x00\x14\x00\x00\x00\x08\x00\xa0'
+ b'lH\x05Y\xfc8\x044\x00\x00\x00(\x04\x00\x00\x01'
+ b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
+ b'\x00aPK\x01\x02\x14\x00\x14\x00\x00\x00\x08\x00\xa0l'
+ b'H\x05\xe2\x1e8\xbb\x10\x00\x00\x00\t\x04\x00\x00\x01\x00'
+ b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00$\x00\x00\x00'
+ b'bPK\x05\x06\x00\x00\x00\x00\x02\x00\x02\x00^\x00\x00'
+ b'\x00S\x00\x00\x00\x00\x00'
+ )
+ with zipfile.ZipFile(io.BytesIO(data), 'r') as zipf:
+ self.assertEqual(zipf.namelist(), ['a', 'b'])
+ zi = zipf.getinfo('a')
+ self.assertEqual(zi.header_offset, 0)
+ self.assertEqual(zi.compress_size, 52)
+ self.assertEqual(zi.file_size, 1064)
+ zi = zipf.getinfo('b')
+ self.assertEqual(zi.header_offset, 36)
+ self.assertEqual(zi.compress_size, 16)
+ self.assertEqual(zi.file_size, 1033)
+ with self.assertRaisesRegex(zipfile.BadZipFile, 'Overlapped entries'):
+ zipf.read('a')
+ self.assertEqual(len(zipf.read('b')), 1033)
+
def tearDown(self):
unlink(TESTFN)
unlink(TESTFN2)
diff --git a/Lib/zipfile.py b/Lib/zipfile.py
index 1e942a503e8ee1..95f95ee112667a 100644
--- a/Lib/zipfile.py
+++ b/Lib/zipfile.py
@@ -338,6 +338,7 @@ class ZipInfo (object):
'compress_size',
'file_size',
'_raw_time',
+ '_end_offset',
)
def __init__(self, filename="NoName", date_time=(1980,1,1,0,0,0)):
@@ -379,6 +380,7 @@ def __init__(self, filename="NoName", date_time=(1980,1,1,0,0,0)):
self.external_attr = 0 # External file attributes
self.compress_size = 0 # Size of the compressed file
self.file_size = 0 # Size of the uncompressed file
+ self._end_offset = None # Start of the next local header or central directory
# Other attributes are set by class ZipFile:
# header_offset Byte offset to the file header
# CRC CRC-32 of the uncompressed file
@@ -1399,6 +1401,12 @@ def _RealGetContents(self):
if self.debug > 2:
print("total", total)
+ end_offset = self.start_dir
+ for zinfo in sorted(self.filelist,
+ key=lambda zinfo: zinfo.header_offset,
+ reverse=True):
+ zinfo._end_offset = end_offset
+ end_offset = zinfo.header_offset
def namelist(self):
"""Return a list of file names in the archive."""
@@ -1554,6 +1562,10 @@ def open(self, name, mode="r", pwd=None, *, force_zip64=False):
'File name in directory %r and header %r differ.'
% (zinfo.orig_filename, fname))
+ if (zinfo._end_offset is not None and
+ zef_file.tell() + zinfo.compress_size > zinfo._end_offset):
+ raise BadZipFile(f"Overlapped entries: {zinfo.orig_filename!r} (possible zip bomb)")
+
# check for encrypted flag & handle password
is_encrypted = zinfo.flag_bits & 0x1
if is_encrypted:
diff --git a/Misc/NEWS.d/next/Library/2023-09-28-13-15-51.gh-issue-109858.43e2dg.rst b/Misc/NEWS.d/next/Library/2023-09-28-13-15-51.gh-issue-109858.43e2dg.rst
new file mode 100644
index 00000000000000..be279caffc46ee
--- /dev/null
+++ b/Misc/NEWS.d/next/Library/2023-09-28-13-15-51.gh-issue-109858.43e2dg.rst
@@ -0,0 +1,3 @@
+Protect :mod:`zipfile` from "quoted-overlap" zipbomb. It now raises
+BadZipFile when trying to read an entry that overlaps with another entry or
+the central directory.
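For context, code that consumes untrusted archives sees the new check as a BadZipFile exception rather than a runaway extraction; a minimal consumer-side sketch (read_all_members is an illustrative name, not part of the patch):

import zipfile

def read_all_members(path):
    contents = {}
    with zipfile.ZipFile(path) as zf:
        for name in zf.namelist():
            try:
                contents[name] = zf.read(name)
            except zipfile.BadZipFile as exc:
                # With this patch applied, an overlapping entry is rejected,
                # e.g. "Overlapped entries: 'a' (possible zip bomb)".
                print(f"refusing {name!r}: {exc}")
    return contents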

@@ -17,7 +17,7 @@ URL: https://www.python.org/
#global prerel ...
%global upstream_version %{general_version}%{?prerel}
Version: %{general_version}%{?prerel:~%{prerel}}
-Release: 1%{?dist}.1
+Release: 3%{?dist}.1
License: Python
@@ -409,6 +409,12 @@ Patch353: 00353-architecture-names-upstream-downstream.patch
# - https://access.redhat.com/articles/7004769
Patch397: 00397-tarfile-filter.patch
+# 00414 #
+#
+# Skip test_pair() and test_speech128() of test_zlib on s390x since
+# they fail if zlib uses the s390x hardware accelerator.
+Patch414: 00414-skip_test_zlib_s390x.patch
# 00415 #
# [CVE-2023-27043] gh-102988: Reject malformed addresses in email.parseaddr() (#111116)
#
@@ -423,6 +429,27 @@ Patch397: 00397-tarfile-filter.patch
# config file or environment variable.
Patch415: 00415-cve-2023-27043-gh-102988-reject-malformed-addresses-in-email-parseaddr-111116.patch
+# 00422 # a353cebef737c41420dc7ae2469dd657371b8881
+# Fix tests for XMLPullParser with Expat 2.6.0
+#
+# Feeding the parser by too small chunks defers parsing to prevent
+# CVE-2023-52425. Future versions of Expat may be more reactive.
+Patch422: 00422-fix-tests-for-xmlpullparser-with-expat-2-6-0.patch
+# 00426 #
+# CVE-2023-6597: Path traversal on tempfile.TemporaryDirectory
+# Fixed upstream:
+# https://github.com/python/cpython/commit/d54e22a669ae6e987199bb5d2c69bb5a46b0083b
+# Tracking bugzilla: https://bugzilla.redhat.com/show_bug.cgi?id=2276518
+Patch426: 00426-CVE-2023-6597.patch
+# 00427 #
+# CVE-2024-0450: The zipfile module is vulnerable to zip-bombs leading to denial of service
+# Fixed upstream:
+# https://github.com/python/cpython/commit/a2c59992e9e8d35baba9695eb186ad6c6ff85c51
+# Tracking bugzilla: https://bugzilla.redhat.com/show_bug.cgi?id=2276525
+Patch427: 00427-CVE-2024-0450.patch
# (New patches go here ^^^)
#
# When adding new patches to "python" and "python3" in Fedora, EL, etc.,
@@ -1824,9 +1851,18 @@ CheckPython optimized
# ======================================================
%changelog
-* Thu Jan 04 2024 Lumír Balhar <lbalhar@redhat.com> - 3.9.18-1.1
+* Thu May 16 2024 Charalampos Stratakis <cstratak@redhat.com> - 3.9.18-3.1
+- Security fixes for CVE-2023-6597 and CVE-2024-0450
+- Fix tests for XMLPullParser with Expat with fixed CVE
+Resolves: RHEL-33887, RHEL-34287
+* Wed Jan 24 2024 Lumír Balhar <lbalhar@redhat.com> - 3.9.18-3
+- Fix tests on s390x with hw acceleration
+Resolves: RHEL-13043
+* Thu Jan 04 2024 Lumír Balhar <lbalhar@redhat.com> - 3.9.18-2
+- Security fix for CVE-2023-27043
+Resolves: RHEL-5594
-Resolves: RHEL-20613
* Thu Sep 07 2023 Charalampos Stratakis <cstratak@redhat.com> - 3.9.18-1
- Update to 3.9.18
