Compare commits
No commits in common. 'c9-beta' and 'i9c' have entirely different histories.
@@ -1 +1 @@
-SOURCES/Python-3.11.9.tar.xz
+SOURCES/Python-3.11.7.tar.xz
@@ -1 +1 @@
-926cd6a577b2e8dcbb17671b30eda04019328ada SOURCES/Python-3.11.9.tar.xz
+f2534d591121f3845388fbdd6a121b96dfe305a6 SOURCES/Python-3.11.7.tar.xz
@@ -1,75 +0,0 @@
From 670984c96eea60488c5355b4cf535c1ee3cf081a Mon Sep 17 00:00:00 2001
From: rpm-build <rpm-build>
Date: Wed, 24 Apr 2024 04:24:16 +0200
Subject: [PATCH] Fix xml tests

---
 Lib/test/test_pyexpat.py   | 3 +++
 Lib/test/test_sax.py       | 2 ++
 Lib/test/test_xml_etree.py | 6 ++++++
 3 files changed, 11 insertions(+)

diff --git a/Lib/test/test_pyexpat.py b/Lib/test/test_pyexpat.py
index 44bd1de..5976fa0 100644
--- a/Lib/test/test_pyexpat.py
+++ b/Lib/test/test_pyexpat.py
@@ -3,6 +3,7 @@

import os
import platform
+import pyexpat
import sys
import sysconfig
import unittest
@@ -793,6 +794,8 @@ class ReparseDeferralTest(unittest.TestCase):

        self.assertEqual(started, ['doc'])

+    @unittest.skipIf(pyexpat.version_info < (2, 6, 0),
+                     "Reparse deferral not defined for libexpat < 2.6.0")
    def test_reparse_deferral_disabled(self):
        started = []

diff --git a/Lib/test/test_sax.py b/Lib/test/test_sax.py
index 9b3014a..5960de1 100644
--- a/Lib/test/test_sax.py
+++ b/Lib/test/test_sax.py
@@ -1240,6 +1240,8 @@ class ExpatReaderTest(XmlTestBase):

        self.assertEqual(result.getvalue(), start + b"<doc></doc>")

+    @unittest.skipIf(pyexpat.version_info < (2, 6, 0),
+                     "Reparse deferral not defined for libexpat < 2.6.0")
    def test_flush_reparse_deferral_disabled(self):
        result = BytesIO()
        xmlgen = XMLGenerator(result)
diff --git a/Lib/test/test_xml_etree.py b/Lib/test/test_xml_etree.py
index 8becafb..5e9b6b5 100644
--- a/Lib/test/test_xml_etree.py
+++ b/Lib/test/test_xml_etree.py
@@ -1424,9 +1424,13 @@ class XMLPullParserTest(unittest.TestCase):
        self.assert_event_tags(parser, [('end', 'root')])
        self.assertIsNone(parser.close())

+    @unittest.skipIf(pyexpat.version_info < (2, 6, 0),
+                     "test not compatible with the latest expat security release")
    def test_simple_xml_chunk_1(self):
        self.test_simple_xml(chunk_size=1, flush=True)

+    @unittest.skipIf(pyexpat.version_info < (2, 6, 0),
+                     "test not compatible with the latest expat security release")
    def test_simple_xml_chunk_5(self):
        self.test_simple_xml(chunk_size=5, flush=True)

@@ -1651,6 +1655,8 @@ class XMLPullParserTest(unittest.TestCase):

        self.assert_event_tags(parser, [('end', 'doc')])

+    @unittest.skipIf(pyexpat.version_info < (2, 6, 0),
+                     "Reparse deferral not defined for libexpat < 2.6.0")
    def test_flush_reparse_deferral_disabled(self):
        parser = ET.XMLPullParser(events=('start', 'end'))

--
2.44.0

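Context for the guard used in the patch removed above: pyexpat.version_info reports the version of the libexpat library this Python was built against, so gating on it keeps the reparse-deferral tests from failing on hosts whose expat predates 2.6.0. A minimal, self-contained sketch of the same pattern (the class and test names here are illustrative, not taken from the patch):

import unittest

import pyexpat  # version_info reflects the libexpat this interpreter links against


class ReparseDeferralGuardSketch(unittest.TestCase):
    # Reparse deferral only exists in libexpat >= 2.6.0 (the expat security
    # release), so skip rather than fail on older system expat builds.
    @unittest.skipIf(pyexpat.version_info < (2, 6, 0),
                     "Reparse deferral not defined for libexpat < 2.6.0")
    def test_something_that_needs_reparse_deferral(self):
        self.assertGreaterEqual(pyexpat.version_info, (2, 6, 0))


if __name__ == "__main__":
    unittest.main()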
@@ -0,0 +1,77 @@
From c9364e8727ea2426519a74593ab03ebcb0da72b8 Mon Sep 17 00:00:00 2001
From: Lumir Balhar <lbalhar@redhat.com>
Date: Fri, 3 May 2024 14:17:48 +0200
Subject: [PATCH] Expect failures in tests not working properly with expat with
 a fixed CVE in RHEL

---
 Lib/test/test_xml_etree.py | 53 ++++++++++++++++++++++----------------
 1 file changed, 31 insertions(+), 22 deletions(-)

diff --git a/Lib/test/test_xml_etree.py b/Lib/test/test_xml_etree.py
index 7c346f2..24e0bb8 100644
--- a/Lib/test/test_xml_etree.py
+++ b/Lib/test/test_xml_etree.py
@@ -1391,28 +1391,37 @@ class XMLPullParserTest(unittest.TestCase):
        self.assertEqual([(action, elem.tag) for action, elem in events],
                         expected)

-    def test_simple_xml(self):
-        for chunk_size in (None, 1, 5):
-            with self.subTest(chunk_size=chunk_size):
-                parser = ET.XMLPullParser()
-                self.assert_event_tags(parser, [])
-                self._feed(parser, "<!-- comment -->\n", chunk_size)
-                self.assert_event_tags(parser, [])
-                self._feed(parser,
-                           "<root>\n <element key='value'>text</element",
-                           chunk_size)
-                self.assert_event_tags(parser, [])
-                self._feed(parser, ">\n", chunk_size)
-                self.assert_event_tags(parser, [('end', 'element')])
-                self._feed(parser, "<element>text</element>tail\n", chunk_size)
-                self._feed(parser, "<empty-element/>\n", chunk_size)
-                self.assert_event_tags(parser, [
-                    ('end', 'element'),
-                    ('end', 'empty-element'),
-                    ])
-                self._feed(parser, "</root>\n", chunk_size)
-                self.assert_event_tags(parser, [('end', 'root')])
-                self.assertIsNone(parser.close())
+    def test_simple_xml(self, chunk_size=None):
+        parser = ET.XMLPullParser()
+        self.assert_event_tags(parser, [])
+        self._feed(parser, "<!-- comment -->\n", chunk_size)
+        self.assert_event_tags(parser, [])
+        self._feed(parser,
+                   "<root>\n <element key='value'>text</element",
+                   chunk_size)
+        self.assert_event_tags(parser, [])
+        self._feed(parser, ">\n", chunk_size)
+        self.assert_event_tags(parser, [('end', 'element')])
+        self._feed(parser, "<element>text</element>tail\n", chunk_size)
+        self._feed(parser, "<empty-element/>\n", chunk_size)
+        self.assert_event_tags(parser, [
+            ('end', 'element'),
+            ('end', 'empty-element'),
+            ])
+        self._feed(parser, "</root>\n", chunk_size)
+        self.assert_event_tags(parser, [('end', 'root')])
+        self.assertIsNone(parser.close())
+
+    @unittest.expectedFailure
+    def test_simple_xml_chunk_1(self):
+        self.test_simple_xml(chunk_size=1)
+
+    @unittest.expectedFailure
+    def test_simple_xml_chunk_5(self):
+        self.test_simple_xml(chunk_size=5)
+
+    def test_simple_xml_chunk_22(self):
+        self.test_simple_xml(chunk_size=22)

    def test_feed_while_iterating(self):
        parser = ET.XMLPullParser()
--
2.45.0

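The patch added above replaces the version-based skips with unittest.expectedFailure on the chunked variants: the tests still run, a failure is recorded as an expected failure, and an unexpected pass is flagged, whereas skipIf never runs the test body at all. A small self-contained sketch of that behaviour, with made-up test names:

import unittest


class ExpectedFailureSketch(unittest.TestCase):
    @unittest.expectedFailure
    def test_known_broken_case(self):
        # Stands in for a chunk-fed parser test that misbehaves with the
        # patched expat; this failure is reported as "expected failure".
        self.assertEqual(1 + 1, 3)

    def test_normal_case(self):
        self.assertEqual(1 + 1, 2)


if __name__ == "__main__":
    unittest.main()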
@@ -0,0 +1,215 @@
From 5585334d772b253a01a6730e8202ffb1607c3d25 Mon Sep 17 00:00:00 2001
From: Serhiy Storchaka <storchaka@gmail.com>
Date: Thu, 7 Dec 2023 18:37:10 +0200
Subject: [PATCH] [3.11] gh-91133: tempfile.TemporaryDirectory: fix symlink bug
 in cleanup (GH-99930) (GH-112839)
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

(cherry picked from commit 81c16cd94ec38d61aa478b9a452436dc3b1b524d)

Co-authored-by: Søren Løvborg <sorenl@unity3d.com>
---
 Lib/tempfile.py                               |  27 +++--
 Lib/test/test_tempfile.py                     | 111 +++++++++++++++++-
 ...2-12-01-16-57-44.gh-issue-91133.LKMVCV.rst |   2 +
 3 files changed, 125 insertions(+), 15 deletions(-)
 create mode 100644 Misc/NEWS.d/next/Library/2022-12-01-16-57-44.gh-issue-91133.LKMVCV.rst

diff --git a/Lib/tempfile.py b/Lib/tempfile.py
index aace11fa7b19b9..f59a63a7b45b36 100644
--- a/Lib/tempfile.py
+++ b/Lib/tempfile.py
@@ -270,6 +270,22 @@ def _mkstemp_inner(dir, pre, suf, flags, output_type):
    raise FileExistsError(_errno.EEXIST,
                          "No usable temporary file name found")

+def _dont_follow_symlinks(func, path, *args):
+    # Pass follow_symlinks=False, unless not supported on this platform.
+    if func in _os.supports_follow_symlinks:
+        func(path, *args, follow_symlinks=False)
+    elif _os.name == 'nt' or not _os.path.islink(path):
+        func(path, *args)
+
+def _resetperms(path):
+    try:
+        chflags = _os.chflags
+    except AttributeError:
+        pass
+    else:
+        _dont_follow_symlinks(chflags, path, 0)
+    _dont_follow_symlinks(_os.chmod, path, 0o700)
+

# User visible interfaces.

@@ -863,17 +879,10 @@ def __init__(self, suffix=None, prefix=None, dir=None,
    def _rmtree(cls, name, ignore_errors=False):
        def onerror(func, path, exc_info):
            if issubclass(exc_info[0], PermissionError):
-                def resetperms(path):
-                    try:
-                        _os.chflags(path, 0)
-                    except AttributeError:
-                        pass
-                    _os.chmod(path, 0o700)
-
                try:
                    if path != name:
-                        resetperms(_os.path.dirname(path))
-                        resetperms(path)
+                        _resetperms(_os.path.dirname(path))
+                        _resetperms(path)

                    try:
                        _os.unlink(path)
diff --git a/Lib/test/test_tempfile.py b/Lib/test/test_tempfile.py
index 1242ec7e3cc9a1..675edc8de9cca5 100644
--- a/Lib/test/test_tempfile.py
+++ b/Lib/test/test_tempfile.py
@@ -1565,6 +1565,103 @@ def test_cleanup_with_symlink_to_a_directory(self):
                        "were deleted")
        d2.cleanup()

+    @os_helper.skip_unless_symlink
+    def test_cleanup_with_symlink_modes(self):
+        # cleanup() should not follow symlinks when fixing mode bits (#91133)
+        with self.do_create(recurse=0) as d2:
+            file1 = os.path.join(d2, 'file1')
+            open(file1, 'wb').close()
+            dir1 = os.path.join(d2, 'dir1')
+            os.mkdir(dir1)
+            for mode in range(8):
+                mode <<= 6
+                with self.subTest(mode=format(mode, '03o')):
+                    def test(target, target_is_directory):
+                        d1 = self.do_create(recurse=0)
+                        symlink = os.path.join(d1.name, 'symlink')
+                        os.symlink(target, symlink,
+                                   target_is_directory=target_is_directory)
+                        try:
+                            os.chmod(symlink, mode, follow_symlinks=False)
+                        except NotImplementedError:
+                            pass
+                        try:
+                            os.chmod(symlink, mode)
+                        except FileNotFoundError:
+                            pass
+                        os.chmod(d1.name, mode)
+                        d1.cleanup()
+                        self.assertFalse(os.path.exists(d1.name))
+
+                    with self.subTest('nonexisting file'):
+                        test('nonexisting', target_is_directory=False)
+                    with self.subTest('nonexisting dir'):
+                        test('nonexisting', target_is_directory=True)
+
+                    with self.subTest('existing file'):
+                        os.chmod(file1, mode)
+                        old_mode = os.stat(file1).st_mode
+                        test(file1, target_is_directory=False)
+                        new_mode = os.stat(file1).st_mode
+                        self.assertEqual(new_mode, old_mode,
+                                         '%03o != %03o' % (new_mode, old_mode))
+
+                    with self.subTest('existing dir'):
+                        os.chmod(dir1, mode)
+                        old_mode = os.stat(dir1).st_mode
+                        test(dir1, target_is_directory=True)
+                        new_mode = os.stat(dir1).st_mode
+                        self.assertEqual(new_mode, old_mode,
+                                         '%03o != %03o' % (new_mode, old_mode))
+
+    @unittest.skipUnless(hasattr(os, 'chflags'), 'requires os.chflags')
+    @os_helper.skip_unless_symlink
+    def test_cleanup_with_symlink_flags(self):
+        # cleanup() should not follow symlinks when fixing flags (#91133)
+        flags = stat.UF_IMMUTABLE | stat.UF_NOUNLINK
+        self.check_flags(flags)
+
+        with self.do_create(recurse=0) as d2:
+            file1 = os.path.join(d2, 'file1')
+            open(file1, 'wb').close()
+            dir1 = os.path.join(d2, 'dir1')
+            os.mkdir(dir1)
+            def test(target, target_is_directory):
+                d1 = self.do_create(recurse=0)
+                symlink = os.path.join(d1.name, 'symlink')
+                os.symlink(target, symlink,
+                           target_is_directory=target_is_directory)
+                try:
+                    os.chflags(symlink, flags, follow_symlinks=False)
+                except NotImplementedError:
+                    pass
+                try:
+                    os.chflags(symlink, flags)
+                except FileNotFoundError:
+                    pass
+                os.chflags(d1.name, flags)
+                d1.cleanup()
+                self.assertFalse(os.path.exists(d1.name))
+
+            with self.subTest('nonexisting file'):
+                test('nonexisting', target_is_directory=False)
+            with self.subTest('nonexisting dir'):
+                test('nonexisting', target_is_directory=True)
+
+            with self.subTest('existing file'):
+                os.chflags(file1, flags)
+                old_flags = os.stat(file1).st_flags
+                test(file1, target_is_directory=False)
+                new_flags = os.stat(file1).st_flags
+                self.assertEqual(new_flags, old_flags)
+
+            with self.subTest('existing dir'):
+                os.chflags(dir1, flags)
+                old_flags = os.stat(dir1).st_flags
+                test(dir1, target_is_directory=True)
+                new_flags = os.stat(dir1).st_flags
+                self.assertEqual(new_flags, old_flags)
+
    @support.cpython_only
    def test_del_on_collection(self):
        # A TemporaryDirectory is deleted when garbage collected
@@ -1737,10 +1834,7 @@ def test_modes(self):
        d.cleanup()
        self.assertFalse(os.path.exists(d.name))

-    @unittest.skipUnless(hasattr(os, 'chflags'), 'requires os.chflags')
-    def test_flags(self):
-        flags = stat.UF_IMMUTABLE | stat.UF_NOUNLINK
-
+    def check_flags(self, flags):
        # skip the test if these flags are not supported (ex: FreeBSD 13)
        filename = os_helper.TESTFN
        try:
@@ -1749,13 +1843,18 @@ def test_flags(self):
            os.chflags(filename, flags)
        except OSError as exc:
            # "OSError: [Errno 45] Operation not supported"
-            self.skipTest(f"chflags() doesn't support "
-                          f"UF_IMMUTABLE|UF_NOUNLINK: {exc}")
+            self.skipTest(f"chflags() doesn't support flags "
+                          f"{flags:#b}: {exc}")
        else:
            os.chflags(filename, 0)
        finally:
            os_helper.unlink(filename)

+    @unittest.skipUnless(hasattr(os, 'chflags'), 'requires os.chflags')
+    def test_flags(self):
+        flags = stat.UF_IMMUTABLE | stat.UF_NOUNLINK
+        self.check_flags(flags)
+
        d = self.do_create(recurse=3, dirs=2, files=2)
        with d:
            # Change files and directories flags recursively.
diff --git a/Misc/NEWS.d/next/Library/2022-12-01-16-57-44.gh-issue-91133.LKMVCV.rst b/Misc/NEWS.d/next/Library/2022-12-01-16-57-44.gh-issue-91133.LKMVCV.rst
new file mode 100644
index 00000000000000..7991048fc48e03
--- /dev/null
+++ b/Misc/NEWS.d/next/Library/2022-12-01-16-57-44.gh-issue-91133.LKMVCV.rst
@@ -0,0 +1,2 @@
+Fix a bug in :class:`tempfile.TemporaryDirectory` cleanup, which now no longer
+dereferences symlinks when working around file system permission errors.
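A note on the tempfile fix added above: the new _dont_follow_symlinks() helper passes follow_symlinks=False only when os.supports_follow_symlinks says the operation accepts it on the current platform, and otherwise refuses to touch anything that is a symlink. A standalone sketch of the same guard; the helper name is illustrative and not part of the patch:

import os


def chmod_no_follow(path, mode=0o700):
    # Change the mode of *path* without dereferencing a symlink, mirroring
    # the idea of the private helper added to tempfile above.
    if os.chmod in os.supports_follow_symlinks:
        # This platform can change the mode of the link itself.
        os.chmod(path, mode, follow_symlinks=False)
    elif os.name == 'nt' or not os.path.islink(path):
        # Either symlink modes are not a concern here, or the path is not a
        # symlink, so a plain chmod cannot affect an unrelated target.
        os.chmod(path, mode)
    # Otherwise do nothing rather than chmod whatever the link points at.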
@@ -0,0 +1,244 @@
From d449caf8a179e3b954268b3a88eb9170be3c8fbf Mon Sep 17 00:00:00 2001
From: Seth Michael Larson <seth@python.org>
Date: Tue, 3 Sep 2024 10:07:13 -0500
Subject: [PATCH] [3.11] gh-121285: Remove backtracking when parsing tarfile
 headers (GH-121286) (#123639)

* Remove backtracking when parsing tarfile headers
* Rewrite PAX header parsing to be stricter
* Optimize parsing of GNU extended sparse headers v0.0

(cherry picked from commit 34ddb64d088dd7ccc321f6103d23153256caa5d4)

Co-authored-by: Kirill Podoprigora <kirill.bast9@mail.ru>
Co-authored-by: Gregory P. Smith <greg@krypto.org>
---
 Lib/tarfile.py                                | 105 +++++++++++-------
 Lib/test/test_tarfile.py                      |  42 +++++++
 ...-07-02-13-39-20.gh-issue-121285.hrl-yI.rst |   2 +
 3 files changed, 111 insertions(+), 38 deletions(-)
 create mode 100644 Misc/NEWS.d/next/Security/2024-07-02-13-39-20.gh-issue-121285.hrl-yI.rst

diff --git a/Lib/tarfile.py b/Lib/tarfile.py
index 612217b1ad05b3..0d6b925533b63d 100755
--- a/Lib/tarfile.py
+++ b/Lib/tarfile.py
@@ -842,6 +842,9 @@ def data_filter(member, dest_path):
# Sentinel for replace() defaults, meaning "don't change the attribute"
_KEEP = object()

+# Header length is digits followed by a space.
+_header_length_prefix_re = re.compile(br"([0-9]{1,20}) ")
+
class TarInfo(object):
    """Informational class which holds the details about an
       archive member given by a tar header block.
@@ -1411,41 +1414,59 @@ def _proc_pax(self, tarfile):
        else:
            pax_headers = tarfile.pax_headers.copy()

-        # Check if the pax header contains a hdrcharset field. This tells us
-        # the encoding of the path, linkpath, uname and gname fields. Normally,
-        # these fields are UTF-8 encoded but since POSIX.1-2008 tar
-        # implementations are allowed to store them as raw binary strings if
-        # the translation to UTF-8 fails.
-        match = re.search(br"\d+ hdrcharset=([^\n]+)\n", buf)
-        if match is not None:
-            pax_headers["hdrcharset"] = match.group(1).decode("utf-8")
-
-        # For the time being, we don't care about anything other than "BINARY".
-        # The only other value that is currently allowed by the standard is
-        # "ISO-IR 10646 2000 UTF-8" in other words UTF-8.
-        hdrcharset = pax_headers.get("hdrcharset")
-        if hdrcharset == "BINARY":
-            encoding = tarfile.encoding
-        else:
-            encoding = "utf-8"
-
        # Parse pax header information. A record looks like that:
        # "%d %s=%s\n" % (length, keyword, value). length is the size
        # of the complete record including the length field itself and
-        # the newline. keyword and value are both UTF-8 encoded strings.
-        regex = re.compile(br"(\d+) ([^=]+)=")
+        # the newline.
        pos = 0
-        while True:
-            match = regex.match(buf, pos)
-            if not match:
-                break
+        encoding = None
+        raw_headers = []
+        while len(buf) > pos and buf[pos] != 0x00:
+            if not (match := _header_length_prefix_re.match(buf, pos)):
+                raise InvalidHeaderError("invalid header")
+            try:
+                length = int(match.group(1))
+            except ValueError:
+                raise InvalidHeaderError("invalid header")
+            # Headers must be at least 5 bytes, shortest being '5 x=\n'.
+            # Value is allowed to be empty.
+            if length < 5:
+                raise InvalidHeaderError("invalid header")
+            if pos + length > len(buf):
+                raise InvalidHeaderError("invalid header")

-            length, keyword = match.groups()
-            length = int(length)
-            if length == 0:
+            header_value_end_offset = match.start(1) + length - 1  # Last byte of the header
+            keyword_and_value = buf[match.end(1) + 1:header_value_end_offset]
+            raw_keyword, equals, raw_value = keyword_and_value.partition(b"=")
+
+            # Check the framing of the header. The last character must be '\n' (0x0A)
+            if not raw_keyword or equals != b"=" or buf[header_value_end_offset] != 0x0A:
                raise InvalidHeaderError("invalid header")
-            value = buf[match.end(2) + 1:match.start(1) + length - 1]
+            raw_headers.append((length, raw_keyword, raw_value))
+
+            # Check if the pax header contains a hdrcharset field. This tells us
+            # the encoding of the path, linkpath, uname and gname fields. Normally,
+            # these fields are UTF-8 encoded but since POSIX.1-2008 tar
+            # implementations are allowed to store them as raw binary strings if
+            # the translation to UTF-8 fails. For the time being, we don't care about
+            # anything other than "BINARY". The only other value that is currently
+            # allowed by the standard is "ISO-IR 10646 2000 UTF-8" in other words UTF-8.
+            # Note that we only follow the initial 'hdrcharset' setting to preserve
+            # the initial behavior of the 'tarfile' module.
+            if raw_keyword == b"hdrcharset" and encoding is None:
+                if raw_value == b"BINARY":
+                    encoding = tarfile.encoding
+                else:  # This branch ensures only the first 'hdrcharset' header is used.
+                    encoding = "utf-8"
+
+            pos += length

+        # If no explicit hdrcharset is set, we use UTF-8 as a default.
+        if encoding is None:
+            encoding = "utf-8"
+
+        # After parsing the raw headers we can decode them to text.
+        for length, raw_keyword, raw_value in raw_headers:
            # Normally, we could just use "utf-8" as the encoding and "strict"
            # as the error handler, but we better not take the risk. For
            # example, GNU tar <= 1.23 is known to store filenames it cannot
@@ -1453,17 +1474,16 @@ def _proc_pax(self, tarfile):
            # hdrcharset=BINARY header).
            # We first try the strict standard encoding, and if that fails we
            # fall back on the user's encoding and error handler.
-            keyword = self._decode_pax_field(keyword, "utf-8", "utf-8",
+            keyword = self._decode_pax_field(raw_keyword, "utf-8", "utf-8",
                                              tarfile.errors)
            if keyword in PAX_NAME_FIELDS:
-                value = self._decode_pax_field(value, encoding, tarfile.encoding,
+                value = self._decode_pax_field(raw_value, encoding, tarfile.encoding,
                                                tarfile.errors)
            else:
-                value = self._decode_pax_field(value, "utf-8", "utf-8",
+                value = self._decode_pax_field(raw_value, "utf-8", "utf-8",
                                                tarfile.errors)

            pax_headers[keyword] = value
-            pos += length

        # Fetch the next header.
        try:
@@ -1478,7 +1498,7 @@ def _proc_pax(self, tarfile):

        elif "GNU.sparse.size" in pax_headers:
            # GNU extended sparse format version 0.0.
-            self._proc_gnusparse_00(next, pax_headers, buf)
+            self._proc_gnusparse_00(next, raw_headers)

        elif pax_headers.get("GNU.sparse.major") == "1" and pax_headers.get("GNU.sparse.minor") == "0":
            # GNU extended sparse format version 1.0.
@@ -1500,15 +1520,24 @@ def _proc_pax(self, tarfile):

        return next

-    def _proc_gnusparse_00(self, next, pax_headers, buf):
+    def _proc_gnusparse_00(self, next, raw_headers):
        """Process a GNU tar extended sparse header, version 0.0.
        """
        offsets = []
-        for match in re.finditer(br"\d+ GNU.sparse.offset=(\d+)\n", buf):
-            offsets.append(int(match.group(1)))
        numbytes = []
-        for match in re.finditer(br"\d+ GNU.sparse.numbytes=(\d+)\n", buf):
-            numbytes.append(int(match.group(1)))
+        for _, keyword, value in raw_headers:
+            if keyword == b"GNU.sparse.offset":
+                try:
+                    offsets.append(int(value.decode()))
+                except ValueError:
+                    raise InvalidHeaderError("invalid header")
+
+            elif keyword == b"GNU.sparse.numbytes":
+                try:
+                    numbytes.append(int(value.decode()))
+                except ValueError:
+                    raise InvalidHeaderError("invalid header")
+
        next.sparse = list(zip(offsets, numbytes))

    def _proc_gnusparse_01(self, next, pax_headers):
diff --git a/Lib/test/test_tarfile.py b/Lib/test/test_tarfile.py
index 389da7be3a3247..c99c88ce93af02 100644
--- a/Lib/test/test_tarfile.py
+++ b/Lib/test/test_tarfile.py
@@ -1208,6 +1208,48 @@ def test_pax_number_fields(self):
        finally:
            tar.close()

+    def test_pax_header_bad_formats(self):
+        # The fields from the pax header have priority over the
+        # TarInfo.
+        pax_header_replacements = (
+            b" foo=bar\n",
+            b"0 \n",
+            b"1 \n",
+            b"2 \n",
+            b"3 =\n",
+            b"4 =a\n",
+            b"1000000 foo=bar\n",
+            b"0 foo=bar\n",
+            b"-12 foo=bar\n",
+            b"000000000000000000000000036 foo=bar\n",
+        )
+        pax_headers = {"foo": "bar"}
+
+        for replacement in pax_header_replacements:
+            with self.subTest(header=replacement):
+                tar = tarfile.open(tmpname, "w", format=tarfile.PAX_FORMAT,
+                                   encoding="iso8859-1")
+                try:
+                    t = tarfile.TarInfo()
+                    t.name = "pax"  # non-ASCII
+                    t.uid = 1
+                    t.pax_headers = pax_headers
+                    tar.addfile(t)
+                finally:
+                    tar.close()
+
+                with open(tmpname, "rb") as f:
+                    data = f.read()
+                    self.assertIn(b"11 foo=bar\n", data)
+                    data = data.replace(b"11 foo=bar\n", replacement)
+
+                with open(tmpname, "wb") as f:
+                    f.truncate()
+                    f.write(data)
+
+                with self.assertRaisesRegex(tarfile.ReadError, r"method tar: ReadError\('invalid header'\)"):
+                    tarfile.open(tmpname, encoding="iso8859-1")
+

class WriteTestBase(TarTest):
    # Put all write tests in here that are supposed to be tested
diff --git a/Misc/NEWS.d/next/Security/2024-07-02-13-39-20.gh-issue-121285.hrl-yI.rst b/Misc/NEWS.d/next/Security/2024-07-02-13-39-20.gh-issue-121285.hrl-yI.rst
new file mode 100644
index 00000000000000..81f918bfe2b255
--- /dev/null
+++ b/Misc/NEWS.d/next/Security/2024-07-02-13-39-20.gh-issue-121285.hrl-yI.rst
@@ -0,0 +1,2 @@
+Remove backtracking from tarfile header parsing for ``hdrcharset``, PAX, and
+GNU sparse headers.
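The heart of the tarfile change added above is to stop rescanning the whole header buffer with backtracking regexes and instead walk it record by record: match a bounded length prefix (at most 20 digits plus a space), slice exactly that many bytes, and validate the "keyword=value\n" framing inside the slice. A simplified, self-contained sketch of that length-driven parsing; the names are illustrative and ValueError stands in for the module's InvalidHeaderError:

import re

# Bounded length prefix, the same shape as the pattern added to tarfile above:
# at most 20 digits followed by a single space, so no backtracking blow-up.
_length_prefix = re.compile(br"([0-9]{1,20}) ")


def split_pax_records(buf):
    # Split a PAX extended-header block into (keyword, value) byte pairs.
    # Simplified: skips the hdrcharset/decoding logic of the real module.
    records = []
    pos = 0
    while pos < len(buf) and buf[pos] != 0x00:
        match = _length_prefix.match(buf, pos)
        if not match:
            raise ValueError("invalid header")
        length = int(match.group(1))
        end = match.start(1) + length - 1  # index of the record's trailing newline
        if length < 5 or end >= len(buf):
            raise ValueError("invalid header")
        keyword, equals, value = buf[match.end(1) + 1:end].partition(b"=")
        if not keyword or equals != b"=" or buf[end] != 0x0A:
            raise ValueError("invalid header")
        records.append((keyword, value))
        pos += length  # jump straight to the next record, no rescanning
    return records


print(split_pax_records(b"11 foo=bar\n12 uid=1000\n"))
# [(b'foo', b'bar'), (b'uid', b'1000')]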
@@ -0,0 +1,16 @@
-----BEGIN PGP SIGNATURE-----

iQIzBAABCAAdFiEEz9yiRbEEPPKl+Xhl/+h0BBaL2EcFAmVuFigACgkQ/+h0BBaL
2EeHPg/+LU5xs2ZDrQogDcH+A1v8RyursiggypdM5hXTrsFsTCIk4iekcI9xkhG1
ltNX4UuCe5PUEbTgtaWP0ncXARrUnPCoQaQ1sHVDTYoHegancsk+sXZc1JM7qr0p
Y4Ig6mKjuHFMXCInQSI2GaH4t5r4Z1jGk/PGrecIHOPJgqfA/6Z3TBF5N+y3jEvS
2QazMB298q4RDhh9m3REe8LwFPHDlfw9eRohv0MB8xygg9KtxhLZrN7gLBQZvKGD
ihNw6EgJj5OZ0dvwKCCXnlZuwknuJW7vAOPHhYeenPdVdYCGoRSyN7JdD07L+5AG
O14l2rqZrz5Eu28by+kAUrcPYAfAXekw1PmtT3HSd9U/nqnUiTkkJcjyGG/e3cjJ
sUDKMNCSBq0G7j5DB3bB6VHkZjVuz+T+iR5QdfJ4kI2pYSuE/rUj1rhkUXApYsHl
7Wff0QbOW6QT1wCtQcMpJSzkTDVJVYxiqrko/ihlOhphDHYLdOIGOrxWAUwc06x/
BhJD6tM1kEVZvifoJp1OsNwDzZ/Ku6CUs05E1vWxdeNVeANyKAgCZ5hOVmhnv866
11zfgo/znRsMzMIyJuy0bhO0C6omVLzzfhipAbZM2jDorn37xxV0v/I0pceNtLrp
YR7Tjs7+Ihe6/oItjW53j9T7ANdgQ1RVDg98lKlPFNL+hxfctwY=
=0Pkd
-----END PGP SIGNATURE-----
@@ -1,16 +0,0 @@
-----BEGIN PGP SIGNATURE-----

iQIzBAABCAAdFiEEz9yiRbEEPPKl+Xhl/+h0BBaL2EcFAmYNMEcACgkQ/+h0BBaL
2EeHhxAAuuIM9bl0dgAWOjbgRjCeXR8aFdfcI4dkO7bZrUy8eKbM+XCvPUUvloRJ
vzGkxYyTmI4kcNPOHfscUwH7AVVij8nGv7WeaXBUZGIXNwfHwvqOxvYvSsNNNFnr
70yJB7Df8/2s0XqFx3X1aWcnyMDerWKpfJ/VI/NPmCVxkYXGshuTTSFcCMTSFBQB
sNrIb5NWAsBF4R85uRQDlCg1AoyaKOdJNQkPo1Nrjol1ExJ+MHE7+E+QL9pQkUWG
SBISPUhJySBAegxolw6YR5dz1L4nukueQDJz3NizUeQGDvH7h1ImY8cypRi44U61
SUUHhBfmUBiC2dS/tTQawySULWcgbkV4GJ6cJZfDd95uffd4S/GDJCa2wCE2UTlA
XzQHwbcnIeoL064gX7ruBuFHJ6n/Oz7nZkFqbH2aqLTAWgLiUq31xH3HY734sL6X
zIJQRbcK1EM7cnNjKMVPlnHpAeKbsbHbU6yzWwZ7reIoyWlZ7vEGrfXO7Kmul93K
wVaWu0AiOY566ugekdDx4cKV+FQN6oppAN63yTfPJ2Ddcmxs4KNrtozw9OAgDTPE
GTPFD6V1CMuyQj/jOpAmbj+4bRD4Mx3u2PSittvrIeopxrXPsGGSZ5kdl62Xa2+A
DzKyYNXzcmxqS9lGdFb+OWCTyAIXxwZrdz1Q61g5xDvR9z/wZiI=
=Br9/
-----END PGP SIGNATURE-----