import python38-3.8.16-1.module+el8.8.0+17624+9a09af5a

c8-stream-3.8 imports/c8-stream-3.8/python38-3.8.16-1.module+el8.8.0+17624+9a09af5a
Authored by CentOS Sources 2 years ago, committed by MSVSphere Packaging Team
commit bb10d28d6b

.gitignore (vendored): 1 line changed

@@ -0,0 +1 @@
SOURCES/Python-3.8.16-noexe.tar.xz

@@ -0,0 +1 @@
ec97523b167d5b0e915b37e58f03da6384b15caa SOURCES/Python-3.8.16-noexe.tar.xz

@@ -0,0 +1,33 @@
From 08c67bfedd07ebec54f5087b59045b8c78fa2a6d Mon Sep 17 00:00:00 2001
From: David Malcolm <dmalcolm@redhat.com>
Date: Wed, 13 Jan 2010 21:25:18 +0000
Subject: [PATCH] 00001: Fixup distutils/unixccompiler.py to remove standard
library path from rpath
Was Patch0 in ivazquez' python3000 specfile
---
Lib/distutils/unixccompiler.py | 9 +++++++++
1 file changed, 9 insertions(+)
diff --git a/Lib/distutils/unixccompiler.py b/Lib/distutils/unixccompiler.py
index 4d7a6de740..353086a648 100644
--- a/Lib/distutils/unixccompiler.py
+++ b/Lib/distutils/unixccompiler.py
@@ -82,6 +82,15 @@ class UnixCCompiler(CCompiler):
if sys.platform == "cygwin":
exe_extension = ".exe"
+ def _fix_lib_args(self, libraries, library_dirs, runtime_library_dirs):
+ """Remove standard library path from rpath"""
+ libraries, library_dirs, runtime_library_dirs = super()._fix_lib_args(
+ libraries, library_dirs, runtime_library_dirs)
+ libdir = sysconfig.get_config_var('LIBDIR')
+ if runtime_library_dirs and (libdir in runtime_library_dirs):
+ runtime_library_dirs.remove(libdir)
+ return libraries, library_dirs, runtime_library_dirs
+
def preprocess(self, source, output_file=None, macros=None,
include_dirs=None, extra_preargs=None, extra_postargs=None):
fixed_args = self._fix_compile_args(None, macros, include_dirs)
--
2.26.2
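
A minimal sketch of the effect of this patch, assuming a patched (RHEL/Fedora) interpreter; the /opt path and the expected output are only illustrations:

# Sketch: on a patched interpreter the standard LIBDIR is stripped from the
# requested rpath while other directories are kept.
from distutils import sysconfig
from distutils.unixccompiler import UnixCCompiler

cc = UnixCCompiler()
libdir = sysconfig.get_config_var('LIBDIR')                  # e.g. /usr/lib64
libs, dirs, rpath = cc._fix_lib_args([], [], [libdir, '/opt/mylibs'])
print(rpath)                                                 # ['/opt/mylibs']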

@@ -0,0 +1,261 @@
From be6b9803109c3702dbff0ed8b0953913206008ca Mon Sep 17 00:00:00 2001
From: David Malcolm <dmalcolm@redhat.com>
Date: Wed, 13 Jan 2010 21:25:18 +0000
Subject: [PATCH] 00102: Change the various install paths to use /usr/lib64/
instead of /usr/lib/
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit
Only used when "%{_lib}" == "lib64".
Co-authored-by: David Malcolm <dmalcolm@redhat.com>
Co-authored-by: Thomas Spura <tomspur@fedoraproject.org>
Co-authored-by: Slavek Kabrda <bkabrda@redhat.com>
Co-authored-by: Matej Stuchlik <mstuchli@redhat.com>
Co-authored-by: Tomas Orsava <torsava@redhat.com>
Co-authored-by: Charalampos Stratakis <cstratak@redhat.com>
Co-authored-by: Petr Viktorin <pviktori@redhat.com>
Co-authored-by: Miro Hrončok <miro@hroncok.cz>
Co-authored-by: Iryna Shcherbina <shcherbina.iryna@gmail.com>
---
Lib/distutils/command/install.py | 4 ++--
Lib/distutils/sysconfig.py | 6 +++++-
Lib/distutils/tests/test_install.py | 3 ++-
Lib/site.py | 4 ++++
Lib/sysconfig.py | 12 ++++++------
Lib/test/test_site.py | 4 ++--
Makefile.pre.in | 2 +-
Modules/getpath.c | 6 +++---
configure | 4 ++--
configure.ac | 4 ++--
setup.py | 6 +++---
11 files changed, 32 insertions(+), 23 deletions(-)
diff --git a/Lib/distutils/command/install.py b/Lib/distutils/command/install.py
index c625c95bf7..ae4f915669 100644
--- a/Lib/distutils/command/install.py
+++ b/Lib/distutils/command/install.py
@@ -30,14 +30,14 @@ WINDOWS_SCHEME = {
INSTALL_SCHEMES = {
'unix_prefix': {
'purelib': '$base/lib/python$py_version_short/site-packages',
- 'platlib': '$platbase/lib/python$py_version_short/site-packages',
+ 'platlib': '$platbase/lib64/python$py_version_short/site-packages',
'headers': '$base/include/python$py_version_short$abiflags/$dist_name',
'scripts': '$base/bin',
'data' : '$base',
},
'unix_home': {
'purelib': '$base/lib/python',
- 'platlib': '$base/lib/python',
+ 'platlib': '$base/lib64/python',
'headers': '$base/include/python/$dist_name',
'scripts': '$base/bin',
'data' : '$base',
diff --git a/Lib/distutils/sysconfig.py b/Lib/distutils/sysconfig.py
index b51629eb94..9a4892a737 100644
--- a/Lib/distutils/sysconfig.py
+++ b/Lib/distutils/sysconfig.py
@@ -146,8 +146,12 @@ def get_python_lib(plat_specific=0, standard_lib=0, prefix=None):
prefix = plat_specific and EXEC_PREFIX or PREFIX
if os.name == "posix":
+ if plat_specific or standard_lib:
+ lib = "lib64"
+ else:
+ lib = "lib"
libpython = os.path.join(prefix,
- "lib", "python" + get_python_version())
+ lib, "python" + get_python_version())
if standard_lib:
return libpython
else:
diff --git a/Lib/distutils/tests/test_install.py b/Lib/distutils/tests/test_install.py
index 287ab1989e..d4c05e0ab1 100644
--- a/Lib/distutils/tests/test_install.py
+++ b/Lib/distutils/tests/test_install.py
@@ -57,8 +57,9 @@ class InstallTestCase(support.TempdirManager,
self.assertEqual(got, expected)
libdir = os.path.join(destination, "lib", "python")
+ platlibdir = os.path.join(destination, "lib64", "python")
check_path(cmd.install_lib, libdir)
- check_path(cmd.install_platlib, libdir)
+ check_path(cmd.install_platlib, platlibdir)
check_path(cmd.install_purelib, libdir)
check_path(cmd.install_headers,
os.path.join(destination, "include", "python", "foopkg"))
diff --git a/Lib/site.py b/Lib/site.py
index a065ab0b5d..22d53fa562 100644
--- a/Lib/site.py
+++ b/Lib/site.py
@@ -335,11 +335,15 @@ def getsitepackages(prefixes=None):
seen.add(prefix)
if os.sep == '/':
+ sitepackages.append(os.path.join(prefix, "lib64",
+ "python" + sys.version[:3],
+ "site-packages"))
sitepackages.append(os.path.join(prefix, "lib",
"python%d.%d" % sys.version_info[:2],
"site-packages"))
else:
sitepackages.append(prefix)
+ sitepackages.append(os.path.join(prefix, "lib64", "site-packages"))
sitepackages.append(os.path.join(prefix, "lib", "site-packages"))
return sitepackages
diff --git a/Lib/sysconfig.py b/Lib/sysconfig.py
index b9e2fafbc0..0ae6d35b69 100644
--- a/Lib/sysconfig.py
+++ b/Lib/sysconfig.py
@@ -20,10 +20,10 @@ __all__ = [
_INSTALL_SCHEMES = {
'posix_prefix': {
- 'stdlib': '{installed_base}/lib/python{py_version_short}',
- 'platstdlib': '{platbase}/lib/python{py_version_short}',
+ 'stdlib': '{installed_base}/lib64/python{py_version_short}',
+ 'platstdlib': '{platbase}/lib64/python{py_version_short}',
'purelib': '{base}/lib/python{py_version_short}/site-packages',
- 'platlib': '{platbase}/lib/python{py_version_short}/site-packages',
+ 'platlib': '{platbase}/lib64/python{py_version_short}/site-packages',
'include':
'{installed_base}/include/python{py_version_short}{abiflags}',
'platinclude':
@@ -62,10 +62,10 @@ _INSTALL_SCHEMES = {
'data': '{userbase}',
},
'posix_user': {
- 'stdlib': '{userbase}/lib/python{py_version_short}',
- 'platstdlib': '{userbase}/lib/python{py_version_short}',
+ 'stdlib': '{userbase}/lib64/python{py_version_short}',
+ 'platstdlib': '{userbase}/lib64/python{py_version_short}',
'purelib': '{userbase}/lib/python{py_version_short}/site-packages',
- 'platlib': '{userbase}/lib/python{py_version_short}/site-packages',
+ 'platlib': '{userbase}/lib64/python{py_version_short}/site-packages',
'include': '{userbase}/include/python{py_version_short}',
'scripts': '{userbase}/bin',
'data': '{userbase}',
diff --git a/Lib/test/test_site.py b/Lib/test/test_site.py
index 1bbc697936..9a7e80dfa0 100644
--- a/Lib/test/test_site.py
+++ b/Lib/test/test_site.py
@@ -267,8 +267,8 @@ class HelperFunctionsTests(unittest.TestCase):
dirs = site.getsitepackages()
if os.sep == '/':
# OS X, Linux, FreeBSD, etc
- self.assertEqual(len(dirs), 1)
- wanted = os.path.join('xoxo', 'lib',
+ self.assertEqual(len(dirs), 2)
+ wanted = os.path.join('xoxo', 'lib64',
'python%d.%d' % sys.version_info[:2],
'site-packages')
self.assertEqual(dirs[0], wanted)
diff --git a/Makefile.pre.in b/Makefile.pre.in
index a914a9c70f..406a441082 100644
--- a/Makefile.pre.in
+++ b/Makefile.pre.in
@@ -143,7 +143,7 @@ LIBDIR= @libdir@
MANDIR= @mandir@
INCLUDEDIR= @includedir@
CONFINCLUDEDIR= $(exec_prefix)/include
-SCRIPTDIR= $(prefix)/lib
+SCRIPTDIR= $(prefix)/lib64
ABIFLAGS= @ABIFLAGS@
# Detailed destination directories
diff --git a/Modules/getpath.c b/Modules/getpath.c
index b727f66953..a0c5fb6139 100644
--- a/Modules/getpath.c
+++ b/Modules/getpath.c
@@ -730,7 +730,7 @@ calculate_exec_prefix(PyCalculatePath *calculate, _PyPathConfig *pathconfig,
if (safe_wcscpy(exec_prefix, calculate->exec_prefix, exec_prefix_len) < 0) {
return PATHLEN_ERR();
}
- status = joinpath(exec_prefix, L"lib/lib-dynload", exec_prefix_len);
+ status = joinpath(exec_prefix, L"lib64/lib-dynload", exec_prefix_len);
if (_PyStatus_EXCEPTION(status)) {
return status;
}
@@ -1067,7 +1067,7 @@ calculate_zip_path(PyCalculatePath *calculate, const wchar_t *prefix,
return PATHLEN_ERR();
}
}
- status = joinpath(zip_path, L"lib/python00.zip", zip_path_len);
+ status = joinpath(zip_path, L"lib64/python00.zip", zip_path_len);
if (_PyStatus_EXCEPTION(status)) {
return status;
}
@@ -1197,7 +1197,7 @@ calculate_init(PyCalculatePath *calculate, const PyConfig *config)
if (!calculate->exec_prefix) {
return DECODE_LOCALE_ERR("EXEC_PREFIX define", len);
}
- calculate->lib_python = Py_DecodeLocale("lib/python" VERSION, &len);
+ calculate->lib_python = Py_DecodeLocale("lib64/python" VERSION, &len);
if (!calculate->lib_python) {
return DECODE_LOCALE_ERR("EXEC_PREFIX define", len);
}
diff --git a/configure b/configure
index 8886561645..78867c6ffc 100755
--- a/configure
+++ b/configure
@@ -15214,9 +15214,9 @@ fi
if test x$PLATFORM_TRIPLET = x; then
- LIBPL='$(prefix)'"/lib/python${VERSION}/config-${LDVERSION}"
+ LIBPL='$(prefix)'"/lib64/python${VERSION}/config-${LDVERSION}"
else
- LIBPL='$(prefix)'"/lib/python${VERSION}/config-${LDVERSION}-${PLATFORM_TRIPLET}"
+ LIBPL='$(prefix)'"/lib64/python${VERSION}/config-${LDVERSION}-${PLATFORM_TRIPLET}"
fi
diff --git a/configure.ac b/configure.ac
index d8de9d4943..477a5ff1cb 100644
--- a/configure.ac
+++ b/configure.ac
@@ -4689,9 +4689,9 @@ fi
dnl define LIBPL after ABIFLAGS and LDVERSION is defined.
AC_SUBST(PY_ENABLE_SHARED)
if test x$PLATFORM_TRIPLET = x; then
- LIBPL='$(prefix)'"/lib/python${VERSION}/config-${LDVERSION}"
+ LIBPL='$(prefix)'"/lib64/python${VERSION}/config-${LDVERSION}"
else
- LIBPL='$(prefix)'"/lib/python${VERSION}/config-${LDVERSION}-${PLATFORM_TRIPLET}"
+ LIBPL='$(prefix)'"/lib64/python${VERSION}/config-${LDVERSION}-${PLATFORM_TRIPLET}"
fi
AC_SUBST(LIBPL)
diff --git a/setup.py b/setup.py
index b168ed4082..8628b9d1cd 100644
--- a/setup.py
+++ b/setup.py
@@ -649,7 +649,7 @@ class PyBuildExt(build_ext):
# directories (i.e. '.' and 'Include') must be first. See issue
# 10520.
if not CROSS_COMPILING:
- add_dir_to_list(self.compiler.library_dirs, '/usr/local/lib')
+ add_dir_to_list(self.compiler.library_dirs, '/usr/local/lib64')
add_dir_to_list(self.compiler.include_dirs, '/usr/local/include')
# only change this for cross builds for 3.3, issues on Mageia
if CROSS_COMPILING:
@@ -953,11 +953,11 @@ class PyBuildExt(build_ext):
elif curses_library:
readline_libs.append(curses_library)
elif self.compiler.find_library_file(self.lib_dirs +
- ['/usr/lib/termcap'],
+ ['/usr/lib64/termcap'],
'termcap'):
readline_libs.append('termcap')
self.add(Extension('readline', ['readline.c'],
- library_dirs=['/usr/lib/termcap'],
+ library_dirs=['/usr/lib64/termcap'],
extra_link_args=readline_extra_link_args,
libraries=readline_libs))
else:
--
2.26.2
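
A hedged sketch of how the resulting layout can be inspected at runtime; the printed paths are what the patch is expected to produce on a lib64 build, not verified output:

import site
import sysconfig

print(sysconfig.get_path('platlib', 'posix_prefix'))   # /usr/lib64/python3.8/site-packages
print(sysconfig.get_path('purelib', 'posix_prefix'))   # /usr/lib/python3.8/site-packages
print(site.getsitepackages()[:2])                      # lib64 entry first, then lib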

@@ -0,0 +1,78 @@
From 50236468e82a7a19ed3dd7e13cb922e7d3e0ff7f Mon Sep 17 00:00:00 2001
From: David Malcolm <dmalcolm@redhat.com>
Date: Mon, 18 Jan 2010 17:59:07 +0000
Subject: [PATCH] 00111: Don't try to build a libpythonMAJOR.MINOR.a
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit
Downstream only: not appropriate for upstream.
See https://bugzilla.redhat.com/show_bug.cgi?id=556092
Co-authored-by: David Malcolm <dmalcolm@redhat.com>
Co-authored-by: Bohuslav Kabrda <bkabrda@redhat.com>
Co-authored-by: Matej Stuchlik <mstuchli@redhat.com>
Co-authored-by: Robert Kuska <rkuska@redhat.com>
Co-authored-by: Charalampos Stratakis <cstratak@redhat.com>
Co-authored-by: Miro Hrončok <miro@hroncok.cz>
---
Makefile.pre.in | 21 ++-------------------
1 file changed, 2 insertions(+), 19 deletions(-)
diff --git a/Makefile.pre.in b/Makefile.pre.in
index 406a441082..917303dd92 100644
--- a/Makefile.pre.in
+++ b/Makefile.pre.in
@@ -562,7 +562,7 @@ clinic: check-clean-src $(srcdir)/Modules/_blake2/blake2s_impl.c
$(PYTHON_FOR_REGEN) $(srcdir)/Tools/clinic/clinic.py --make --srcdir $(srcdir)
# Build the interpreter
-$(BUILDPYTHON): Programs/python.o $(LIBRARY) $(LDLIBRARY) $(PY3LIBRARY)
+$(BUILDPYTHON): Programs/python.o $(LDLIBRARY) $(PY3LIBRARY)
$(LINKCC) $(PY_CORE_LDFLAGS) $(LINKFORSHARED) -o $@ Programs/python.o $(BLDLIBRARY) $(LIBS) $(MODLIBS) $(SYSLIBS)
platform: $(BUILDPYTHON) pybuilddir.txt
@@ -610,12 +610,6 @@ sharedmods: $(BUILDPYTHON) pybuilddir.txt Modules/_math.o
_TCLTK_INCLUDES='$(TCLTK_INCLUDES)' _TCLTK_LIBS='$(TCLTK_LIBS)' \
$(PYTHON_FOR_BUILD) $(srcdir)/setup.py $$quiet build
-
-# Build static library
-$(LIBRARY): $(LIBRARY_OBJS)
- -rm -f $@
- $(AR) $(ARFLAGS) $@ $(LIBRARY_OBJS)
-
libpython$(LDVERSION).so: $(LIBRARY_OBJS) $(DTRACE_OBJS)
if test $(INSTSONAME) != $(LDLIBRARY); then \
$(BLDSHARED) -Wl,-h$(INSTSONAME) -o $(INSTSONAME) $(LIBRARY_OBJS) $(MODLIBS) $(SHLIBS) $(LIBC) $(LIBM); \
@@ -693,7 +687,7 @@ Makefile Modules/config.c: Makefile.pre \
@echo "The Makefile was updated, you may need to re-run make."
-Programs/_testembed: Programs/_testembed.o $(LIBRARY) $(LDLIBRARY) $(PY3LIBRARY)
+Programs/_testembed: Programs/_testembed.o $(LDLIBRARY) $(PY3LIBRARY)
$(LINKCC) $(PY_CORE_LDFLAGS) $(LINKFORSHARED) -o $@ Programs/_testembed.o $(BLDLIBRARY) $(LIBS) $(MODLIBS) $(SYSLIBS)
############################################################################
@@ -1557,17 +1551,6 @@ libainstall: @DEF_MAKE_RULE@ python-config
else true; \
fi; \
done
- @if test -d $(LIBRARY); then :; else \
- if test "$(PYTHONFRAMEWORKDIR)" = no-framework; then \
- if test "$(SHLIB_SUFFIX)" = .dll; then \
- $(INSTALL_DATA) $(LDLIBRARY) $(DESTDIR)$(LIBPL) ; \
- else \
- $(INSTALL_DATA) $(LIBRARY) $(DESTDIR)$(LIBPL)/$(LIBRARY) ; \
- fi; \
- else \
- echo Skip install of $(LIBRARY) - use make frameworkinstall; \
- fi; \
- fi
$(INSTALL_DATA) Modules/config.c $(DESTDIR)$(LIBPL)/config.c
$(INSTALL_DATA) Programs/python.o $(DESTDIR)$(LIBPL)/python.o
$(INSTALL_DATA) $(srcdir)/Modules/config.c.in $(DESTDIR)$(LIBPL)/config.c.in
--
2.26.2
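
A quick check (sketch only) that no static libpython is shipped; it assumes the config directory exists, i.e. the development subpackage is installed:

import os
import sysconfig

libpl = sysconfig.get_config_var('LIBPL')      # e.g. /usr/lib64/python3.8/config-3.8-...
print([name for name in os.listdir(libpl) if name.endswith('.a')])   # expected: []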

@@ -0,0 +1,76 @@
From ff07393a8c9c3d69090a775a8d4b89a3019f71a9 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Miro=20Hron=C4=8Dok?= <miro@hroncok.cz>
Date: Wed, 15 Aug 2018 15:36:29 +0200
Subject: [PATCH] 00189: Instead of bundled wheels, use our RPM packaged wheels
We keep them in /usr/share/python-wheels
Downstream only: upstream bundles
We might eventually pursue upstream support, but it's low prio
---
Lib/ensurepip/__init__.py | 36 ++++++++++++++++++++++++++----------
1 file changed, 26 insertions(+), 10 deletions(-)
diff --git a/Lib/ensurepip/__init__.py b/Lib/ensurepip/__init__.py
index b291e9a..798d0f4 100644
--- a/Lib/ensurepip/__init__.py
+++ b/Lib/ensurepip/__init__.py
@@ -1,6 +1,7 @@
+import distutils.version
+import glob
import os
import os.path
-import pkgutil
import sys
import runpy
import tempfile
@@ -9,8 +10,26 @@ import subprocess
__all__ = ["version", "bootstrap"]
_PACKAGE_NAMES = ('setuptools', 'pip')
-_SETUPTOOLS_VERSION = "56.0.0"
-_PIP_VERSION = "22.0.4"
+
+_WHEEL_DIR = "/usr/share/python38-wheels/"
+
+_wheels = {}
+
+def _get_most_recent_wheel_version(pkg):
+ prefix = os.path.join(_WHEEL_DIR, "{}-".format(pkg))
+ _wheels[pkg] = {}
+ for suffix in "-py2.py3-none-any.whl", "-py3-none-any.whl":
+ pattern = "{}*{}".format(prefix, suffix)
+ for path in glob.glob(pattern):
+ version_str = path[len(prefix):-len(suffix)]
+ _wheels[pkg][version_str] = os.path.basename(path)
+ return str(max(_wheels[pkg], key=distutils.version.LooseVersion))
+
+
+_SETUPTOOLS_VERSION = _get_most_recent_wheel_version("setuptools")
+
+_PIP_VERSION = _get_most_recent_wheel_version("pip")
+
_PROJECTS = [
("setuptools", _SETUPTOOLS_VERSION, "py3"),
("pip", _PIP_VERSION, "py3"),
@@ -99,13 +118,10 @@ def _bootstrap(*, root=None, upgrade=False, user=False,
# additional paths that need added to sys.path
additional_paths = []
for project, version, py_tag in _PROJECTS:
- wheel_name = "{}-{}-{}-none-any.whl".format(project, version, py_tag)
- whl = pkgutil.get_data(
- "ensurepip",
- "_bundled/{}".format(wheel_name),
- )
- with open(os.path.join(tmpdir, wheel_name), "wb") as fp:
- fp.write(whl)
+ wheel_name = _wheels[project][version]
+ with open(os.path.join(_WHEEL_DIR, wheel_name), "rb") as sfp:
+ with open(os.path.join(tmpdir, wheel_name), "wb") as fp:
+ fp.write(sfp.read())
additional_paths.append(os.path.join(tmpdir, wheel_name))
--
2.35.3
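
A sketch of the observable effect, assuming the packaged setuptools and pip wheels are present under /usr/share/python38-wheels/; the exact versions depend on the wheel RPMs installed:

import ensurepip

print(ensurepip.version())   # version of the pip wheel found in the wheel directory
print(ensurepip._PROJECTS)   # [('setuptools', <ver>, 'py3'), ('pip', <ver>, 'py3')]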

@@ -0,0 +1,64 @@
From 197b8de27ebcd17fc5dd51426a639950c6f6c284 Mon Sep 17 00:00:00 2001
From: Michal Cyprian <m.cyprian@gmail.com>
Date: Mon, 26 Jun 2017 16:32:56 +0200
Subject: [PATCH] 00251: Change user install location
Set values of prefix and exec_prefix in distutils install command
to /usr/local if executable is /usr/bin/python* and RPM build
is not detected to make pip and distutils install into separate location.
Fedora Change: https://fedoraproject.org/wiki/Changes/Making_sudo_pip_safe
---
Lib/distutils/command/install.py | 15 +++++++++++++--
Lib/site.py | 9 ++++++++-
2 files changed, 21 insertions(+), 3 deletions(-)
diff --git a/Lib/distutils/command/install.py b/Lib/distutils/command/install.py
index ae4f915669..0e4fd5b74a 100644
--- a/Lib/distutils/command/install.py
+++ b/Lib/distutils/command/install.py
@@ -418,8 +418,19 @@ class install(Command):
raise DistutilsOptionError(
"must not supply exec-prefix without prefix")
- self.prefix = os.path.normpath(sys.prefix)
- self.exec_prefix = os.path.normpath(sys.exec_prefix)
+ # self.prefix is set to sys.prefix + /local/
+ # if neither RPM build nor virtual environment is
+ # detected to make pip and distutils install packages
+ # into the separate location.
+ if (not (hasattr(sys, 'real_prefix') or
+ sys.prefix != sys.base_prefix) and
+ 'RPM_BUILD_ROOT' not in os.environ):
+ addition = "/local"
+ else:
+ addition = ""
+
+ self.prefix = os.path.normpath(sys.prefix) + addition
+ self.exec_prefix = os.path.normpath(sys.exec_prefix) + addition
else:
if self.exec_prefix is None:
diff --git a/Lib/site.py b/Lib/site.py
index 22d53fa562..9513526109 100644
--- a/Lib/site.py
+++ b/Lib/site.py
@@ -348,7 +348,14 @@ def getsitepackages(prefixes=None):
return sitepackages
def addsitepackages(known_paths, prefixes=None):
- """Add site-packages to sys.path"""
+ """Add site-packages to sys.path
+
+ '/usr/local' is included in PREFIXES if RPM build is not detected
+ to make packages installed into this location visible.
+
+ """
+ if ENABLE_USER_SITE and 'RPM_BUILD_ROOT' not in os.environ:
+ PREFIXES.insert(0, "/usr/local")
for sitedir in getsitepackages(prefixes):
if os.path.isdir(sitedir):
addsitedir(sitedir, known_paths)
--
2.26.2
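
A sketch of the new default, assuming the code runs outside an RPM build (RPM_BUILD_ROOT unset) and outside a virtual environment:

from distutils.dist import Distribution
from distutils.command.install import install

cmd = install(Distribution())
cmd.ensure_finalized()
print(cmd.prefix, cmd.exec_prefix)   # /usr/local /usr/local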

@@ -0,0 +1,55 @@
From aedd897c6371bc54d3b2e2c9420fce6730c2acff Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Miro=20Hron=C4=8Dok?= <miro@hroncok.cz>
Date: Thu, 11 Jul 2019 13:44:13 +0200
Subject: [PATCH] 00328: Restore pyc to TIMESTAMP invalidation mode as default
in rpmbuild
Since Fedora 31, the $SOURCE_DATE_EPOCH is set in rpmbuild to the latest
%changelog date. This makes Python default to the CHECKED_HASH pyc
invalidation mode, bringing more reproducible builds traded for an import
performance decrease. To avoid that, we don't default to CHECKED_HASH
when $RPM_BUILD_ROOT is set (i.e. when we are building RPM packages).
See https://src.fedoraproject.org/rpms/redhat-rpm-config/pull-request/57#comment-27426
---
Lib/py_compile.py | 3 ++-
Lib/test/test_py_compile.py | 2 ++
2 files changed, 4 insertions(+), 1 deletion(-)
diff --git a/Lib/py_compile.py b/Lib/py_compile.py
index 21736896af..310bed5620 100644
--- a/Lib/py_compile.py
+++ b/Lib/py_compile.py
@@ -70,7 +70,8 @@ class PycInvalidationMode(enum.Enum):
def _get_default_invalidation_mode():
- if os.environ.get('SOURCE_DATE_EPOCH'):
+ if (os.environ.get('SOURCE_DATE_EPOCH') and not
+ os.environ.get('RPM_BUILD_ROOT')):
return PycInvalidationMode.CHECKED_HASH
else:
return PycInvalidationMode.TIMESTAMP
diff --git a/Lib/test/test_py_compile.py b/Lib/test/test_py_compile.py
index d4a68c9320..ed09874023 100644
--- a/Lib/test/test_py_compile.py
+++ b/Lib/test/test_py_compile.py
@@ -17,6 +17,7 @@ def without_source_date_epoch(fxn):
def wrapper(*args, **kwargs):
with support.EnvironmentVarGuard() as env:
env.unset('SOURCE_DATE_EPOCH')
+ env.unset('RPM_BUILD_ROOT')
return fxn(*args, **kwargs)
return wrapper
@@ -27,6 +28,7 @@ def with_source_date_epoch(fxn):
def wrapper(*args, **kwargs):
with support.EnvironmentVarGuard() as env:
env['SOURCE_DATE_EPOCH'] = '123456789'
+ env.unset('RPM_BUILD_ROOT')
return fxn(*args, **kwargs)
return wrapper
--
2.26.2
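
A sketch of the downstream default; the environment values here are illustrative only:

import os
import py_compile

os.environ['SOURCE_DATE_EPOCH'] = '123456789'
os.environ['RPM_BUILD_ROOT'] = '/builddir/build/BUILDROOT'
print(py_compile._get_default_invalidation_mode())   # PycInvalidationMode.TIMESTAMP

del os.environ['RPM_BUILD_ROOT']
print(py_compile._get_default_invalidation_mode())   # PycInvalidationMode.CHECKED_HASH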

File diff suppressed because it is too large.

@@ -0,0 +1,97 @@
From 0000000000000000000000000000000000000000 Mon Sep 17 00:00:00 2001
From: Lumir Balhar <lbalhar@redhat.com>
Date: Tue, 4 Aug 2020 12:04:03 +0200
Subject: [PATCH] 00353: Original names for architectures with different names
downstream
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit
https://fedoraproject.org/wiki/Changes/Python_Upstream_Architecture_Names
Pythons in RHEL/Fedora used different names for some architectures
than upstream and other distros (for example ppc64 vs. powerpc64).
This was patched in patch 274, now it is sedded if %with legacy_archnames.
That meant that an extension built with the default upstream settings
(on another distro or as a manylinux wheel) could not be found by Python
on RHEL/Fedora because it had a different suffix.
This patch adds the legacy names to importlib so Python is able
to import extensions with a legacy architecture name in its
file name.
It works both ways, so it supports both %with and %without legacy_archnames.
WARNING: This patch has no effect on Python built with bootstrap
enabled because Python/importlib_external.h is not regenerated
and therefore Python during bootstrap contains importlib from
upstream without this feature. It's possible to include
Python/importlib_external.h to this patch but it'd make rebasing
a nightmare because it's basically a binary file.
Co-authored-by: Miro Hrončok <miro@hroncok.cz>
---
Lib/importlib/_bootstrap_external.py | 40 ++++++++++++++++++++++++++--
1 file changed, 38 insertions(+), 2 deletions(-)
diff --git a/Lib/importlib/_bootstrap_external.py b/Lib/importlib/_bootstrap_external.py
index b8ac482994..62e937b819 100644
--- a/Lib/importlib/_bootstrap_external.py
+++ b/Lib/importlib/_bootstrap_external.py
@@ -1559,7 +1559,7 @@ def _get_supported_file_loaders():
Each item is a tuple (loader, suffixes).
"""
- extensions = ExtensionFileLoader, _imp.extension_suffixes()
+ extensions = ExtensionFileLoader, _alternative_architectures(_imp.extension_suffixes())
source = SourceFileLoader, SOURCE_SUFFIXES
bytecode = SourcelessFileLoader, BYTECODE_SUFFIXES
return [extensions, source, bytecode]
@@ -1623,7 +1623,7 @@ def _setup(_bootstrap_module):
# Constants
setattr(self_module, '_relax_case', _make_relax_case())
- EXTENSION_SUFFIXES.extend(_imp.extension_suffixes())
+ EXTENSION_SUFFIXES.extend(_alternative_architectures(_imp.extension_suffixes()))
if builtin_os == 'nt':
SOURCE_SUFFIXES.append('.pyw')
if '_d.pyd' in EXTENSION_SUFFIXES:
@@ -1636,3 +1636,39 @@ def _install(_bootstrap_module):
supported_loaders = _get_supported_file_loaders()
sys.path_hooks.extend([FileFinder.path_hook(*supported_loaders)])
sys.meta_path.append(PathFinder)
+
+
+_ARCH_MAP = {
+ "-arm-linux-gnueabi.": "-arm-linux-gnueabihf.",
+ "-armeb-linux-gnueabi.": "-armeb-linux-gnueabihf.",
+ "-mips64-linux-gnu.": "-mips64-linux-gnuabi64.",
+ "-mips64el-linux-gnu.": "-mips64el-linux-gnuabi64.",
+ "-ppc-linux-gnu.": "-powerpc-linux-gnu.",
+ "-ppc-linux-gnuspe.": "-powerpc-linux-gnuspe.",
+ "-ppc64-linux-gnu.": "-powerpc64-linux-gnu.",
+ "-ppc64le-linux-gnu.": "-powerpc64le-linux-gnu.",
+ # The above, but the other way around:
+ "-arm-linux-gnueabihf.": "-arm-linux-gnueabi.",
+ "-armeb-linux-gnueabihf.": "-armeb-linux-gnueabi.",
+ "-mips64-linux-gnuabi64.": "-mips64-linux-gnu.",
+ "-mips64el-linux-gnuabi64.": "-mips64el-linux-gnu.",
+ "-powerpc-linux-gnu.": "-ppc-linux-gnu.",
+ "-powerpc-linux-gnuspe.": "-ppc-linux-gnuspe.",
+ "-powerpc64-linux-gnu.": "-ppc64-linux-gnu.",
+ "-powerpc64le-linux-gnu.": "-ppc64le-linux-gnu.",
+}
+
+
+def _alternative_architectures(suffixes):
+ """Add a suffix with an alternative architecture name
+ to the list of suffixes so an extension built with
+ the default (upstream) setting is loadable with our Pythons
+ """
+
+ for suffix in suffixes:
+ for original, alternative in _ARCH_MAP.items():
+ if original in suffix:
+ suffixes.append(suffix.replace(original, alternative))
+ return suffixes
+
+ return suffixes
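
A sketch of what this buys at runtime on an affected architecture (ppc64le used as an example; x86_64 suffixes are unchanged):

import importlib.machinery

print(importlib.machinery.EXTENSION_SUFFIXES)
# expected to contain both spellings, e.g. '.cpython-38-ppc64le-linux-gnu.so'
# and '.cpython-38-powerpc64le-linux-gnu.so', so extensions built with the
# upstream naming still import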

@@ -0,0 +1,567 @@
From 78da9e020385fe78e36c20f99a0910bbc4a0c100 Mon Sep 17 00:00:00 2001
From: Lumir Balhar <lbalhar@redhat.com>
Date: Thu, 1 Apr 2021 08:18:07 +0200
Subject: [PATCH] CVE-2021-23336: Add `separator` argument to parse_qs; warn
with default
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit
Partially backports https://bugs.python.org/issue42967 : [security] Address a web cache-poisoning issue reported in urllib.parse.parse_qsl().
However, this solution is different than the upstream solution in Python 3.6.13.
An optional argument separator is added to specify the separator.
It is recommended to set it to '&' or ';' to match the application or proxy in use.
The default can be set with an env variable or a config file.
If neither the argument, the env var, nor the config file specifies a separator, "&" is used
but a warning is raised if parse_qs is used on input that contains ';'.
Co-authors of the upstream change (who do not necessarily agree with this):
Co-authored-by: Adam Goldschmidt <adamgold7@gmail.com>
Co-authored-by: Ken Jin <28750310+Fidget-Spinner@users.noreply.github.com>
Co-authored-by: Éric Araujo <merwok@netwok.org>
---
Doc/library/cgi.rst | 2 +-
Doc/library/urllib.parse.rst | 12 +-
Lib/cgi.py | 4 +-
Lib/test/test_cgi.py | 29 +++++
Lib/test/test_urlparse.py | 232 ++++++++++++++++++++++++++++++++++-
Lib/urllib/parse.py | 78 +++++++++++-
6 files changed, 339 insertions(+), 18 deletions(-)
diff --git a/Doc/library/cgi.rst b/Doc/library/cgi.rst
index 880074b..d8a6dc1 100644
--- a/Doc/library/cgi.rst
+++ b/Doc/library/cgi.rst
@@ -277,7 +277,7 @@ These are useful if you want more control, or if you want to employ some of the
algorithms implemented in this module in other circumstances.
-.. function:: parse(fp=None, environ=os.environ, keep_blank_values=False, strict_parsing=False, separator="&")
+.. function:: parse(fp=None, environ=os.environ, keep_blank_values=False, strict_parsing=False, separator=None)
Parse a query in the environment or from a file (the file defaults to
``sys.stdin``). The *keep_blank_values*, *strict_parsing* and *separator* parameters are
diff --git a/Doc/library/urllib.parse.rst b/Doc/library/urllib.parse.rst
index a6cfc5d..85b2448 100644
--- a/Doc/library/urllib.parse.rst
+++ b/Doc/library/urllib.parse.rst
@@ -165,7 +165,7 @@ or on combining URL components into a URL string.
now raise :exc:`ValueError`.
-.. function:: parse_qs(qs, keep_blank_values=False, strict_parsing=False, encoding='utf-8', errors='replace', max_num_fields=None, separator='&')
+.. function:: parse_qs(qs, keep_blank_values=False, strict_parsing=False, encoding='utf-8', errors='replace', max_num_fields=None, separator=None)
Parse a query string given as a string argument (data of type
:mimetype:`application/x-www-form-urlencoded`). Data are returned as a
@@ -191,7 +191,13 @@ or on combining URL components into a URL string.
*max_num_fields* fields read.
The optional argument *separator* is the symbol to use for separating the
- query arguments. It defaults to ``&``.
+ query arguments. It is recommended to set it to ``'&'`` or ``';'``.
+ It defaults to ``'&'``; a warning is raised if this default is used.
+ This default may be changed with the following environment variable settings:
+
+ - ``PYTHON_URLLIB_QS_SEPARATOR='&'``: use only ``&`` as separator, without warning (as in Python 3.6.13+ or 3.10)
+ - ``PYTHON_URLLIB_QS_SEPARATOR=';'``: use only ``;`` as separator
+ - ``PYTHON_URLLIB_QS_SEPARATOR=legacy``: use both ``&`` and ``;`` (as in previous versions of Python)
Use the :func:`urllib.parse.urlencode` function (with the ``doseq``
parameter set to ``True``) to convert such dictionaries into query
@@ -236,7 +242,7 @@ or on combining URL components into a URL string.
*max_num_fields* fields read.
The optional argument *separator* is the symbol to use for separating the
- query arguments. It defaults to ``&``.
+ query arguments. It works as in :py:func:`parse_qs`.
Use the :func:`urllib.parse.urlencode` function to convert such lists of pairs into
query strings.
diff --git a/Lib/cgi.py b/Lib/cgi.py
index 1e880e5..d7b994b 100755
--- a/Lib/cgi.py
+++ b/Lib/cgi.py
@@ -116,7 +116,7 @@ log = initlog # The current logging function
maxlen = 0
def parse(fp=None, environ=os.environ, keep_blank_values=0,
- strict_parsing=0, separator='&'):
+ strict_parsing=0, separator=None):
"""Parse a query in the environment or from a file (default stdin)
Arguments, all optional:
@@ -319,7 +319,7 @@ class FieldStorage:
def __init__(self, fp=None, headers=None, outerboundary=b'',
environ=os.environ, keep_blank_values=0, strict_parsing=0,
limit=None, encoding='utf-8', errors='replace',
- max_num_fields=None, separator='&'):
+ max_num_fields=None, separator=None):
"""Constructor. Read multipart/* until last part.
Arguments, all optional:
diff --git a/Lib/test/test_cgi.py b/Lib/test/test_cgi.py
index 4e1506a..49b6926 100644
--- a/Lib/test/test_cgi.py
+++ b/Lib/test/test_cgi.py
@@ -180,6 +180,35 @@ Content-Length: 3
env = {'QUERY_STRING': orig}
fs = cgi.FieldStorage(environ=env)
+ if isinstance(expect, dict):
+ # test dict interface
+ self.assertEqual(len(expect), len(fs))
+ self.assertCountEqual(expect.keys(), fs.keys())
+ self.assertEqual(fs.getvalue("nonexistent field", "default"), "default")
+ # test individual fields
+ for key in expect.keys():
+ expect_val = expect[key]
+ self.assertIn(key, fs)
+ if len(expect_val) > 1:
+ self.assertEqual(fs.getvalue(key), expect_val)
+ else:
+ self.assertEqual(fs.getvalue(key), expect_val[0])
+
+ def test_separator(self):
+ parse_semicolon = [
+ ("x=1;y=2.0", {'x': ['1'], 'y': ['2.0']}),
+ ("x=1;y=2.0;z=2-3.%2b0", {'x': ['1'], 'y': ['2.0'], 'z': ['2-3.+0']}),
+ (";", ValueError("bad query field: ''")),
+ (";;", ValueError("bad query field: ''")),
+ ("=;a", ValueError("bad query field: 'a'")),
+ (";b=a", ValueError("bad query field: ''")),
+ ("b;=a", ValueError("bad query field: 'b'")),
+ ("a=a+b;b=b+c", {'a': ['a b'], 'b': ['b c']}),
+ ("a=a+b;a=b+a", {'a': ['a b', 'b a']}),
+ ]
+ for orig, expect in parse_semicolon:
+ env = {'QUERY_STRING': orig}
+ fs = cgi.FieldStorage(separator=';', environ=env)
if isinstance(expect, dict):
# test dict interface
self.assertEqual(len(expect), len(fs))
diff --git a/Lib/test/test_urlparse.py b/Lib/test/test_urlparse.py
index 0f99130..4e0d7e5 100644
--- a/Lib/test/test_urlparse.py
+++ b/Lib/test/test_urlparse.py
@@ -2,6 +2,11 @@ import sys
import unicodedata
import unittest
import urllib.parse
+from test.support import EnvironmentVarGuard
+from warnings import catch_warnings
+import tempfile
+import contextlib
+import os.path
RFC1808_BASE = "http://a/b/c/d;p?q#f"
RFC2396_BASE = "http://a/b/c/d;p?q"
@@ -32,10 +37,34 @@ parse_qsl_test_cases = [
(b"&a=b", [(b'a', b'b')]),
(b"a=a+b&b=b+c", [(b'a', b'a b'), (b'b', b'b c')]),
(b"a=1&a=2", [(b'a', b'1'), (b'a', b'2')]),
+]
+
+parse_qsl_test_cases_semicolon = [
+ (";", []),
+ (";;", []),
+ (";a=b", [('a', 'b')]),
+ ("a=a+b;b=b+c", [('a', 'a b'), ('b', 'b c')]),
+ ("a=1;a=2", [('a', '1'), ('a', '2')]),
+ (b";", []),
+ (b";;", []),
+ (b";a=b", [(b'a', b'b')]),
+ (b"a=a+b;b=b+c", [(b'a', b'a b'), (b'b', b'b c')]),
+ (b"a=1;a=2", [(b'a', b'1'), (b'a', b'2')]),
+]
+
+parse_qsl_test_cases_legacy = [
+ (b"a=1;a=2&a=3", [(b'a', b'1'), (b'a', b'2'), (b'a', b'3')]),
+ (b"a=1;b=2&c=3", [(b'a', b'1'), (b'b', b'2'), (b'c', b'3')]),
+ (b"a=1&b=2&c=3;", [(b'a', b'1'), (b'b', b'2'), (b'c', b'3')]),
+]
+
+parse_qsl_test_cases_warn = [
(";a=b", [(';a', 'b')]),
("a=a+b;b=b+c", [('a', 'a b;b=b c')]),
(b";a=b", [(b';a', b'b')]),
(b"a=a+b;b=b+c", [(b'a', b'a b;b=b c')]),
+ ("a=1;a=2&a=3", [('a', '1;a=2'), ('a', '3')]),
+ (b"a=1;a=2&a=3", [(b'a', b'1;a=2'), (b'a', b'3')]),
]
# Each parse_qs testcase is a two-tuple that contains
@@ -62,10 +91,37 @@ parse_qs_test_cases = [
(b"&a=b", {b'a': [b'b']}),
(b"a=a+b&b=b+c", {b'a': [b'a b'], b'b': [b'b c']}),
(b"a=1&a=2", {b'a': [b'1', b'2']}),
+]
+
+parse_qs_test_cases_semicolon = [
+ (";", {}),
+ (";;", {}),
+ (";a=b", {'a': ['b']}),
+ ("a=a+b;b=b+c", {'a': ['a b'], 'b': ['b c']}),
+ ("a=1;a=2", {'a': ['1', '2']}),
+ (b";", {}),
+ (b";;", {}),
+ (b";a=b", {b'a': [b'b']}),
+ (b"a=a+b;b=b+c", {b'a': [b'a b'], b'b': [b'b c']}),
+ (b"a=1;a=2", {b'a': [b'1', b'2']}),
+]
+
+parse_qs_test_cases_legacy = [
+ ("a=1;a=2&a=3", {'a': ['1', '2', '3']}),
+ ("a=1;b=2&c=3", {'a': ['1'], 'b': ['2'], 'c': ['3']}),
+ ("a=1&b=2&c=3;", {'a': ['1'], 'b': ['2'], 'c': ['3']}),
+ (b"a=1;a=2&a=3", {b'a': [b'1', b'2', b'3']}),
+ (b"a=1;b=2&c=3", {b'a': [b'1'], b'b': [b'2'], b'c': [b'3']}),
+ (b"a=1&b=2&c=3;", {b'a': [b'1'], b'b': [b'2'], b'c': [b'3']}),
+]
+
+parse_qs_test_cases_warn = [
(";a=b", {';a': ['b']}),
("a=a+b;b=b+c", {'a': ['a b;b=b c']}),
(b";a=b", {b';a': [b'b']}),
(b"a=a+b;b=b+c", {b'a':[ b'a b;b=b c']}),
+ ("a=1;a=2&a=3", {'a': ['1;a=2', '3']}),
+ (b"a=1;a=2&a=3", {b'a': [b'1;a=2', b'3']}),
]
class UrlParseTestCase(unittest.TestCase):
@@ -123,23 +179,57 @@ class UrlParseTestCase(unittest.TestCase):
def test_qsl(self):
for orig, expect in parse_qsl_test_cases:
- result = urllib.parse.parse_qsl(orig, keep_blank_values=True)
+ result = urllib.parse.parse_qsl(orig, keep_blank_values=True, separator="&")
self.assertEqual(result, expect, "Error parsing %r" % orig)
expect_without_blanks = [v for v in expect if len(v[1])]
- result = urllib.parse.parse_qsl(orig, keep_blank_values=False)
+ result = urllib.parse.parse_qsl(orig, keep_blank_values=False, separator="&")
self.assertEqual(result, expect_without_blanks,
"Error parsing %r" % orig)
def test_qs(self):
for orig, expect in parse_qs_test_cases:
- result = urllib.parse.parse_qs(orig, keep_blank_values=True)
+ result = urllib.parse.parse_qs(orig, keep_blank_values=True, separator="&")
self.assertEqual(result, expect, "Error parsing %r" % orig)
expect_without_blanks = {v: expect[v]
for v in expect if len(expect[v][0])}
- result = urllib.parse.parse_qs(orig, keep_blank_values=False)
+ result = urllib.parse.parse_qs(orig, keep_blank_values=False, separator="&")
self.assertEqual(result, expect_without_blanks,
"Error parsing %r" % orig)
+ def test_qs_default_warn(self):
+ for orig, expect in parse_qs_test_cases_warn:
+ with self.subTest(orig=orig, expect=expect):
+ with catch_warnings(record=True) as w:
+ result = urllib.parse.parse_qs(orig, keep_blank_values=True)
+ self.assertEqual(result, expect, "Error parsing %r" % orig)
+ self.assertEqual(len(w), 1)
+ self.assertEqual(w[0].category, urllib.parse._QueryStringSeparatorWarning)
+
+ def test_qsl_default_warn(self):
+ for orig, expect in parse_qsl_test_cases_warn:
+ with self.subTest(orig=orig, expect=expect):
+ with catch_warnings(record=True) as w:
+ result = urllib.parse.parse_qsl(orig, keep_blank_values=True)
+ self.assertEqual(result, expect, "Error parsing %r" % orig)
+ self.assertEqual(len(w), 1)
+ self.assertEqual(w[0].category, urllib.parse._QueryStringSeparatorWarning)
+
+ def test_default_qs_no_warnings(self):
+ for orig, expect in parse_qs_test_cases:
+ with self.subTest(orig=orig, expect=expect):
+ with catch_warnings(record=True) as w:
+ result = urllib.parse.parse_qs(orig, keep_blank_values=True)
+ self.assertEqual(result, expect, "Error parsing %r" % orig)
+ self.assertEqual(len(w), 0)
+
+ def test_default_qsl_no_warnings(self):
+ for orig, expect in parse_qsl_test_cases:
+ with self.subTest(orig=orig, expect=expect):
+ with catch_warnings(record=True) as w:
+ result = urllib.parse.parse_qsl(orig, keep_blank_values=True)
+ self.assertEqual(result, expect, "Error parsing %r" % orig)
+ self.assertEqual(len(w), 0)
+
def test_roundtrips(self):
str_cases = [
('file:///tmp/junk.txt',
@@ -919,8 +1009,8 @@ class UrlParseTestCase(unittest.TestCase):
def test_parse_qsl_max_num_fields(self):
with self.assertRaises(ValueError):
- urllib.parse.parse_qs('&'.join(['a=a']*11), max_num_fields=10)
- urllib.parse.parse_qs('&'.join(['a=a']*10), max_num_fields=10)
+ urllib.parse.parse_qs('&'.join(['a=a']*11), max_num_fields=10, separator='&')
+ urllib.parse.parse_qs('&'.join(['a=a']*10), max_num_fields=10, separator='&')
def test_parse_qs_separator(self):
parse_qs_semicolon_cases = [
@@ -964,6 +1054,136 @@ class UrlParseTestCase(unittest.TestCase):
self.assertEqual(result_bytes, expect, "Error parsing %r" % orig)
+ @contextlib.contextmanager
+ def _qsl_sep_config(self, sep):
+ """Context for the given parse_qsl default separator configured in config file"""
+ old_filename = urllib.parse._QS_SEPARATOR_CONFIG_FILENAME
+ urllib.parse._default_qs_separator = None
+ try:
+ with tempfile.TemporaryDirectory() as tmpdirname:
+ filename = os.path.join(tmpdirname, 'conf.cfg')
+ with open(filename, 'w') as file:
+ file.write(f'[parse_qs]\n')
+ file.write(f'PYTHON_URLLIB_QS_SEPARATOR = {sep}')
+ urllib.parse._QS_SEPARATOR_CONFIG_FILENAME = filename
+ yield
+ finally:
+ urllib.parse._QS_SEPARATOR_CONFIG_FILENAME = old_filename
+ urllib.parse._default_qs_separator = None
+
+ def test_parse_qs_separator_semicolon(self):
+ for orig, expect in parse_qs_test_cases_semicolon:
+ with self.subTest(orig=orig, expect=expect, method='arg'):
+ result = urllib.parse.parse_qs(orig, separator=';')
+ self.assertEqual(result, expect, "Error parsing %r" % orig)
+ with self.subTest(orig=orig, expect=expect, method='env'):
+ with EnvironmentVarGuard() as environ, catch_warnings(record=True) as w:
+ environ['PYTHON_URLLIB_QS_SEPARATOR'] = ';'
+ result = urllib.parse.parse_qs(orig)
+ self.assertEqual(result, expect, "Error parsing %r" % orig)
+ self.assertEqual(len(w), 0)
+ with self.subTest(orig=orig, expect=expect, method='conf'):
+ with self._qsl_sep_config(';'), catch_warnings(record=True) as w:
+ result = urllib.parse.parse_qs(orig)
+ self.assertEqual(result, expect, "Error parsing %r" % orig)
+ self.assertEqual(len(w), 0)
+
+ def test_parse_qsl_separator_semicolon(self):
+ for orig, expect in parse_qsl_test_cases_semicolon:
+ with self.subTest(orig=orig, expect=expect, method='arg'):
+ result = urllib.parse.parse_qsl(orig, separator=';')
+ self.assertEqual(result, expect, "Error parsing %r" % orig)
+ with self.subTest(orig=orig, expect=expect, method='env'):
+ with EnvironmentVarGuard() as environ, catch_warnings(record=True) as w:
+ environ['PYTHON_URLLIB_QS_SEPARATOR'] = ';'
+ result = urllib.parse.parse_qsl(orig)
+ self.assertEqual(result, expect, "Error parsing %r" % orig)
+ self.assertEqual(len(w), 0)
+ with self.subTest(orig=orig, expect=expect, method='conf'):
+ with self._qsl_sep_config(';'), catch_warnings(record=True) as w:
+ result = urllib.parse.parse_qsl(orig)
+ self.assertEqual(result, expect, "Error parsing %r" % orig)
+ self.assertEqual(len(w), 0)
+
+ def test_parse_qs_separator_legacy(self):
+ for orig, expect in parse_qs_test_cases_legacy:
+ with self.subTest(orig=orig, expect=expect, method='env'):
+ with EnvironmentVarGuard() as environ, catch_warnings(record=True) as w:
+ environ['PYTHON_URLLIB_QS_SEPARATOR'] = 'legacy'
+ result = urllib.parse.parse_qs(orig)
+ self.assertEqual(result, expect, "Error parsing %r" % orig)
+ self.assertEqual(len(w), 0)
+ with self.subTest(orig=orig, expect=expect, method='conf'):
+ with self._qsl_sep_config('legacy'), catch_warnings(record=True) as w:
+ result = urllib.parse.parse_qs(orig)
+ self.assertEqual(result, expect, "Error parsing %r" % orig)
+ self.assertEqual(len(w), 0)
+
+ def test_parse_qsl_separator_legacy(self):
+ for orig, expect in parse_qsl_test_cases_legacy:
+ with self.subTest(orig=orig, expect=expect, method='env'):
+ with EnvironmentVarGuard() as environ, catch_warnings(record=True) as w:
+ environ['PYTHON_URLLIB_QS_SEPARATOR'] = 'legacy'
+ result = urllib.parse.parse_qsl(orig)
+ self.assertEqual(result, expect, "Error parsing %r" % orig)
+ self.assertEqual(len(w), 0)
+ with self.subTest(orig=orig, expect=expect, method='conf'):
+ with self._qsl_sep_config('legacy'), catch_warnings(record=True) as w:
+ result = urllib.parse.parse_qsl(orig)
+ self.assertEqual(result, expect, "Error parsing %r" % orig)
+ self.assertEqual(len(w), 0)
+
+ def test_parse_qs_separator_bad_value_env_or_config(self):
+ for bad_sep in '', 'abc', 'safe', '&;', 'SEP':
+ with self.subTest(bad_sep, method='env'):
+ with EnvironmentVarGuard() as environ, catch_warnings(record=True) as w:
+ environ['PYTHON_URLLIB_QS_SEPARATOR'] = bad_sep
+ with self.assertRaises(ValueError):
+ urllib.parse.parse_qsl('a=1;b=2')
+ with self.subTest(bad_sep, method='conf'):
+ with self._qsl_sep_config('bad_sep'), catch_warnings(record=True) as w:
+ with self.assertRaises(ValueError):
+ urllib.parse.parse_qsl('a=1;b=2')
+
+ def test_parse_qs_separator_bad_value_arg(self):
+ for bad_sep in True, {}, '':
+ with self.subTest(bad_sep):
+ with self.assertRaises(ValueError):
+ urllib.parse.parse_qsl('a=1;b=2', separator=bad_sep)
+
+ def test_parse_qs_separator_num_fields(self):
+ for qs, sep in (
+ ('a&b&c', '&'),
+ ('a;b;c', ';'),
+ ('a&b;c', 'legacy'),
+ ):
+ with self.subTest(qs=qs, sep=sep):
+ with EnvironmentVarGuard() as environ, catch_warnings(record=True) as w:
+ if sep != 'legacy':
+ with self.assertRaises(ValueError):
+ urllib.parse.parse_qsl(qs, separator=sep, max_num_fields=2)
+ if sep:
+ environ['PYTHON_URLLIB_QS_SEPARATOR'] = sep
+ with self.assertRaises(ValueError):
+ urllib.parse.parse_qsl(qs, max_num_fields=2)
+
+ def test_parse_qs_separator_priority(self):
+ # env variable trumps config file
+ with self._qsl_sep_config('~'), EnvironmentVarGuard() as environ:
+ environ['PYTHON_URLLIB_QS_SEPARATOR'] = '!'
+ result = urllib.parse.parse_qs('a=1!b=2~c=3')
+ self.assertEqual(result, {'a': ['1'], 'b': ['2~c=3']})
+ # argument trumps config file
+ with self._qsl_sep_config('~'):
+ result = urllib.parse.parse_qs('a=1$b=2~c=3', separator='$')
+ self.assertEqual(result, {'a': ['1'], 'b': ['2~c=3']})
+ # argument trumps env variable
+ with EnvironmentVarGuard() as environ:
+ environ['PYTHON_URLLIB_QS_SEPARATOR'] = '~'
+ result = urllib.parse.parse_qs('a=1$b=2~c=3', separator='$')
+ self.assertEqual(result, {'a': ['1'], 'b': ['2~c=3']})
+
+
def test_urlencode_sequences(self):
# Other tests incidentally urlencode things; test non-covered cases:
# Sequence and object values.
diff --git a/Lib/urllib/parse.py b/Lib/urllib/parse.py
index f0d9d4d..70fc268 100644
--- a/Lib/urllib/parse.py
+++ b/Lib/urllib/parse.py
@@ -28,6 +28,7 @@ test_urlparse.py provides a good indicator of parsing behavior.
"""
import re
+import os
import sys
import collections
import warnings
@@ -660,7 +661,7 @@ def unquote(string, encoding='utf-8', errors='replace'):
def parse_qs(qs, keep_blank_values=False, strict_parsing=False,
- encoding='utf-8', errors='replace', max_num_fields=None, separator='&'):
+ encoding='utf-8', errors='replace', max_num_fields=None, separator=None):
"""Parse a query given as a string argument.
Arguments:
@@ -700,9 +701,16 @@ def parse_qs(qs, keep_blank_values=False, strict_parsing=False,
parsed_result[name] = [value]
return parsed_result
+class _QueryStringSeparatorWarning(RuntimeWarning):
+ """Warning for using default `separator` in parse_qs or parse_qsl"""
+
+# The default "separator" for parse_qsl can be specified in a config file.
+# It's cached after first read.
+_QS_SEPARATOR_CONFIG_FILENAME = '/etc/python/urllib.cfg'
+_default_qs_separator = None
def parse_qsl(qs, keep_blank_values=False, strict_parsing=False,
- encoding='utf-8', errors='replace', max_num_fields=None, separator='&'):
+ encoding='utf-8', errors='replace', max_num_fields=None, separator=None):
"""Parse a query given as a string argument.
Arguments:
@@ -731,20 +739,78 @@ def parse_qsl(qs, keep_blank_values=False, strict_parsing=False,
Returns a list, as G-d intended.
"""
qs, _coerce_result = _coerce_args(qs)
- separator, _ = _coerce_args(separator)
- if not separator or (not isinstance(separator, (str, bytes))):
+ if isinstance(separator, bytes):
+ separator = separator.decode('ascii')
+
+ if (not separator or (not isinstance(separator, (str, bytes)))) and separator is not None:
raise ValueError("Separator must be of type string or bytes.")
+ # Used when both "&" and ";" act as separators. (Need a non-string value.)
+ _legacy = object()
+
+ if separator is None:
+ global _default_qs_separator
+ separator = _default_qs_separator
+ envvar_name = 'PYTHON_URLLIB_QS_SEPARATOR'
+ if separator is None:
+ # Set default separator from environment variable
+ separator = os.environ.get(envvar_name)
+ config_source = 'environment variable'
+ if separator is None:
+ # Set default separator from the configuration file
+ try:
+ file = open(_QS_SEPARATOR_CONFIG_FILENAME)
+ except FileNotFoundError:
+ pass
+ else:
+ with file:
+ import configparser
+ config = configparser.ConfigParser(
+ interpolation=None,
+ comment_prefixes=('#', ),
+ )
+ config.read_file(file)
+ separator = config.get('parse_qs', envvar_name, fallback=None)
+ _default_qs_separator = separator
+ config_source = _QS_SEPARATOR_CONFIG_FILENAME
+ if separator is None:
+ # The default is '&', but warn if not specified explicitly
+ if ';' in qs:
+ from warnings import warn
+ warn("The default separator of urllib.parse.parse_qsl and "
+ + "parse_qs was changed to '&' to avoid a web cache "
+ + "poisoning issue (CVE-2021-23336). "
+ + "By default, semicolons no longer act as query field "
+ + "separators. "
+ + "See https://access.redhat.com/articles/5860431 for "
+ + "more details.",
+ _QueryStringSeparatorWarning, stacklevel=2)
+ separator = '&'
+ elif separator == 'legacy':
+ separator = _legacy
+ elif len(separator) != 1:
+ raise ValueError(
+ f'{envvar_name} (from {config_source}) must contain '
+ + '1 character, or "legacy". See '
+ + 'https://access.redhat.com/articles/5860431 for more details.'
+ )
+
# If max_num_fields is defined then check that the number of fields
# is less than max_num_fields. This prevents a memory exhaustion DOS
# attack via post bodies with many fields.
if max_num_fields is not None:
- num_fields = 1 + qs.count(separator)
+ if separator is _legacy:
+ num_fields = 1 + qs.count('&') + qs.count(';')
+ else:
+ num_fields = 1 + qs.count(separator)
if max_num_fields < num_fields:
raise ValueError('Max number of fields exceeded')
- pairs = [s1 for s1 in qs.split(separator)]
+ if separator is _legacy:
+ pairs = [s2 for s1 in qs.split('&') for s2 in s1.split(';')]
+ else:
+ pairs = [s1 for s1 in qs.split(separator)]
r = []
for name_value in pairs:
if not name_value and not strict_parsing:
--
2.31.1
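
A sketch of the resulting behaviour, assuming no separator is configured via PYTHON_URLLIB_QS_SEPARATOR or /etc/python/urllib.cfg: an explicit separator never warns, and the default warns once when the input contains ';':

import warnings
from urllib.parse import parse_qs

print(parse_qs('a=1;b=2', separator=';'))   # {'a': ['1'], 'b': ['2']}
print(parse_qs('a=1&b=2'))                  # {'a': ['1'], 'b': ['2']}, no warning

with warnings.catch_warnings(record=True) as caught:
    warnings.simplefilter('always')
    parse_qs('a=1;b=2')                     # default '&': parsed as {'a': ['1;b=2']}
print(caught[0].category.__name__)          # _QueryStringSeparatorWarning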

@@ -0,0 +1,36 @@
diff --git a/Lib/test/test_minidom.py b/Lib/test/test_minidom.py
index 06c9107..6c3f5a8 100644
--- a/Lib/test/test_minidom.py
+++ b/Lib/test/test_minidom.py
@@ -1149,14 +1149,11 @@ class MinidomTest(unittest.TestCase):
# Verify that character decoding errors raise exceptions instead
# of crashing
- if pyexpat.version_info >= (2, 4, 5):
- self.assertRaises(ExpatError, parseString,
- b'<fran\xe7ais></fran\xe7ais>')
- self.assertRaises(ExpatError, parseString,
- b'<franais>Comment \xe7a va ? Tr\xe8s bien ?</franais>')
- else:
- self.assertRaises(UnicodeDecodeError, parseString,
- b'<fran\xe7ais>Comment \xe7a va ? Tr\xe8s bien ?</fran\xe7ais>')
+
+ self.assertRaises(ExpatError, parseString,
+ b'<fran\xe7ais></fran\xe7ais>')
+ self.assertRaises(ExpatError, parseString,
+ b'<franais>Comment \xe7a va ? Tr\xe8s bien ?</franais>')
doc.unlink()
@@ -1601,10 +1598,7 @@ class MinidomTest(unittest.TestCase):
self.confirm(doc2.namespaceURI == xml.dom.EMPTY_NAMESPACE)
def testExceptionOnSpacesInXMLNSValue(self):
- if pyexpat.version_info >= (2, 4, 5):
- context = self.assertRaisesRegex(ExpatError, 'syntax error')
- else:
- context = self.assertRaisesRegex(ValueError, 'Unsupported syntax')
+ context = self.assertRaisesRegex(ExpatError, 'syntax error')
with context:
parseString('<element xmlns:abc="http:abc.com/de f g/hi/j k"><abc:foo /></element>')

@@ -0,0 +1,55 @@
"""Checks if all *.pyc files have later mtime than their *.py files."""
import os
import sys
from importlib.util import cache_from_source
from pathlib import Path
RPM_BUILD_ROOT = os.environ.get('RPM_BUILD_ROOT', '')
# ...cpython-3X.pyc
# ...cpython-3X.opt-1.pyc
# ...cpython-3X.opt-2.pyc
LEVELS = (None, 1, 2)
# list of globs of test and other files that we expect not to have bytecode
not_compiled = [
'/usr/bin/*',
'*/test/bad_coding.py',
'*/test/bad_coding2.py',
'*/test/badsyntax_*.py',
'*/lib2to3/tests/data/bom.py',
'*/lib2to3/tests/data/crlf.py',
'*/lib2to3/tests/data/different_encoding.py',
'*/lib2to3/tests/data/false_encoding.py',
'*/lib2to3/tests/data/py2_test_grammar.py',
'*.debug-gdb.py',
]
def bytecode_expected(path):
path = Path(path[len(RPM_BUILD_ROOT):])
for glob in not_compiled:
if path.match(glob):
return False
return True
failed = 0
compiled = (path for path in sys.argv[1:] if bytecode_expected(path))
for path in compiled:
to_check = (cache_from_source(path, optimization=opt) for opt in LEVELS)
f_mtime = os.path.getmtime(path)
for pyc in to_check:
c_mtime = os.path.getmtime(pyc)
if c_mtime < f_mtime:
print('Failed bytecompilation timestamps check: '
f'Bytecode file {pyc} is older than source file {path}',
file=sys.stderr)
failed += 1
if failed:
print(f'\n{failed} files failed bytecompilation timestamps check.',
file=sys.stderr)
sys.exit(1)
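
The check relies on the standard source-to-bytecode path mapping; a small illustration follows (the path is an example only):

from importlib.util import cache_from_source

print(cache_from_source('/usr/lib64/python3.8/os.py', optimization=1))
# /usr/lib64/python3.8/__pycache__/os.cpython-38.opt-1.pyc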

@@ -0,0 +1,28 @@
#! /bin/bash -ex
# Download a release of Python (if missing) and remove .exe files from it
version=$1
if [ -z "${version}" ]; then
echo "Usage: $0 VERSION" >& 2
echo "" >& 2
echo "example: $0 3.6.6" >& 2
exit 1
fi
versionedname=Python-${version}
orig_archive=${versionedname}.tar.xz
new_archive=${versionedname}-noexe.tar.xz
if [ ! -e ${orig_archive} ]; then
wget -N https://www.python.org/ftp/python/${version}/${orig_archive}
fi
deleted_names=$(tar --list -Jf ${orig_archive} | grep '\.exe$')
# tar --delete does not operate on compressed archives, so do
# xz compression/decompression explicitly
xz --decompress --stdout ${orig_archive} | \
tar --delete -v ${deleted_names} | \
xz --compress --stdout -3 -T0 > ${new_archive}

@@ -0,0 +1,35 @@
<?xml version="1.0" encoding="UTF-8"?>
<!-- Copyright 2017 Zbigniew Jędrzejewski-Szmek -->
<application>
<id type="desktop">idle3.desktop</id>
<name>IDLE3</name>
<metadata_licence>CC0</metadata_licence>
<project_license>Python-2.0</project_license>
<summary>Python 3 Integrated Development and Learning Environment</summary>
<description>
<p>
IDLE is Python's Integrated Development and Learning Environment.
The GUI is uniform between Windows, Unix, and Mac OS X.
IDLE provides an easy way to start writing, running, and debugging
Python code.
</p>
<p>
IDLE is written in pure Python, and uses the tkinter GUI toolkit.
It provides:
</p>
<ul>
<li>a Python shell window (interactive interpreter) with colorizing of code input, output, and error messages,</li>
<li>a multi-window text editor with multiple undo, Python colorizing, smart indent, call tips, auto completion, and other features,</li>
<li>search within any window, replace within editor windows, and search through multiple files (grep),</li>
<li>a debugger with persistent breakpoints, stepping, and viewing of global and local namespaces.</li>
</ul>
</description>
<url type="homepage">https://docs.python.org/3/library/idle.html</url>
<screenshots>
<screenshot type="default">http://in.waw.pl/~zbyszek/fedora/idle3-appdata/idle3-main-window.png</screenshot>
<screenshot>http://in.waw.pl/~zbyszek/fedora/idle3-appdata/idle3-class-browser.png</screenshot>
<screenshot>http://in.waw.pl/~zbyszek/fedora/idle3-appdata/idle3-code-viewer.png</screenshot>
</screenshots>
<update_contact>zbyszek@in.waw.pl</update_contact>
</application>

@@ -0,0 +1,11 @@
[Desktop Entry]
Version=1.0
Name=IDLE 3
Comment=Python 3 Integrated Development and Learning Environment
Exec=idle3 %F
TryExec=idle3
Terminal=false
Type=Application
Icon=idle3
Categories=Development;IDE;
MimeType=text/x-python;

@@ -0,0 +1,6 @@
%__python3 /usr/bin/python3.8
%python3_pkgversion 38
%py3_install_wheel() %{expand:\\\
CFLAGS="%{optflags}" %{__python3} -m pip install -I dist/%{1} --root %{buildroot} --no-deps
}

File diff suppressed because it is too large.