From 0f5f52aa262447e389d0115e364e6ce89c06fe28 Mon Sep 17 00:00:00 2001 From: MSVSphere Packaging Team Date: Thu, 31 Aug 2023 12:56:59 +0300 Subject: [PATCH 1/6] import sos-4.5.6-1.el9 --- .gitignore | 2 +- .sos.metadata | 2 +- .../sos-bz2207562-clean-obfuscate-mac.patch | 57 ------------ ...-bz2218279-clean-respect-permissions.patch | 93 ------------------- SOURCES/sos-bz2226682-ovn-ic-db-files.patch | 42 +++++++++ SPECS/sos.spec | 12 ++- 6 files changed, 51 insertions(+), 157 deletions(-) delete mode 100644 SOURCES/sos-bz2207562-clean-obfuscate-mac.patch delete mode 100644 SOURCES/sos-bz2218279-clean-respect-permissions.patch create mode 100644 SOURCES/sos-bz2226682-ovn-ic-db-files.patch diff --git a/.gitignore b/.gitignore index 070b750..85aafbe 100644 --- a/.gitignore +++ b/.gitignore @@ -1,2 +1,2 @@ -SOURCES/sos-4.5.5.tar.gz +SOURCES/sos-4.5.6.tar.gz SOURCES/sos-audit-0.3.tgz diff --git a/.sos.metadata b/.sos.metadata index 9b6751a..e0a9368 100644 --- a/.sos.metadata +++ b/.sos.metadata @@ -1,2 +1,2 @@ -cc86572817d14115c3dc5a942c79e89fa17514c4 SOURCES/sos-4.5.5.tar.gz +8f38e24add48538585441452d85e2ea4faa04c34 SOURCES/sos-4.5.6.tar.gz 9d478b9f0085da9178af103078bbf2fd77b0175a SOURCES/sos-audit-0.3.tgz diff --git a/SOURCES/sos-bz2207562-clean-obfuscate-mac.patch b/SOURCES/sos-bz2207562-clean-obfuscate-mac.patch deleted file mode 100644 index 51a866d..0000000 --- a/SOURCES/sos-bz2207562-clean-obfuscate-mac.patch +++ /dev/null @@ -1,57 +0,0 @@ -From 59c2660584734af92eca2eae31af3fbf5557f853 Mon Sep 17 00:00:00 2001 -From: Jan Jansky -Date: Mon, 10 Jul 2023 13:10:22 +0200 -Subject: [PATCH] [clean] Properly obfuscate MAC addresses - -Some of mac addresses was not properly obfuscated because -some collected data contains mac addresses in format -01: and parser was not ready for that. - -Also added mapper which will obfuscate mac address in case -it is in format with _ instead of : as for example - -00_50_56_87_5d_01 - -instead of - -00:50:56:87:5d:01 - -Format with _ is used for example by vmware plugin. - -Resolves: #3302 - -Signed-off-by: Jan Jansky ---- - sos/cleaner/mappings/mac_map.py | 2 +- - sos/cleaner/parsers/mac_parser.py | 4 ++-- - 2 files changed, 3 insertions(+), 3 deletions(-) - -diff --git a/sos/cleaner/mappings/mac_map.py b/sos/cleaner/mappings/mac_map.py -index 334a6681..4ccba25a 100644 ---- a/sos/cleaner/mappings/mac_map.py -+++ b/sos/cleaner/mappings/mac_map.py -@@ -75,5 +75,5 @@ class SoSMacMap(SoSMap): - if re.match('(([0-9a-fA-F]{4}:){3}([0-9a-fA-F]){4})', item): - return self.mac6_quad_template % hextets - # match 48-bit IPv4 MAC addresses -- if re.match('([0-9a-fA-F]:?){12}', item): -+ if re.match('([0-9a-fA-F][:_]?){12}', item): - return self.mac_template % hextets -diff --git a/sos/cleaner/parsers/mac_parser.py b/sos/cleaner/parsers/mac_parser.py -index 88b0ac2e..4e790018 100644 ---- a/sos/cleaner/parsers/mac_parser.py -+++ b/sos/cleaner/parsers/mac_parser.py -@@ -25,8 +25,8 @@ IPV6_REG_4HEX = ( - ) - # aa:bb:cc:dd:ee:ff avoiding ipv6 substring matches - IPV4_REG = ( -- r'((? -Date: Wed, 28 Jun 2023 11:49:56 +0200 -Subject: [PATCH 1/2] [clean] Respect permissions of sanitised files - -When copying files we applied a substitution in, we must replace just -original file content (shutil.copyfile) and not also its stat data -(shutil.copy). 
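The practical difference, shown as a small standalone sketch (not part of the patch; paths and modes are made up for illustration): shutil.copy() also applies the source file's permission bits to the destination, while shutil.copyfile() rewrites only the contents, so a sanitised file keeps its original mode.

    # Standalone illustration on a POSIX system; not part of sos itself.
    import os
    import shutil
    import stat
    import tempfile

    with tempfile.TemporaryDirectory() as tmp:
        src = os.path.join(tmp, "sanitised.tmp")   # cleaner's temp copy
        dst = os.path.join(tmp, "original.conf")   # file being replaced
        for path, text in ((src, "obfuscated"), (dst, "secret")):
            with open(path, "w") as fobj:
                fobj.write(text)
        os.chmod(src, 0o600)   # private temp file
        os.chmod(dst, 0o644)   # world-readable original

        shutil.copyfile(src, dst)   # contents replaced, mode untouched
        assert stat.S_IMODE(os.stat(dst).st_mode) == 0o644

        shutil.copy(src, dst)       # copy() also carries the 0o600 mode over
        assert stat.S_IMODE(os.stat(dst).st_mode) == 0o600
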
- -Resolves: #3292 - -Signed-off-by: Pavel Moravec ---- - sos/cleaner/__init__.py | 2 +- - 1 file changed, 1 insertion(+), 1 deletion(-) - -diff --git a/sos/cleaner/__init__.py b/sos/cleaner/__init__.py -index feeedf66..fbcaa9c3 100644 ---- a/sos/cleaner/__init__.py -+++ b/sos/cleaner/__init__.py -@@ -778,7 +778,7 @@ third party. - % (short_name, err), caller=arc_name) - tfile.seek(0) - if subs: -- shutil.copy(tfile.name, filename) -+ shutil.copyfile(tfile.name, filename) - tfile.close() - - _ob_short_name = self.obfuscate_string(short_name.split('/')[-1]) --- -2.31.1 - - -From fc1489a621108d3613d3337489a64950e52d77c3 Mon Sep 17 00:00:00 2001 -From: Pavel Moravec -Date: Thu, 29 Jun 2023 22:57:46 +0200 -Subject: [PATCH 2/2] [tests] add test for #3292 - -Add a test that cleaner keeps permissions of a sanitised file - -Relevant to: #3292 - -Signed-off-by: Pavel Moravec ---- - .../basic_function_tests/report_with_mask.py | 18 ++++++++++++++++++ - 1 file changed, 18 insertions(+) - -diff --git a/tests/cleaner_tests/basic_function_tests/report_with_mask.py b/tests/cleaner_tests/basic_function_tests/report_with_mask.py -index 7c4d3905..baee836a 100644 ---- a/tests/cleaner_tests/basic_function_tests/report_with_mask.py -+++ b/tests/cleaner_tests/basic_function_tests/report_with_mask.py -@@ -9,6 +9,7 @@ - from sos_tests import StageOneReportTest, StageTwoReportTest - - import re -+from os import stat - - - class ReportWithMask(StageOneReportTest): -@@ -18,6 +19,17 @@ class ReportWithMask(StageOneReportTest): - """ - - sos_cmd = '--mask -o host,networking' -+ hosts_obfuscated = None -+ -+ def pre_sos_setup(self): -+ # obfuscate a random word from /etc/hosts and ensure the updated -+ # sanitised file has same permissions (a+r) -+ try: -+ self.hosts_obfuscated = open('/etc/hosts').read().strip('#\n').split()[-1] -+ except (FileNotFoundError, IndexError) as e: -+ self.warning(f"Unable to process /etc/hosts: {e}") -+ if self.hosts_obfuscated: -+ self.sos_cmd += f' --keywords={self.hosts_obfuscated}' - - def test_mask_was_run(self): - self.assertOutputContains('Beginning obfuscation') -@@ -53,6 +65,12 @@ class ReportWithMask(StageOneReportTest): - mac = line.strip().split()[1] - assert mac.startswith('53:4f:53'), "Found unobfuscated mac addr %s" % mac - -+ def test_perms_unchanged_on_modified_file(self): -+ if self.hosts_obfuscated: -+ imode_orig = stat('/etc/hosts').st_mode -+ imode_obfuscated = stat(self.get_name_in_archive('etc/hosts')).st_mode -+ self.assertEqual(imode_orig, imode_obfuscated) -+ - - class ReportWithCleanedKeywords(StageOneReportTest): - """Testing for obfuscated keywords provided by the user --- -2.31.1 - diff --git a/SOURCES/sos-bz2226682-ovn-ic-db-files.patch b/SOURCES/sos-bz2226682-ovn-ic-db-files.patch new file mode 100644 index 0000000..82a684d --- /dev/null +++ b/SOURCES/sos-bz2226682-ovn-ic-db-files.patch @@ -0,0 +1,42 @@ +From 0af74a8b24ec9dab0ca0089d5b834ab9908173ac Mon Sep 17 00:00:00 2001 +From: Periyasamy Palanisamy +Date: Fri, 21 Jul 2023 15:03:01 +0530 +Subject: [PATCH] Collect db files for ovn interconnect environment + +This updates openshift_ovn plugin to collect ovn db files +when it is running with interconnect configuration. 
+ +Signed-off-by: Periyasamy Palanisamy +--- + sos/report/plugins/openshift_ovn.py | 10 ++++++++++ + 1 file changed, 10 insertions(+) + +diff --git a/sos/report/plugins/openshift_ovn.py b/sos/report/plugins/openshift_ovn.py +index 98522b1ed6..d81fc97aae 100644 +--- a/sos/report/plugins/openshift_ovn.py ++++ b/sos/report/plugins/openshift_ovn.py +@@ -27,7 +27,13 @@ def setup(self): + "/var/log/openvswitch/libreswan.log", + "/var/log/openvswitch/ovs-monitor-ipsec.log" + ]) ++ # Collect ovn interconnect specific files if exists. ++ self.add_copy_spec([ ++ "/var/lib/ovn-ic/etc/ovnnb_db.db", ++ "/var/lib/ovn-ic/etc/ovnsb_db.db" ++ ]) + ++ # The ovn cluster/status is not valid anymore for interconnect setup. + self.add_cmd_output([ + 'ovn-appctl -t /var/run/ovn/ovnnb_db.ctl ' + + 'cluster/status OVN_Northbound', +@@ -38,6 +44,10 @@ def setup(self): + 'ovs-appctl -t /var/run/ovn/ovn-controller.*.ctl ' + + 'ct-zone-list'], + container='ovnkube-node') ++ # Collect ovs ct-zone-list directly on host for interconnect setup. ++ self.add_cmd_output([ ++ 'ovs-appctl -t /var/run/ovn-ic/ovn-controller.*.ctl ' + ++ 'ct-zone-list']) + self.add_cmd_output([ + 'ovs-appctl -t ovs-monitor-ipsec tunnels/show', + 'ipsec status', diff --git a/SPECS/sos.spec b/SPECS/sos.spec index cc5ebbd..b6439ea 100644 --- a/SPECS/sos.spec +++ b/SPECS/sos.spec @@ -4,8 +4,8 @@ Summary: A set of tools to gather troubleshooting information from a system Name: sos -Version: 4.5.5 -Release: 2%{?dist} +Version: 4.5.6 +Release: 1%{?dist} Group: Applications/System Source0: https://github.com/sosreport/sos/archive/%{version}/sos-%{version}.tar.gz Source1: sos-audit-%{auditversion}.tgz @@ -22,8 +22,7 @@ Recommends: python3-pexpect Recommends: python3-pyyaml Conflicts: vdsm < 4.40 Obsoletes: sos-collector <= 1.9 -Patch1: sos-bz2218279-clean-respect-permissions.patch -Patch2: sos-bz2207562-clean-obfuscate-mac.patch +Patch1: sos-bz2226682-ovn-ic-db-files.patch %description Sos is a set of tools that gathers information about system @@ -35,7 +34,6 @@ support technicians and developers. %setup -qn %{name}-%{version} %setup -T -D -a1 -q %patch1 -p1 -%patch2 -p1 %build %py3_build @@ -108,6 +106,10 @@ of the system. Currently storage and filesystem commands are audited. 
%changelog +* Thu Jul 27 2023 Pavel Moravec = 4.5.6-1 +- Collect db files for ovn interconnect environment + Resolves: bz2226682 + * Fri Jul 14 2023 Jan Jansky - 4.5.5-2 - Adding patch for cleaning mac addresses Resolves: bz2217943 From 88ae3337e1c329c87ced28e264dff43b9c577d22 Mon Sep 17 00:00:00 2001 From: MSVSphere Packaging Team Date: Wed, 27 Sep 2023 03:00:27 +0300 Subject: [PATCH 2/6] import sos-4.6.0-2.el9 --- .gitignore | 2 +- .sos.metadata | 2 +- SOURCES/sos-SUPDEV145-ovnkube-logs.patch | 126 ++++++++++++++++++++ SOURCES/sos-bz2226682-ovn-ic-db-files.patch | 42 ------- SPECS/sos.spec | 18 ++- 5 files changed, 143 insertions(+), 47 deletions(-) create mode 100644 SOURCES/sos-SUPDEV145-ovnkube-logs.patch delete mode 100644 SOURCES/sos-bz2226682-ovn-ic-db-files.patch diff --git a/.gitignore b/.gitignore index 85aafbe..5146444 100644 --- a/.gitignore +++ b/.gitignore @@ -1,2 +1,2 @@ -SOURCES/sos-4.5.6.tar.gz +SOURCES/sos-4.6.0.tar.gz SOURCES/sos-audit-0.3.tgz diff --git a/.sos.metadata b/.sos.metadata index e0a9368..f9d8147 100644 --- a/.sos.metadata +++ b/.sos.metadata @@ -1,2 +1,2 @@ -8f38e24add48538585441452d85e2ea4faa04c34 SOURCES/sos-4.5.6.tar.gz +90d8b664a4e0593d60357342bb5f73af9908e29d SOURCES/sos-4.6.0.tar.gz 9d478b9f0085da9178af103078bbf2fd77b0175a SOURCES/sos-audit-0.3.tgz diff --git a/SOURCES/sos-SUPDEV145-ovnkube-logs.patch b/SOURCES/sos-SUPDEV145-ovnkube-logs.patch new file mode 100644 index 0000000..e070948 --- /dev/null +++ b/SOURCES/sos-SUPDEV145-ovnkube-logs.patch @@ -0,0 +1,126 @@ +From 43714aa5aeb3dcb0dec17dd026ca5c394cc06afd Mon Sep 17 00:00:00 2001 +From: Periyasamy Palanisamy +Date: Fri, 11 Aug 2023 14:30:42 +0200 +Subject: [PATCH] Collect additional ovnkube node logs + +With Interconnect support in latest OVN-Kubernetes, ovnkube-nodes +logs grew large. This commit adds the ability to collect those +additional logs. + +Signed-off-by: Periyasamy Palanisamy +--- + sos/report/plugins/openshift_ovn.py | 3 ++- + 1 file changed, 2 insertions(+), 1 deletion(-) + +diff --git a/sos/report/plugins/openshift_ovn.py b/sos/report/plugins/openshift_ovn.py +index d81fc97aa..2d804e9ae 100644 +--- a/sos/report/plugins/openshift_ovn.py ++++ b/sos/report/plugins/openshift_ovn.py +@@ -30,7 +30,8 @@ def setup(self): + # Collect ovn interconnect specific files if exists. + self.add_copy_spec([ + "/var/lib/ovn-ic/etc/ovnnb_db.db", +- "/var/lib/ovn-ic/etc/ovnsb_db.db" ++ "/var/lib/ovn-ic/etc/ovnsb_db.db", ++ "/var/lib/ovn-ic/etc/libovsdb*log*" + ]) + + # The ovn cluster/status is not valid anymore for interconnect setup. +From e11a594f942f9ae98aeb644c573293b391050657 Mon Sep 17 00:00:00 2001 +From: Periyasamy Palanisamy +Date: Tue, 15 Aug 2023 11:47:20 +0200 +Subject: [PATCH] Collect ovn logs as much as possible + +The sosreport limits to collect logs at maximum of 25 MB in a given +collection passed into add_copy_spec method. so this may lead into +logs wouldn't have fully collected when user collected sos report +without --all-logs option. +Hence this commit ensures logs and dbs collected as much as possible +when --all-logs option is not specified. 
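For readability, this is the file-collection logic the commit arrives at, reconstructed from the hunks below as a plain (non-diff) excerpt. Only the copy-spec part of setup() is shown; the class attributes and add_cmd_output calls are omitted, and sizelimit is presumably in MB, given the 25 MB default mentioned above.

    from sos.report.plugins import Plugin, RedHatPlugin


    class OpenshiftOVN(Plugin, RedHatPlugin):
        """Excerpt of the openshift_ovn plugin after this commit."""

        plugin_name = "openshift_ovn"
        profiles = ('openshift',)

        def setup(self):
            all_logs = self.get_option("all_logs")

            # Core OVN databases and IPsec keys, capped via sizelimit.
            self.add_copy_spec([
                "/var/lib/ovn/etc/ovnnb_db.db",
                "/var/lib/ovn/etc/ovnsb_db.db",
                "/var/lib/openvswitch/etc/keys"
            ], sizelimit=300)

            # Interconnect-specific databases, if present.
            self.add_copy_spec([
                "/var/lib/ovn-ic/etc/ovnnb_db.db",
                "/var/lib/ovn-ic/etc/ovnsb_db.db"
            ], sizelimit=300)

            # Without --all-logs, cap the libovsdb logs; with --all-logs,
            # collect every rotation.
            if not all_logs:
                self.add_copy_spec([
                    "/var/lib/ovn-ic/etc/libovsdb.log",
                    "/var/lib/ovn-ic/etc/libovsdb*log.gz"
                ], sizelimit=100)
            else:
                self.add_copy_spec("/var/lib/ovn-ic/etc/libovsdb*log*")
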
+ +Signed-off-by: Periyasamy Palanisamy +--- + sos/report/plugins/openshift_ovn.py | 25 +++++++++++++++++-------- + 1 file changed, 17 insertions(+), 8 deletions(-) + +diff --git a/sos/report/plugins/openshift_ovn.py b/sos/report/plugins/openshift_ovn.py +index 2d804e9ae..347b15eea 100644 +--- a/sos/report/plugins/openshift_ovn.py ++++ b/sos/report/plugins/openshift_ovn.py +@@ -20,19 +20,28 @@ class OpenshiftOVN(Plugin, RedHatPlugin): + profiles = ('openshift',) + + def setup(self): ++ all_logs = self.get_option("all_logs") ++ + self.add_copy_spec([ + "/var/lib/ovn/etc/ovnnb_db.db", + "/var/lib/ovn/etc/ovnsb_db.db", +- "/var/lib/openvswitch/etc/keys", +- "/var/log/openvswitch/libreswan.log", +- "/var/log/openvswitch/ovs-monitor-ipsec.log" +- ]) +- # Collect ovn interconnect specific files if exists. ++ "/var/lib/openvswitch/etc/keys" ++ ], sizelimit=300) ++ ++ # Collect ovn interconnect specific db files if exists. + self.add_copy_spec([ + "/var/lib/ovn-ic/etc/ovnnb_db.db", +- "/var/lib/ovn-ic/etc/ovnsb_db.db", +- "/var/lib/ovn-ic/etc/libovsdb*log*" +- ]) ++ "/var/lib/ovn-ic/etc/ovnsb_db.db" ++ ], sizelimit=300) ++ ++ # Collect libovsdb logs in case of ovn interconnect setup. ++ if not all_logs: ++ self.add_copy_spec([ ++ "/var/lib/ovn-ic/etc/libovsdb.log", ++ "/var/lib/ovn-ic/etc/libovsdb*log.gz" ++ ], sizelimit=100) ++ else: ++ self.add_copy_spec("/var/lib/ovn-ic/etc/libovsdb*log*") + + # The ovn cluster/status is not valid anymore for interconnect setup. + self.add_cmd_output([ +From 7cd6f61fd15ae7fc93d62cca927204351cdc1322 Mon Sep 17 00:00:00 2001 +From: Periyasamy Palanisamy +Date: Wed, 30 Aug 2023 09:56:40 +0200 +Subject: [PATCH] Collect logs from ovnkube-controller container + +This enables ovn sos report plugin to collect logs ovnkube-controller +container because ovn-kubernetes now provides option to run both +ovnkube-node and ovnkube-controller in same container with this +PR https://github.com/ovn-org/ovn-kubernetes/pull/3807. + +Signed-off-by: Periyasamy Palanisamy +--- + sos/report/plugins/openshift_ovn.py | 7 ++++++- + 1 file changed, 6 insertions(+), 1 deletion(-) + +diff --git a/sos/report/plugins/openshift_ovn.py b/sos/report/plugins/openshift_ovn.py +index 347b15eea..cb48057d3 100644 +--- a/sos/report/plugins/openshift_ovn.py ++++ b/sos/report/plugins/openshift_ovn.py +@@ -16,7 +16,8 @@ class OpenshiftOVN(Plugin, RedHatPlugin): + """ + short_desc = 'Openshift OVN' + plugin_name = "openshift_ovn" +- containers = ('ovnkube-master', 'ovnkube-node', 'ovn-ipsec') ++ containers = ('ovnkube-master', 'ovnkube-node', 'ovn-ipsec', ++ 'ovnkube-controller') + profiles = ('openshift',) + + def setup(self): +@@ -54,6 +55,10 @@ def setup(self): + 'ovs-appctl -t /var/run/ovn/ovn-controller.*.ctl ' + + 'ct-zone-list'], + container='ovnkube-node') ++ self.add_cmd_output([ ++ 'ovs-appctl -t /var/run/ovn/ovn-controller.*.ctl ' + ++ 'ct-zone-list'], ++ container='ovnkube-controller') + # Collect ovs ct-zone-list directly on host for interconnect setup. 
+ self.add_cmd_output([ + 'ovs-appctl -t /var/run/ovn-ic/ovn-controller.*.ctl ' + diff --git a/SOURCES/sos-bz2226682-ovn-ic-db-files.patch b/SOURCES/sos-bz2226682-ovn-ic-db-files.patch deleted file mode 100644 index 82a684d..0000000 --- a/SOURCES/sos-bz2226682-ovn-ic-db-files.patch +++ /dev/null @@ -1,42 +0,0 @@ -From 0af74a8b24ec9dab0ca0089d5b834ab9908173ac Mon Sep 17 00:00:00 2001 -From: Periyasamy Palanisamy -Date: Fri, 21 Jul 2023 15:03:01 +0530 -Subject: [PATCH] Collect db files for ovn interconnect environment - -This updates openshift_ovn plugin to collect ovn db files -when it is running with interconnect configuration. - -Signed-off-by: Periyasamy Palanisamy ---- - sos/report/plugins/openshift_ovn.py | 10 ++++++++++ - 1 file changed, 10 insertions(+) - -diff --git a/sos/report/plugins/openshift_ovn.py b/sos/report/plugins/openshift_ovn.py -index 98522b1ed6..d81fc97aae 100644 ---- a/sos/report/plugins/openshift_ovn.py -+++ b/sos/report/plugins/openshift_ovn.py -@@ -27,7 +27,13 @@ def setup(self): - "/var/log/openvswitch/libreswan.log", - "/var/log/openvswitch/ovs-monitor-ipsec.log" - ]) -+ # Collect ovn interconnect specific files if exists. -+ self.add_copy_spec([ -+ "/var/lib/ovn-ic/etc/ovnnb_db.db", -+ "/var/lib/ovn-ic/etc/ovnsb_db.db" -+ ]) - -+ # The ovn cluster/status is not valid anymore for interconnect setup. - self.add_cmd_output([ - 'ovn-appctl -t /var/run/ovn/ovnnb_db.ctl ' + - 'cluster/status OVN_Northbound', -@@ -38,6 +44,10 @@ def setup(self): - 'ovs-appctl -t /var/run/ovn/ovn-controller.*.ctl ' + - 'ct-zone-list'], - container='ovnkube-node') -+ # Collect ovs ct-zone-list directly on host for interconnect setup. -+ self.add_cmd_output([ -+ 'ovs-appctl -t /var/run/ovn-ic/ovn-controller.*.ctl ' + -+ 'ct-zone-list']) - self.add_cmd_output([ - 'ovs-appctl -t ovs-monitor-ipsec tunnels/show', - 'ipsec status', diff --git a/SPECS/sos.spec b/SPECS/sos.spec index b6439ea..7f00775 100644 --- a/SPECS/sos.spec +++ b/SPECS/sos.spec @@ -4,8 +4,8 @@ Summary: A set of tools to gather troubleshooting information from a system Name: sos -Version: 4.5.6 -Release: 1%{?dist} +Version: 4.6.0 +Release: 2%{?dist} Group: Applications/System Source0: https://github.com/sosreport/sos/archive/%{version}/sos-%{version}.tar.gz Source1: sos-audit-%{auditversion}.tgz @@ -22,7 +22,7 @@ Recommends: python3-pexpect Recommends: python3-pyyaml Conflicts: vdsm < 4.40 Obsoletes: sos-collector <= 1.9 -Patch1: sos-bz2226682-ovn-ic-db-files.patch +Patch1: sos-SUPDEV145-ovnkube-logs.patch %description Sos is a set of tools that gathers information about system @@ -106,6 +106,18 @@ of the system. Currently storage and filesystem commands are audited. 
%changelog +* Fri Sep 01 2023 Pavel Moravec = 4.6.0-2 +- [openshift_ovn] Collect additional ovnkube node logs + Resolves: SUPDEV145 + +* Wed Aug 23 2023 Jan Jansky = 4.6.0-1 +- [ultrapath] Add new plugin for Huawei UltraPath + Resolves: bz2187407 +- [cleaner] Use data filter for extraction + Resolves: bz2217906 +- [discovery] Enable the plugin by containers + Resolves: bz2222134 + * Thu Jul 27 2023 Pavel Moravec = 4.5.6-1 - Collect db files for ovn interconnect environment Resolves: bz2226682 From c999bca5a3b9a81cfb281e2d3854ae93b1e62c22 Mon Sep 17 00:00:00 2001 From: MSVSphere Packaging Team Date: Fri, 3 Nov 2023 03:04:02 +0300 Subject: [PATCH 3/6] import sos-4.6.0-5.el9 --- SOURCES/sos-RHEL-13701-aap-passwords.patch | 98 ++++++++++++++++ .../sos-SUPDEV148-microshift-greenboot.patch | 108 ++++++++++++++++++ SPECS/sos.spec | 18 ++- 3 files changed, 223 insertions(+), 1 deletion(-) create mode 100644 SOURCES/sos-RHEL-13701-aap-passwords.patch create mode 100644 SOURCES/sos-SUPDEV148-microshift-greenboot.patch diff --git a/SOURCES/sos-RHEL-13701-aap-passwords.patch b/SOURCES/sos-RHEL-13701-aap-passwords.patch new file mode 100644 index 0000000..7d3caa0 --- /dev/null +++ b/SOURCES/sos-RHEL-13701-aap-passwords.patch @@ -0,0 +1,98 @@ +From c6ab24eb8e2bf02c75d0ffa8447032543eb4ea43 Mon Sep 17 00:00:00 2001 +From: "Dr. Jason Breitweg" +Date: Tue, 10 Oct 2023 09:50:29 +0200 +Subject: [PATCH] Fix dynaconf obfuscation and add AUTH_LDAP_BIND_PASSWORD + +Signed-off-by: Dr. Jason Breitweg + +Fixed style issues +Signed-off-by: Jason Breitweg jbreitwe@redhat.com + +Signed-off-by: Dr. Jason Breitweg + +Fixed yet more linting errors +Signed-off-by: Jason Breitweg jbreitwe@redhat.com + +Signed-off-by: Dr. Jason Breitweg +--- + sos/report/plugins/pulp.py | 9 ++++++--- + 1 file changed, 6 insertions(+), 3 deletions(-) + +diff --git a/sos/report/plugins/pulp.py b/sos/report/plugins/pulp.py +index df007168a..f5c762f48 100644 +--- a/sos/report/plugins/pulp.py ++++ b/sos/report/plugins/pulp.py +@@ -170,10 +170,13 @@ def postproc(self): + repl = r"\1********" + self.do_path_regex_sub("/etc/pulp(.*)(.json$)", jreg, repl) + +- # obfuscate SECRET_KEY = .. and 'PASSWORD': .. in dynaconf list output +- # and also in settings.py ++ # obfuscate SECRET_KEY = .., 'PASSWORD': .., ++ # and AUTH_LDAP_BIND_PASSWORD = .. ++ # in dynaconf list output and also in settings.py + # count with option that PASSWORD is with(out) quotes or in capitals +- key_pass_re = r"(SECRET_KEY\s*=|(password|PASSWORD)(\"|'|:)+)\s*(\S*)" ++ key_pass_re = r"((?:SECRET_KEY|AUTH_LDAP_BIND_PASSWORD)" \ ++ r"(?:\<.+\>)?(\s*=)?|(password|PASSWORD)" \ ++ r"(\"|'|:)+)\s*(\S*)" + repl = r"\1 ********" + self.do_path_regex_sub("/etc/pulp/settings.py", key_pass_re, repl) + self.do_cmd_output_sub("dynaconf list", key_pass_re, repl) +From 866abe6119e846e243d586b1e353a6585ed83899 Mon Sep 17 00:00:00 2001 +From: Pavel Moravec +Date: Wed, 18 Oct 2023 13:38:29 +0200 +Subject: [PATCH] [pulpcore] Scrub AUTH_LDAP_BIND_PASSWORD value + +Likewise in #3379, scrub the password also in pulpcore plugin. 
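As a quick standalone check (not part of the patch), the shared scrub pattern masks the three value styles this change targets; the sample values are the placeholders quoted in the comments of the pulpcore hunk below.

    import re

    key_pass_re = (r"((?:SECRET_KEY|AUTH_LDAP_BIND_PASSWORD)"
                   r"(?:\<.+\>)?(\s*=)?|(password|PASSWORD)"
                   r"(\"|'|:)+)\s*(\S*)")
    repl = r"\1 ********"

    samples = [
        'SECRET_KEY = "eKfeDkTnvss7p5WFqYdGPWxXfHnsbDBx"',
        "'PASSWORD': 'tGrag2DmtLqKLTWTQ6U68f6MAhbqZVQj',",
        "AUTH_LDAP_BIND_PASSWORD = 'ouch-a-secret'",
    ]
    for line in samples:
        print(re.sub(key_pass_re, repl, line))
    # SECRET_KEY = ********
    # 'PASSWORD': ********
    # AUTH_LDAP_BIND_PASSWORD = ********
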
+ +Resolves: #3389 + +Signed-off-by: Pavel Moravec +--- + sos/report/plugins/pulpcore.py | 27 ++++++++------------------- + 1 file changed, 8 insertions(+), 19 deletions(-) + +diff --git a/sos/report/plugins/pulpcore.py b/sos/report/plugins/pulpcore.py +index 04efae9f8..649626ada 100644 +--- a/sos/report/plugins/pulpcore.py ++++ b/sos/report/plugins/pulpcore.py +@@ -144,29 +144,18 @@ def build_query_cmd(self, query, csv=False): + return _dbcmd % (self.dbhost, self.dbport, self.dbname, quote(query)) + + def postproc(self): +- # TODO obfuscate from /etc/pulp/settings.py : ++ # obfuscate from /etc/pulp/settings.py and "dynaconf list": + # SECRET_KEY = "eKfeDkTnvss7p5WFqYdGPWxXfHnsbDBx" + # 'PASSWORD': 'tGrag2DmtLqKLTWTQ6U68f6MAhbqZVQj', ++ # AUTH_LDAP_BIND_PASSWORD = 'ouch-a-secret' + # the PASSWORD can be also in an one-liner list, so detect its value + # in non-greedy manner till first ',' or '}' +- self.do_path_regex_sub( +- "/etc/pulp/settings.py", +- r"(SECRET_KEY\s*=\s*)(.*)", +- r"\1********") +- self.do_path_regex_sub( +- "/etc/pulp/settings.py", +- r"(PASSWORD\S*\s*:\s*)(.*?)(,|\})", +- r"\1********\3") +- # apply the same for "dynaconf list" output that prints settings.py +- # in a pythonic format +- self.do_cmd_output_sub( +- "dynaconf list", +- r"(SECRET_KEY\s*)'(.*)'", +- r"\1********") +- self.do_cmd_output_sub( +- "dynaconf list", +- r"(PASSWORD\S*\s*:\s*)(.*)", +- r"\1********") ++ key_pass_re = r"((?:SECRET_KEY|AUTH_LDAP_BIND_PASSWORD)" \ ++ r"(?:\<.+\>)?(\s*=)?|(password|PASSWORD)" \ ++ r"(\"|'|:)+)\s*(\S*)" ++ repl = r"\1 ********" ++ self.do_path_regex_sub("/etc/pulp/settings.py", key_pass_re, repl) ++ self.do_cmd_output_sub("dynaconf list", key_pass_re, repl) + + + # vim: set et ts=4 sw=4 : + diff --git a/SOURCES/sos-SUPDEV148-microshift-greenboot.patch b/SOURCES/sos-SUPDEV148-microshift-greenboot.patch new file mode 100644 index 0000000..9a91ff4 --- /dev/null +++ b/SOURCES/sos-SUPDEV148-microshift-greenboot.patch @@ -0,0 +1,108 @@ +From 6526985ea2464944c5cf4cd87c2d981a77363077 Mon Sep 17 00:00:00 2001 +From: Pablo Acevedo Montserrat +Date: Tue, 12 Sep 2023 10:24:38 +0200 +Subject: [PATCH] [microshift] Add microshift-etcd.scope service + +Signed-off-by: Pablo Acevedo Montserrat +--- + sos/report/plugins/microshift.py | 2 +- + 1 file changed, 1 insertion(+), 1 deletion(-) + +diff --git a/sos/report/plugins/microshift.py b/sos/report/plugins/microshift.py +index 1b932d648..2cfafef04 100644 +--- a/sos/report/plugins/microshift.py ++++ b/sos/report/plugins/microshift.py +@@ -28,7 +28,7 @@ class Microshift(Plugin, RedHatPlugin): + plugin_timeout = 900 + packages = ('microshift', 'microshift-selinux', 'microshift-networking', + 'microshift-greenboot') +- services = (plugin_name, 'greenboot-healthcheck', ++ services = (plugin_name, 'microshift-etcd.scope', 'greenboot-healthcheck', + 'greenboot-task-runner', 'redboot-task-runner') + profiles = (plugin_name,) + localhost_kubeconfig = '/var/lib/microshift/resources/kubeadmin/kubeconfig' +From 765ac8f3cc8e8413278afbf2579eaac7c0419f72 Mon Sep 17 00:00:00 2001 +From: Evgeny Slutsky +Date: Thu, 7 Sep 2023 10:54:12 +0300 +Subject: [PATCH] [greenboot] seperate logs to a standalone plugin. 
+ +Signed-off-by: Evgeny Slutsky +--- + sos/report/plugins/greenboot.py | 26 ++++++++++++++++++++++++++ + sos/report/plugins/microshift.py | 6 ++---- + 2 files changed, 28 insertions(+), 4 deletions(-) + create mode 100644 sos/report/plugins/greenboot.py + +diff --git a/sos/report/plugins/greenboot.py b/sos/report/plugins/greenboot.py +new file mode 100644 +index 000000000..69b6607b0 +--- /dev/null ++++ b/sos/report/plugins/greenboot.py +@@ -0,0 +1,26 @@ ++# Copyright 2023 Red Hat, Inc. Evgeny Slutsky ++# This file is part of the sos project: https://github.com/sosreport/sos ++# ++# This copyrighted material is made available to anyone wishing to use, ++# modify, copy, or redistribute it subject to the terms and conditions of ++# version 2 of the GNU General Public License. ++# ++# See the LICENSE file in the source distribution for further information. ++ ++from sos.report.plugins import Plugin, RedHatPlugin ++ ++ ++class Greenboot(Plugin, RedHatPlugin): ++ """The greenboot plugin collects systemd service logs and configuration. ++ """ ++ ++ short_desc = 'Greenboot' ++ plugin_name = 'greenboot' ++ services = (plugin_name, 'greenboot-healthcheck', ++ 'greenboot-task-runner', 'redboot-task-runner',) ++ profiles = ('system',) ++ ++ def setup(self): ++ self.add_copy_spec([ ++ "/etc/greenboot/greenboot.conf", ++ ]) +diff --git a/sos/report/plugins/microshift.py b/sos/report/plugins/microshift.py +index 2cfafef04..669f4c021 100644 +--- a/sos/report/plugins/microshift.py ++++ b/sos/report/plugins/microshift.py +@@ -26,10 +26,8 @@ class Microshift(Plugin, RedHatPlugin): + short_desc = 'Microshift' + plugin_name = 'microshift' + plugin_timeout = 900 +- packages = ('microshift', 'microshift-selinux', 'microshift-networking', +- 'microshift-greenboot') +- services = (plugin_name, 'microshift-etcd.scope', 'greenboot-healthcheck', +- 'greenboot-task-runner', 'redboot-task-runner') ++ packages = ('microshift', 'microshift-selinux', 'microshift-networking',) ++ services = (plugin_name, 'microshift-etcd.scope',) + profiles = (plugin_name,) + localhost_kubeconfig = '/var/lib/microshift/resources/kubeadmin/kubeconfig' + +From 0b72a1f07a5f46e22cb926d129bd8eb63ba20a9a Mon Sep 17 00:00:00 2001 +From: Pablo Acevedo Montserrat +Date: Tue, 19 Sep 2023 12:18:42 +0200 +Subject: [PATCH] [microshift] Add /etc/microshift file copy spec + +Signed-off-by: Pablo Acevedo Montserrat +--- + sos/report/plugins/microshift.py | 3 +++ + 1 file changed, 3 insertions(+) + +diff --git a/sos/report/plugins/microshift.py b/sos/report/plugins/microshift.py +index 669f4c021..8fe39ab29 100644 +--- a/sos/report/plugins/microshift.py ++++ b/sos/report/plugins/microshift.py +@@ -146,6 +146,9 @@ def setup(self): + Output format for this function is based on `oc adm inspect` command, + which is used to retrieve all API resources from the cluster. 
+ """ ++ ++ self.add_copy_spec('/etc/microshift') ++ + if self.path_exists('/var/lib/microshift-backups'): + self.add_copy_spec(['/var/lib/microshift-backups/*/version', + '/var/lib/microshift-backups/*.json']) diff --git a/SPECS/sos.spec b/SPECS/sos.spec index 7f00775..0f89963 100644 --- a/SPECS/sos.spec +++ b/SPECS/sos.spec @@ -5,7 +5,7 @@ Summary: A set of tools to gather troubleshooting information from a system Name: sos Version: 4.6.0 -Release: 2%{?dist} +Release: 5%{?dist} Group: Applications/System Source0: https://github.com/sosreport/sos/archive/%{version}/sos-%{version}.tar.gz Source1: sos-audit-%{auditversion}.tgz @@ -23,6 +23,8 @@ Recommends: python3-pyyaml Conflicts: vdsm < 4.40 Obsoletes: sos-collector <= 1.9 Patch1: sos-SUPDEV145-ovnkube-logs.patch +Patch2: sos-SUPDEV148-microshift-greenboot.patch +Patch3: sos-RHEL-13701-aap-passwords.patch %description Sos is a set of tools that gathers information about system @@ -34,6 +36,8 @@ support technicians and developers. %setup -qn %{name}-%{version} %setup -T -D -a1 -q %patch1 -p1 +%patch2 -p1 +%patch3 -p1 %build %py3_build @@ -106,6 +110,18 @@ of the system. Currently storage and filesystem commands are audited. %changelog +* Wed Oct 18 2023 Pavel Moravec = 4.6.0-5 + [pulpcore] Scrub AUTH_LDAP_BIND_PASSWORD value + Resolves: RHEL-13701 + +* Tue Oct 17 2023 Pavel Moravec = 4.6.0-4 +- [pulp] Fix dynaconf obfuscation and add AUTH_LDAP_BIND_PASSWORD + Resolves: RHEL-13701 + +* Thu Oct 12 2023 Pavel Moravec = 4.6.0-3 +- [greenboot] seperate logs to a standalone plugin; enhance [microshift] + Resolves: SUPDEV148 + * Fri Sep 01 2023 Pavel Moravec = 4.6.0-2 - [openshift_ovn] Collect additional ovnkube node logs Resolves: SUPDEV145 From 4652a139d3f2eb584705a41830df9d6c70452daf Mon Sep 17 00:00:00 2001 From: MSVSphere Packaging Team Date: Thu, 8 Feb 2024 03:30:39 +0300 Subject: [PATCH 4/6] import sos-4.6.1-1.el9 --- .gitignore | 2 +- .sos.metadata | 2 +- SOURCES/sos-RHEL-13701-aap-passwords.patch | 98 ---- SOURCES/sos-RHEL-21178-device-auth.patch | 502 ++++++++++++++++++ SOURCES/sos-SUPDEV145-ovnkube-logs.patch | 126 ----- .../sos-SUPDEV148-microshift-greenboot.patch | 108 ---- SPECS/sos.spec | 20 +- 7 files changed, 515 insertions(+), 343 deletions(-) delete mode 100644 SOURCES/sos-RHEL-13701-aap-passwords.patch create mode 100644 SOURCES/sos-RHEL-21178-device-auth.patch delete mode 100644 SOURCES/sos-SUPDEV145-ovnkube-logs.patch delete mode 100644 SOURCES/sos-SUPDEV148-microshift-greenboot.patch diff --git a/.gitignore b/.gitignore index 5146444..454c605 100644 --- a/.gitignore +++ b/.gitignore @@ -1,2 +1,2 @@ -SOURCES/sos-4.6.0.tar.gz +SOURCES/sos-4.6.1.tar.gz SOURCES/sos-audit-0.3.tgz diff --git a/.sos.metadata b/.sos.metadata index f9d8147..ebf25d3 100644 --- a/.sos.metadata +++ b/.sos.metadata @@ -1,2 +1,2 @@ -90d8b664a4e0593d60357342bb5f73af9908e29d SOURCES/sos-4.6.0.tar.gz +b6999d34ade3b3d0b88390ab525d31c6a8dc2950 SOURCES/sos-4.6.1.tar.gz 9d478b9f0085da9178af103078bbf2fd77b0175a SOURCES/sos-audit-0.3.tgz diff --git a/SOURCES/sos-RHEL-13701-aap-passwords.patch b/SOURCES/sos-RHEL-13701-aap-passwords.patch deleted file mode 100644 index 7d3caa0..0000000 --- a/SOURCES/sos-RHEL-13701-aap-passwords.patch +++ /dev/null @@ -1,98 +0,0 @@ -From c6ab24eb8e2bf02c75d0ffa8447032543eb4ea43 Mon Sep 17 00:00:00 2001 -From: "Dr. Jason Breitweg" -Date: Tue, 10 Oct 2023 09:50:29 +0200 -Subject: [PATCH] Fix dynaconf obfuscation and add AUTH_LDAP_BIND_PASSWORD - -Signed-off-by: Dr. 
Jason Breitweg - -Fixed style issues -Signed-off-by: Jason Breitweg jbreitwe@redhat.com - -Signed-off-by: Dr. Jason Breitweg - -Fixed yet more linting errors -Signed-off-by: Jason Breitweg jbreitwe@redhat.com - -Signed-off-by: Dr. Jason Breitweg ---- - sos/report/plugins/pulp.py | 9 ++++++--- - 1 file changed, 6 insertions(+), 3 deletions(-) - -diff --git a/sos/report/plugins/pulp.py b/sos/report/plugins/pulp.py -index df007168a..f5c762f48 100644 ---- a/sos/report/plugins/pulp.py -+++ b/sos/report/plugins/pulp.py -@@ -170,10 +170,13 @@ def postproc(self): - repl = r"\1********" - self.do_path_regex_sub("/etc/pulp(.*)(.json$)", jreg, repl) - -- # obfuscate SECRET_KEY = .. and 'PASSWORD': .. in dynaconf list output -- # and also in settings.py -+ # obfuscate SECRET_KEY = .., 'PASSWORD': .., -+ # and AUTH_LDAP_BIND_PASSWORD = .. -+ # in dynaconf list output and also in settings.py - # count with option that PASSWORD is with(out) quotes or in capitals -- key_pass_re = r"(SECRET_KEY\s*=|(password|PASSWORD)(\"|'|:)+)\s*(\S*)" -+ key_pass_re = r"((?:SECRET_KEY|AUTH_LDAP_BIND_PASSWORD)" \ -+ r"(?:\<.+\>)?(\s*=)?|(password|PASSWORD)" \ -+ r"(\"|'|:)+)\s*(\S*)" - repl = r"\1 ********" - self.do_path_regex_sub("/etc/pulp/settings.py", key_pass_re, repl) - self.do_cmd_output_sub("dynaconf list", key_pass_re, repl) -From 866abe6119e846e243d586b1e353a6585ed83899 Mon Sep 17 00:00:00 2001 -From: Pavel Moravec -Date: Wed, 18 Oct 2023 13:38:29 +0200 -Subject: [PATCH] [pulpcore] Scrub AUTH_LDAP_BIND_PASSWORD value - -Likewise in #3379, scrub the password also in pulpcore plugin. - -Resolves: #3389 - -Signed-off-by: Pavel Moravec ---- - sos/report/plugins/pulpcore.py | 27 ++++++++------------------- - 1 file changed, 8 insertions(+), 19 deletions(-) - -diff --git a/sos/report/plugins/pulpcore.py b/sos/report/plugins/pulpcore.py -index 04efae9f8..649626ada 100644 ---- a/sos/report/plugins/pulpcore.py -+++ b/sos/report/plugins/pulpcore.py -@@ -144,29 +144,18 @@ def build_query_cmd(self, query, csv=False): - return _dbcmd % (self.dbhost, self.dbport, self.dbname, quote(query)) - - def postproc(self): -- # TODO obfuscate from /etc/pulp/settings.py : -+ # obfuscate from /etc/pulp/settings.py and "dynaconf list": - # SECRET_KEY = "eKfeDkTnvss7p5WFqYdGPWxXfHnsbDBx" - # 'PASSWORD': 'tGrag2DmtLqKLTWTQ6U68f6MAhbqZVQj', -+ # AUTH_LDAP_BIND_PASSWORD = 'ouch-a-secret' - # the PASSWORD can be also in an one-liner list, so detect its value - # in non-greedy manner till first ',' or '}' -- self.do_path_regex_sub( -- "/etc/pulp/settings.py", -- r"(SECRET_KEY\s*=\s*)(.*)", -- r"\1********") -- self.do_path_regex_sub( -- "/etc/pulp/settings.py", -- r"(PASSWORD\S*\s*:\s*)(.*?)(,|\})", -- r"\1********\3") -- # apply the same for "dynaconf list" output that prints settings.py -- # in a pythonic format -- self.do_cmd_output_sub( -- "dynaconf list", -- r"(SECRET_KEY\s*)'(.*)'", -- r"\1********") -- self.do_cmd_output_sub( -- "dynaconf list", -- r"(PASSWORD\S*\s*:\s*)(.*)", -- r"\1********") -+ key_pass_re = r"((?:SECRET_KEY|AUTH_LDAP_BIND_PASSWORD)" \ -+ r"(?:\<.+\>)?(\s*=)?|(password|PASSWORD)" \ -+ r"(\"|'|:)+)\s*(\S*)" -+ repl = r"\1 ********" -+ self.do_path_regex_sub("/etc/pulp/settings.py", key_pass_re, repl) -+ self.do_cmd_output_sub("dynaconf list", key_pass_re, repl) - - - # vim: set et ts=4 sw=4 : - diff --git a/SOURCES/sos-RHEL-21178-device-auth.patch b/SOURCES/sos-RHEL-21178-device-auth.patch new file mode 100644 index 0000000..0cc9474 --- /dev/null +++ b/SOURCES/sos-RHEL-21178-device-auth.patch @@ -0,0 +1,502 @@ +From 
c1a08482f9f724395102be22d94382cbda14dbce Mon Sep 17 00:00:00 2001 +From: Jose Castillo +Date: Mon, 9 Oct 2023 16:28:15 +0100 +Subject: [PATCH] [redhat] Change authentication method for RHEL + +The authentication method for RHEL uploads to the +customer portal is changing in 2024 to Device Auth +tokens, from user/password basic authorization. +To accomplish this, one new class is created: +DeviceAuth (deviceauth.py), that takes care of +managing OID token authentication. + +Closes: RH: SUPDEV-63 + +Signed-off-by: Jose Castillo +--- + sos/policies/auth/__init__.py | 210 +++++++++++++++++++++++++++++++++ + sos/policies/distros/redhat.py | 121 ++++++++++++++----- + 2 files changed, 300 insertions(+), 31 deletions(-) + create mode 100644 sos/policies/auth/__init__.py + +diff --git a/sos/policies/auth/__init__.py b/sos/policies/auth/__init__.py +new file mode 100644 +index 000000000..5b62a4953 +--- /dev/null ++++ b/sos/policies/auth/__init__.py +@@ -0,0 +1,210 @@ ++# Copyright (C) 2023 Red Hat, Inc., Jose Castillo ++ ++# This file is part of the sos project: https://github.com/sosreport/sos ++# ++# This copyrighted material is made available to anyone wishing to use, ++# modify, copy, or redistribute it subject to the terms and conditions of ++# version 2 of the GNU General Public License. ++# ++# See the LICENSE file in the source distribution for further information. ++ ++import logging ++try: ++ import requests ++ REQUESTS_LOADED = True ++except ImportError: ++ REQUESTS_LOADED = False ++import time ++from datetime import datetime, timedelta ++ ++DEVICE_AUTH_CLIENT_ID = "sos-tools" ++GRANT_TYPE_DEVICE_CODE = "urn:ietf:params:oauth:grant-type:device_code" ++ ++logger = logging.getLogger("sos") ++ ++ ++class DeviceAuthorizationClass: ++ """ ++ Device Authorization Class ++ """ ++ ++ def __init__(self, client_identifier_url, token_endpoint): ++ ++ self._access_token = None ++ self._access_expires_at = None ++ self.__device_code = None ++ ++ self.client_identifier_url = client_identifier_url ++ self.token_endpoint = token_endpoint ++ self._use_device_code_grant() ++ ++ def _use_device_code_grant(self): ++ """ ++ Start the device auth flow. In the future we will ++ store the tokens in an in-memory keyring. ++ ++ """ ++ ++ self._request_device_code() ++ print( ++ "Please visit the following URL to authenticate this" ++ f" device: {self._verification_uri_complete}" ++ ) ++ self.poll_for_auth_completion() ++ ++ def _request_device_code(self): ++ """ ++ Initialize new Device Authorization Grant attempt by ++ requesting a new device code. ++ ++ """ ++ data = "client_id={}".format(DEVICE_AUTH_CLIENT_ID) ++ headers = {'content-type': 'application/x-www-form-urlencoded'} ++ if not REQUESTS_LOADED: ++ raise Exception("python3-requests is not installed and is required" ++ " for obtaining device auth token.") ++ try: ++ res = requests.post( ++ self.client_identifier_url, ++ data=data, ++ headers=headers) ++ res.raise_for_status() ++ response = res.json() ++ self._user_code = response.get("user_code") ++ self._verification_uri = response.get("verification_uri") ++ self._interval = response.get("interval") ++ self.__device_code = response.get("device_code") ++ self._verification_uri_complete = response.get( ++ "verification_uri_complete") ++ except requests.HTTPError as e: ++ raise requests.HTTPError("HTTP request failed " ++ "while attempting to acquire the tokens." 
++ f"Error returned was {res.status_code} " ++ f"{e}") ++ ++ def poll_for_auth_completion(self): ++ """ ++ Continuously poll OIDC token endpoint until the user is successfully ++ authenticated or an error occurs. ++ ++ """ ++ token_data = {'grant_type': GRANT_TYPE_DEVICE_CODE, ++ 'client_id': DEVICE_AUTH_CLIENT_ID, ++ 'device_code': self.__device_code} ++ ++ if not REQUESTS_LOADED: ++ raise Exception("python3-requests is not installed and is required" ++ " for obtaining device auth token.") ++ while self._access_token is None: ++ time.sleep(self._interval) ++ try: ++ check_auth_completion = requests.post(self.token_endpoint, ++ data=token_data) ++ ++ status_code = check_auth_completion.status_code ++ ++ if status_code == 200: ++ logger.info("The SSO authentication is successful") ++ self._set_token_data(check_auth_completion.json()) ++ if status_code not in [200, 400]: ++ raise Exception(status_code, check_auth_completion.text) ++ if status_code == 400 and \ ++ check_auth_completion.json()['error'] not in \ ++ ("authorization_pending", "slow_down"): ++ raise Exception(status_code, check_auth_completion.text) ++ except requests.exceptions.RequestException as e: ++ logger.error(f"Error was found while posting a request: {e}") ++ ++ def _set_token_data(self, token_data): ++ """ ++ Set the class attributes as per the input token_data received. ++ In the future we will persist the token data in a local, ++ in-memory keyring, to avoid visting the browser frequently. ++ :param token_data: Token data containing access_token, refresh_token ++ and their expiry etc. ++ """ ++ self._access_token = token_data.get("access_token") ++ self._access_expires_at = datetime.utcnow() + \ ++ timedelta(seconds=token_data.get("expires_in")) ++ self._refresh_token = token_data.get("refresh_token") ++ self._refresh_expires_in = token_data.get("refresh_expires_in") ++ if self._refresh_expires_in == 0: ++ self._refresh_expires_at = datetime.max ++ else: ++ self._refresh_expires_at = datetime.utcnow() + \ ++ timedelta(seconds=self._refresh_expires_in) ++ ++ def get_access_token(self): ++ """ ++ Get the valid access_token at any given time. ++ :return: Access_token ++ :rtype: string ++ """ ++ if self.is_access_token_valid(): ++ return self._access_token ++ else: ++ if self.is_refresh_token_valid(): ++ self._use_refresh_token_grant() ++ return self._access_token ++ else: ++ self._use_device_code_grant() ++ return self._access_token ++ ++ def is_access_token_valid(self): ++ """ ++ Check the validity of access_token. We are considering it invalid 180 ++ sec. prior to it's exact expiry time. ++ :return: True/False ++ ++ """ ++ return self._access_token and self._access_expires_at and \ ++ self._access_expires_at - timedelta(seconds=180) > \ ++ datetime.utcnow() ++ ++ def is_refresh_token_valid(self): ++ """ ++ Check the validity of refresh_token. We are considering it invalid ++ 180 sec. prior to it's exact expiry time. ++ ++ :return: True/False ++ ++ """ ++ return self._refresh_token and self._refresh_expires_at and \ ++ self._refresh_expires_at - timedelta(seconds=180) > \ ++ datetime.utcnow() ++ ++ def _use_refresh_token_grant(self, refresh_token=None): ++ """ ++ Fetch the new access_token and refresh_token using the existing ++ refresh_token and persist it. 
++ :param refresh_token: optional param for refresh_token ++ ++ """ ++ if not REQUESTS_LOADED: ++ raise Exception("python3-requests is not installed and is required" ++ " for obtaining device auth token.") ++ refresh_token_data = {'client_id': DEVICE_AUTH_CLIENT_ID, ++ 'grant_type': 'refresh_token', ++ 'refresh_token': self._refresh_token if not ++ refresh_token else refresh_token} ++ ++ refresh_token_res = requests.post(self.token_endpoint, ++ data=refresh_token_data) ++ ++ if refresh_token_res.status_code == 200: ++ self._set_token_data(refresh_token_res.json()) ++ ++ elif refresh_token_res.status_code == 400 and 'invalid' in\ ++ refresh_token_res.json()['error']: ++ logger.warning("Problem while fetching the new tokens from refresh" ++ " token grant - {} {}." ++ " New Device code will be requested !".format ++ (refresh_token_res.status_code, ++ refresh_token_res.json()['error'])) ++ self._use_device_code_grant() ++ else: ++ raise Exception( ++ "Something went wrong while using the " ++ "Refresh token grant for fetching tokens:" ++ f" Returned status code {refresh_token_res.status_code}" ++ f" and error {refresh_token_res.json()['error']}") +diff --git a/sos/policies/distros/redhat.py b/sos/policies/distros/redhat.py +index bdbe8f952..02cc4cc2f 100644 +--- a/sos/policies/distros/redhat.py ++++ b/sos/policies/distros/redhat.py +@@ -12,6 +12,7 @@ + import os + import sys + import re ++from sos.policies.auth import DeviceAuthorizationClass + + from sos.report.plugins import RedHatPlugin + from sos.presets.redhat import (RHEL_PRESETS, ATOMIC_PRESETS, RHV, RHEL, +@@ -51,6 +52,10 @@ class RedHatPolicy(LinuxPolicy): + default_container_runtime = 'podman' + sos_pkg_name = 'sos' + sos_bin_path = '/usr/sbin' ++ client_identifier_url = "https://sso.redhat.com/auth/"\ ++ "realms/redhat-external/protocol/openid-connect/auth/device" ++ token_endpoint = "https://sso.redhat.com/auth/realms/"\ ++ "redhat-external/protocol/openid-connect/token" + + def __init__(self, sysroot=None, init=None, probe_runtime=True, + remote_exec=None): +@@ -228,6 +233,7 @@ class RHELPolicy(RedHatPolicy): + """ + disclaimer_text + "%(vendor_text)s\n") + _upload_url = RH_SFTP_HOST + _upload_method = 'post' ++ _device_token = None + + def __init__(self, sysroot=None, init=None, probe_runtime=True, + remote_exec=None): +@@ -266,24 +272,23 @@ def check(cls, remote=''): + + def prompt_for_upload_user(self): + if self.commons['cmdlineopts'].upload_user: +- return +- # Not using the default, so don't call this prompt for RHCP +- if self.commons['cmdlineopts'].upload_url: +- super(RHELPolicy, self).prompt_for_upload_user() +- return +- if not self.get_upload_user(): +- if self.case_id: +- self.upload_user = input(_( +- "Enter your Red Hat Customer Portal username for " +- "uploading [empty for anonymous SFTP]: ") +- ) +- else: # no case id provided => failover to SFTP +- self.upload_url = RH_SFTP_HOST +- self.ui_log.info("No case id provided, uploading to SFTP") +- self.upload_user = input(_( +- "Enter your Red Hat Customer Portal username for " +- "uploading to SFTP [empty for anonymous]: ") +- ) ++ self.ui_log.info( ++ _("The option --upload-user has been deprecated in favour" ++ " of device authorization in RHEL") ++ ) ++ if not self.case_id: ++ # no case id provided => failover to SFTP ++ self.upload_url = RH_SFTP_HOST ++ self.ui_log.info("No case id provided, uploading to SFTP") ++ ++ def prompt_for_upload_password(self): ++ # With OIDC we don't ask for user/pass anymore ++ if self.commons['cmdlineopts'].upload_pass: ++ 
self.ui_log.info( ++ _("The option --upload-pass has been deprecated in favour" ++ " of device authorization in RHEL") ++ ) ++ return + + def get_upload_url(self): + if self.upload_url: +@@ -292,10 +297,42 @@ def get_upload_url(self): + return self.commons['cmdlineopts'].upload_url + elif self.commons['cmdlineopts'].upload_protocol == 'sftp': + return RH_SFTP_HOST ++ elif not self.commons['cmdlineopts'].case_id: ++ self.ui_log.info("No case id provided, uploading to SFTP") ++ return RH_SFTP_HOST + else: + rh_case_api = "/support/v1/cases/%s/attachments" + return RH_API_HOST + rh_case_api % self.case_id + ++ def _get_upload_https_auth(self): ++ str_auth = "Bearer {}".format(self._device_token) ++ return {'Authorization': str_auth} ++ ++ def _upload_https_post(self, archive, verify=True): ++ """If upload_https() needs to use requests.post(), use this method. ++ ++ Policies should override this method instead of the base upload_https() ++ ++ :param archive: The open archive file object ++ """ ++ files = { ++ 'file': (archive.name.split('/')[-1], archive, ++ self._get_upload_headers()) ++ } ++ # Get the access token at this point. With this, ++ # we cover the cases where report generation takes ++ # longer than the token timeout ++ RHELAuth = DeviceAuthorizationClass( ++ self.client_identifier_url, ++ self.token_endpoint ++ ) ++ self._device_token = RHELAuth.get_access_token() ++ self.ui_log.info("Device authorized correctly. Uploading file to " ++ f"{self.get_upload_url_string()}") ++ return requests.post(self.get_upload_url(), files=files, ++ headers=self._get_upload_https_auth(), ++ verify=verify) ++ + def _get_upload_headers(self): + if self.get_upload_url().startswith(RH_API_HOST): + return {'isPrivate': 'false', 'cache-control': 'no-cache'} +@@ -332,15 +369,38 @@ def upload_sftp(self): + " for obtaining SFTP auth token.") + _token = None + _user = None ++ ++ # We may have a device token already if we attempted ++ # to upload via http but the upload failed. So ++ # lets check first if there isn't one. ++ if not self._device_token: ++ try: ++ RHELAuth = DeviceAuthorizationClass( ++ self.client_identifier_url, ++ self.token_endpoint ++ ) ++ except Exception as e: ++ # We end up here if the user cancels the device ++ # authentication in the web interface ++ if "end user denied" in str(e): ++ self.ui_log.info( ++ "Device token authorization " ++ "has been cancelled by the user." ++ ) ++ else: ++ self._device_token = RHELAuth.get_access_token() ++ if self._device_token: ++ self.ui_log.info("Device authorized correctly. Uploading file to" ++ f" {self.get_upload_url_string()}") ++ + url = RH_API_HOST + '/support/v2/sftp/token' +- # we have a username and password, but we need to reset the password +- # to be the token returned from the auth endpoint +- if self.get_upload_user() and self.get_upload_password(): +- auth = self.get_upload_https_auth() +- ret = requests.post(url, auth=auth, timeout=10) ++ ret = None ++ if self._device_token: ++ headers = self._get_upload_https_auth() ++ ret = requests.post(url, headers=headers, timeout=10) + if ret.status_code == 200: + # credentials are valid +- _user = self.get_upload_user() ++ _user = json.loads(ret.text)['username'] + _token = json.loads(ret.text)['token'] + else: + self.ui_log.debug( +@@ -351,8 +411,7 @@ def upload_sftp(self): + "Unable to retrieve Red Hat auth token using provided " + "credentials. Will try anonymous." 
+ ) +- # we either do not have a username or password/token, or both +- if not _token: ++ else: + adata = {"isAnonymous": True} + anon = requests.post(url, data=json.dumps(adata), timeout=10) + if anon.status_code == 200: +@@ -368,7 +427,6 @@ def upload_sftp(self): + f"DEBUG: anonymous request failed (status: " + f"{anon.status_code}): {anon.json()}" + ) +- + if _user and _token: + return super(RHELPolicy, self).upload_sftp(user=_user, + password=_token) +@@ -380,17 +438,18 @@ def upload_archive(self, archive): + """ + try: + if self.upload_url and self.upload_url.startswith(RH_API_HOST) and\ +- (not self.get_upload_user() or not self.get_upload_password()): ++ (not self.get_upload_user() or ++ not self.get_upload_password()): + self.upload_url = RH_SFTP_HOST + uploaded = super(RHELPolicy, self).upload_archive(archive) +- except Exception: ++ except Exception as e: + uploaded = False + if not self.upload_url.startswith(RH_API_HOST): + raise + else: + self.ui_log.error( +- _(f"Upload to Red Hat Customer Portal failed. Trying " +- f"{RH_SFTP_HOST}") ++ _(f"Upload to Red Hat Customer Portal failed due to " ++ f"{e}. Trying {RH_SFTP_HOST}") + ) + self.upload_url = RH_SFTP_HOST + uploaded = super(RHELPolicy, self).upload_archive(archive) +From d338a232cd7c829ca8ca5e5febef51035d1f7da5 Mon Sep 17 00:00:00 2001 +From: Pavel Moravec +Date: Wed, 10 Jan 2024 16:47:44 +0100 +Subject: [PATCH] [build] Bump version to 4.6.1 + +Signed-off-by: Pavel Moravec +--- + docs/conf.py | 4 ++-- + sos.spec | 5 ++++- + sos/__init__.py | 2 +- + 3 files changed, 7 insertions(+), 4 deletions(-) + +diff --git a/docs/conf.py b/docs/conf.py +index 5f105373e..57d1b9297 100644 +--- a/docs/conf.py ++++ b/docs/conf.py +@@ -59,9 +59,9 @@ + # built documents. + # + # The short X.Y version. +-version = '4.6.0' ++version = '4.6.1' + # The full version, including alpha/beta/rc tags. +-release = '4.6.0' ++release = '4.6.1' + + # The language for content autogenerated by Sphinx. Refer to documentation + # for a list of supported languages. +diff --git a/sos.spec b/sos.spec +index b575b5232..a08e2857b 100644 +--- a/sos.spec ++++ b/sos.spec +@@ -1,6 +1,6 @@ + Summary: A set of tools to gather troubleshooting information from a system + Name: sos +-Version: 4.6.0 ++Version: 4.6.1 + Release: 1%{?dist} + Source0: https://github.com/sosreport/sos/archive/%{name}-%{version}.tar.gz + License: GPL-2.0-or-later +@@ -90,6 +90,9 @@ rm -rf %{buildroot}/usr/config/ + %config(noreplace) %{_sysconfdir}/sos/sos.conf + + %changelog ++* Wed Jan 10 2024 Pavel Moravec = 4.6.1 ++- New upstream release ++ + * Thu Aug 17 2023 Jake Hunsaker = 4.6.0 + - New upstream release + +diff --git a/sos/__init__.py b/sos/__init__.py +index 78e452676..18d18c4c7 100644 +--- a/sos/__init__.py ++++ b/sos/__init__.py +@@ -14,7 +14,7 @@ + This module houses the i18n setup and message function. The default is to use + gettext to internationalize messages. + """ +-__version__ = "4.6.0" ++__version__ = "4.6.1" + + import os + import sys diff --git a/SOURCES/sos-SUPDEV145-ovnkube-logs.patch b/SOURCES/sos-SUPDEV145-ovnkube-logs.patch deleted file mode 100644 index e070948..0000000 --- a/SOURCES/sos-SUPDEV145-ovnkube-logs.patch +++ /dev/null @@ -1,126 +0,0 @@ -From 43714aa5aeb3dcb0dec17dd026ca5c394cc06afd Mon Sep 17 00:00:00 2001 -From: Periyasamy Palanisamy -Date: Fri, 11 Aug 2023 14:30:42 +0200 -Subject: [PATCH] Collect additional ovnkube node logs - -With Interconnect support in latest OVN-Kubernetes, ovnkube-nodes -logs grew large. 
This commit adds the ability to collect those -additional logs. - -Signed-off-by: Periyasamy Palanisamy ---- - sos/report/plugins/openshift_ovn.py | 3 ++- - 1 file changed, 2 insertions(+), 1 deletion(-) - -diff --git a/sos/report/plugins/openshift_ovn.py b/sos/report/plugins/openshift_ovn.py -index d81fc97aa..2d804e9ae 100644 ---- a/sos/report/plugins/openshift_ovn.py -+++ b/sos/report/plugins/openshift_ovn.py -@@ -30,7 +30,8 @@ def setup(self): - # Collect ovn interconnect specific files if exists. - self.add_copy_spec([ - "/var/lib/ovn-ic/etc/ovnnb_db.db", -- "/var/lib/ovn-ic/etc/ovnsb_db.db" -+ "/var/lib/ovn-ic/etc/ovnsb_db.db", -+ "/var/lib/ovn-ic/etc/libovsdb*log*" - ]) - - # The ovn cluster/status is not valid anymore for interconnect setup. -From e11a594f942f9ae98aeb644c573293b391050657 Mon Sep 17 00:00:00 2001 -From: Periyasamy Palanisamy -Date: Tue, 15 Aug 2023 11:47:20 +0200 -Subject: [PATCH] Collect ovn logs as much as possible - -The sosreport limits to collect logs at maximum of 25 MB in a given -collection passed into add_copy_spec method. so this may lead into -logs wouldn't have fully collected when user collected sos report -without --all-logs option. -Hence this commit ensures logs and dbs collected as much as possible -when --all-logs option is not specified. - -Signed-off-by: Periyasamy Palanisamy ---- - sos/report/plugins/openshift_ovn.py | 25 +++++++++++++++++-------- - 1 file changed, 17 insertions(+), 8 deletions(-) - -diff --git a/sos/report/plugins/openshift_ovn.py b/sos/report/plugins/openshift_ovn.py -index 2d804e9ae..347b15eea 100644 ---- a/sos/report/plugins/openshift_ovn.py -+++ b/sos/report/plugins/openshift_ovn.py -@@ -20,19 +20,28 @@ class OpenshiftOVN(Plugin, RedHatPlugin): - profiles = ('openshift',) - - def setup(self): -+ all_logs = self.get_option("all_logs") -+ - self.add_copy_spec([ - "/var/lib/ovn/etc/ovnnb_db.db", - "/var/lib/ovn/etc/ovnsb_db.db", -- "/var/lib/openvswitch/etc/keys", -- "/var/log/openvswitch/libreswan.log", -- "/var/log/openvswitch/ovs-monitor-ipsec.log" -- ]) -- # Collect ovn interconnect specific files if exists. -+ "/var/lib/openvswitch/etc/keys" -+ ], sizelimit=300) -+ -+ # Collect ovn interconnect specific db files if exists. - self.add_copy_spec([ - "/var/lib/ovn-ic/etc/ovnnb_db.db", -- "/var/lib/ovn-ic/etc/ovnsb_db.db", -- "/var/lib/ovn-ic/etc/libovsdb*log*" -- ]) -+ "/var/lib/ovn-ic/etc/ovnsb_db.db" -+ ], sizelimit=300) -+ -+ # Collect libovsdb logs in case of ovn interconnect setup. -+ if not all_logs: -+ self.add_copy_spec([ -+ "/var/lib/ovn-ic/etc/libovsdb.log", -+ "/var/lib/ovn-ic/etc/libovsdb*log.gz" -+ ], sizelimit=100) -+ else: -+ self.add_copy_spec("/var/lib/ovn-ic/etc/libovsdb*log*") - - # The ovn cluster/status is not valid anymore for interconnect setup. - self.add_cmd_output([ -From 7cd6f61fd15ae7fc93d62cca927204351cdc1322 Mon Sep 17 00:00:00 2001 -From: Periyasamy Palanisamy -Date: Wed, 30 Aug 2023 09:56:40 +0200 -Subject: [PATCH] Collect logs from ovnkube-controller container - -This enables ovn sos report plugin to collect logs ovnkube-controller -container because ovn-kubernetes now provides option to run both -ovnkube-node and ovnkube-controller in same container with this -PR https://github.com/ovn-org/ovn-kubernetes/pull/3807. 
- -Signed-off-by: Periyasamy Palanisamy ---- - sos/report/plugins/openshift_ovn.py | 7 ++++++- - 1 file changed, 6 insertions(+), 1 deletion(-) - -diff --git a/sos/report/plugins/openshift_ovn.py b/sos/report/plugins/openshift_ovn.py -index 347b15eea..cb48057d3 100644 ---- a/sos/report/plugins/openshift_ovn.py -+++ b/sos/report/plugins/openshift_ovn.py -@@ -16,7 +16,8 @@ class OpenshiftOVN(Plugin, RedHatPlugin): - """ - short_desc = 'Openshift OVN' - plugin_name = "openshift_ovn" -- containers = ('ovnkube-master', 'ovnkube-node', 'ovn-ipsec') -+ containers = ('ovnkube-master', 'ovnkube-node', 'ovn-ipsec', -+ 'ovnkube-controller') - profiles = ('openshift',) - - def setup(self): -@@ -54,6 +55,10 @@ def setup(self): - 'ovs-appctl -t /var/run/ovn/ovn-controller.*.ctl ' + - 'ct-zone-list'], - container='ovnkube-node') -+ self.add_cmd_output([ -+ 'ovs-appctl -t /var/run/ovn/ovn-controller.*.ctl ' + -+ 'ct-zone-list'], -+ container='ovnkube-controller') - # Collect ovs ct-zone-list directly on host for interconnect setup. - self.add_cmd_output([ - 'ovs-appctl -t /var/run/ovn-ic/ovn-controller.*.ctl ' + diff --git a/SOURCES/sos-SUPDEV148-microshift-greenboot.patch b/SOURCES/sos-SUPDEV148-microshift-greenboot.patch deleted file mode 100644 index 9a91ff4..0000000 --- a/SOURCES/sos-SUPDEV148-microshift-greenboot.patch +++ /dev/null @@ -1,108 +0,0 @@ -From 6526985ea2464944c5cf4cd87c2d981a77363077 Mon Sep 17 00:00:00 2001 -From: Pablo Acevedo Montserrat -Date: Tue, 12 Sep 2023 10:24:38 +0200 -Subject: [PATCH] [microshift] Add microshift-etcd.scope service - -Signed-off-by: Pablo Acevedo Montserrat ---- - sos/report/plugins/microshift.py | 2 +- - 1 file changed, 1 insertion(+), 1 deletion(-) - -diff --git a/sos/report/plugins/microshift.py b/sos/report/plugins/microshift.py -index 1b932d648..2cfafef04 100644 ---- a/sos/report/plugins/microshift.py -+++ b/sos/report/plugins/microshift.py -@@ -28,7 +28,7 @@ class Microshift(Plugin, RedHatPlugin): - plugin_timeout = 900 - packages = ('microshift', 'microshift-selinux', 'microshift-networking', - 'microshift-greenboot') -- services = (plugin_name, 'greenboot-healthcheck', -+ services = (plugin_name, 'microshift-etcd.scope', 'greenboot-healthcheck', - 'greenboot-task-runner', 'redboot-task-runner') - profiles = (plugin_name,) - localhost_kubeconfig = '/var/lib/microshift/resources/kubeadmin/kubeconfig' -From 765ac8f3cc8e8413278afbf2579eaac7c0419f72 Mon Sep 17 00:00:00 2001 -From: Evgeny Slutsky -Date: Thu, 7 Sep 2023 10:54:12 +0300 -Subject: [PATCH] [greenboot] seperate logs to a standalone plugin. - -Signed-off-by: Evgeny Slutsky ---- - sos/report/plugins/greenboot.py | 26 ++++++++++++++++++++++++++ - sos/report/plugins/microshift.py | 6 ++---- - 2 files changed, 28 insertions(+), 4 deletions(-) - create mode 100644 sos/report/plugins/greenboot.py - -diff --git a/sos/report/plugins/greenboot.py b/sos/report/plugins/greenboot.py -new file mode 100644 -index 000000000..69b6607b0 ---- /dev/null -+++ b/sos/report/plugins/greenboot.py -@@ -0,0 +1,26 @@ -+# Copyright 2023 Red Hat, Inc. Evgeny Slutsky -+# This file is part of the sos project: https://github.com/sosreport/sos -+# -+# This copyrighted material is made available to anyone wishing to use, -+# modify, copy, or redistribute it subject to the terms and conditions of -+# version 2 of the GNU General Public License. -+# -+# See the LICENSE file in the source distribution for further information. 
-+ -+from sos.report.plugins import Plugin, RedHatPlugin -+ -+ -+class Greenboot(Plugin, RedHatPlugin): -+ """The greenboot plugin collects systemd service logs and configuration. -+ """ -+ -+ short_desc = 'Greenboot' -+ plugin_name = 'greenboot' -+ services = (plugin_name, 'greenboot-healthcheck', -+ 'greenboot-task-runner', 'redboot-task-runner',) -+ profiles = ('system',) -+ -+ def setup(self): -+ self.add_copy_spec([ -+ "/etc/greenboot/greenboot.conf", -+ ]) -diff --git a/sos/report/plugins/microshift.py b/sos/report/plugins/microshift.py -index 2cfafef04..669f4c021 100644 ---- a/sos/report/plugins/microshift.py -+++ b/sos/report/plugins/microshift.py -@@ -26,10 +26,8 @@ class Microshift(Plugin, RedHatPlugin): - short_desc = 'Microshift' - plugin_name = 'microshift' - plugin_timeout = 900 -- packages = ('microshift', 'microshift-selinux', 'microshift-networking', -- 'microshift-greenboot') -- services = (plugin_name, 'microshift-etcd.scope', 'greenboot-healthcheck', -- 'greenboot-task-runner', 'redboot-task-runner') -+ packages = ('microshift', 'microshift-selinux', 'microshift-networking',) -+ services = (plugin_name, 'microshift-etcd.scope',) - profiles = (plugin_name,) - localhost_kubeconfig = '/var/lib/microshift/resources/kubeadmin/kubeconfig' - -From 0b72a1f07a5f46e22cb926d129bd8eb63ba20a9a Mon Sep 17 00:00:00 2001 -From: Pablo Acevedo Montserrat -Date: Tue, 19 Sep 2023 12:18:42 +0200 -Subject: [PATCH] [microshift] Add /etc/microshift file copy spec - -Signed-off-by: Pablo Acevedo Montserrat ---- - sos/report/plugins/microshift.py | 3 +++ - 1 file changed, 3 insertions(+) - -diff --git a/sos/report/plugins/microshift.py b/sos/report/plugins/microshift.py -index 669f4c021..8fe39ab29 100644 ---- a/sos/report/plugins/microshift.py -+++ b/sos/report/plugins/microshift.py -@@ -146,6 +146,9 @@ def setup(self): - Output format for this function is based on `oc adm inspect` command, - which is used to retrieve all API resources from the cluster. - """ -+ -+ self.add_copy_spec('/etc/microshift') -+ - if self.path_exists('/var/lib/microshift-backups'): - self.add_copy_spec(['/var/lib/microshift-backups/*/version', - '/var/lib/microshift-backups/*.json']) diff --git a/SPECS/sos.spec b/SPECS/sos.spec index 0f89963..0962491 100644 --- a/SPECS/sos.spec +++ b/SPECS/sos.spec @@ -4,8 +4,8 @@ Summary: A set of tools to gather troubleshooting information from a system Name: sos -Version: 4.6.0 -Release: 5%{?dist} +Version: 4.6.1 +Release: 1%{?dist} Group: Applications/System Source0: https://github.com/sosreport/sos/archive/%{version}/sos-%{version}.tar.gz Source1: sos-audit-%{auditversion}.tgz @@ -22,9 +22,7 @@ Recommends: python3-pexpect Recommends: python3-pyyaml Conflicts: vdsm < 4.40 Obsoletes: sos-collector <= 1.9 -Patch1: sos-SUPDEV145-ovnkube-logs.patch -Patch2: sos-SUPDEV148-microshift-greenboot.patch -Patch3: sos-RHEL-13701-aap-passwords.patch +Patch1: sos-RHEL-21178-device-auth.patch %description Sos is a set of tools that gathers information about system @@ -36,11 +34,9 @@ support technicians and developers. %setup -qn %{name}-%{version} %setup -T -D -a1 -q %patch1 -p1 -%patch2 -p1 -%patch3 -p1 %build -%py3_build +%py3_build %install %py3_install '--install-scripts=%{_sbindir}' @@ -110,6 +106,12 @@ of the system. Currently storage and filesystem commands are audited. 
%changelog +* Thu Jan 11 2024 Pavel Moravec = 4.6.1-1 +- rebase to upstream 4.6.1 + Resolves: RHEL-21174 +- [redhat] Change authentication method for RHEL + Resolves: RHEL-21178 + * Wed Oct 18 2023 Pavel Moravec = 4.6.0-5 [pulpcore] Scrub AUTH_LDAP_BIND_PASSWORD value Resolves: RHEL-13701 @@ -158,7 +160,7 @@ of the system. Currently storage and filesystem commands are audited. - [powerpc]: To collect lparnumascore logs Resolves: bz2177984 -* Wed Mar 15 2023 MSVSphere Packaging Team - 4.3-5 +* Wed Mar 15 2023 MSVSphere Packaging Team - = 4.5.1-3 - Rebuilt for MSVSphere 9.1. * Wed Mar 08 2023 Pavel Moravec = 4.5.1-3 From 90ed366b553b87faf54d294abaa5bd96bc121131 Mon Sep 17 00:00:00 2001 From: MSVSphere Packaging Team Date: Thu, 11 Apr 2024 03:30:27 +0300 Subject: [PATCH 5/6] import sos-4.7.0-1.el9 --- .gitignore | 2 +- .sos.metadata | 2 +- SOURCES/sos-RHEL-21178-device-auth.patch | 502 ----------------------- SPECS/sos.spec | 8 +- 4 files changed, 7 insertions(+), 507 deletions(-) delete mode 100644 SOURCES/sos-RHEL-21178-device-auth.patch diff --git a/.gitignore b/.gitignore index 454c605..8f5236b 100644 --- a/.gitignore +++ b/.gitignore @@ -1,2 +1,2 @@ -SOURCES/sos-4.6.1.tar.gz +SOURCES/sos-4.7.0.tar.gz SOURCES/sos-audit-0.3.tgz diff --git a/.sos.metadata b/.sos.metadata index ebf25d3..67aeec0 100644 --- a/.sos.metadata +++ b/.sos.metadata @@ -1,2 +1,2 @@ -b6999d34ade3b3d0b88390ab525d31c6a8dc2950 SOURCES/sos-4.6.1.tar.gz +7d1629848263be2d613983fb15cd418dccdf1c76 SOURCES/sos-4.7.0.tar.gz 9d478b9f0085da9178af103078bbf2fd77b0175a SOURCES/sos-audit-0.3.tgz diff --git a/SOURCES/sos-RHEL-21178-device-auth.patch b/SOURCES/sos-RHEL-21178-device-auth.patch deleted file mode 100644 index 0cc9474..0000000 --- a/SOURCES/sos-RHEL-21178-device-auth.patch +++ /dev/null @@ -1,502 +0,0 @@ -From c1a08482f9f724395102be22d94382cbda14dbce Mon Sep 17 00:00:00 2001 -From: Jose Castillo -Date: Mon, 9 Oct 2023 16:28:15 +0100 -Subject: [PATCH] [redhat] Change authentication method for RHEL - -The authentication method for RHEL uploads to the -customer portal is changing in 2024 to Device Auth -tokens, from user/password basic authorization. -To accomplish this, one new class is created: -DeviceAuth (deviceauth.py), that takes care of -managing OID token authentication. - -Closes: RH: SUPDEV-63 - -Signed-off-by: Jose Castillo ---- - sos/policies/auth/__init__.py | 210 +++++++++++++++++++++++++++++++++ - sos/policies/distros/redhat.py | 121 ++++++++++++++----- - 2 files changed, 300 insertions(+), 31 deletions(-) - create mode 100644 sos/policies/auth/__init__.py - -diff --git a/sos/policies/auth/__init__.py b/sos/policies/auth/__init__.py -new file mode 100644 -index 000000000..5b62a4953 ---- /dev/null -+++ b/sos/policies/auth/__init__.py -@@ -0,0 +1,210 @@ -+# Copyright (C) 2023 Red Hat, Inc., Jose Castillo -+ -+# This file is part of the sos project: https://github.com/sosreport/sos -+# -+# This copyrighted material is made available to anyone wishing to use, -+# modify, copy, or redistribute it subject to the terms and conditions of -+# version 2 of the GNU General Public License. -+# -+# See the LICENSE file in the source distribution for further information. 
-+ -+import logging -+try: -+ import requests -+ REQUESTS_LOADED = True -+except ImportError: -+ REQUESTS_LOADED = False -+import time -+from datetime import datetime, timedelta -+ -+DEVICE_AUTH_CLIENT_ID = "sos-tools" -+GRANT_TYPE_DEVICE_CODE = "urn:ietf:params:oauth:grant-type:device_code" -+ -+logger = logging.getLogger("sos") -+ -+ -+class DeviceAuthorizationClass: -+ """ -+ Device Authorization Class -+ """ -+ -+ def __init__(self, client_identifier_url, token_endpoint): -+ -+ self._access_token = None -+ self._access_expires_at = None -+ self.__device_code = None -+ -+ self.client_identifier_url = client_identifier_url -+ self.token_endpoint = token_endpoint -+ self._use_device_code_grant() -+ -+ def _use_device_code_grant(self): -+ """ -+ Start the device auth flow. In the future we will -+ store the tokens in an in-memory keyring. -+ -+ """ -+ -+ self._request_device_code() -+ print( -+ "Please visit the following URL to authenticate this" -+ f" device: {self._verification_uri_complete}" -+ ) -+ self.poll_for_auth_completion() -+ -+ def _request_device_code(self): -+ """ -+ Initialize new Device Authorization Grant attempt by -+ requesting a new device code. -+ -+ """ -+ data = "client_id={}".format(DEVICE_AUTH_CLIENT_ID) -+ headers = {'content-type': 'application/x-www-form-urlencoded'} -+ if not REQUESTS_LOADED: -+ raise Exception("python3-requests is not installed and is required" -+ " for obtaining device auth token.") -+ try: -+ res = requests.post( -+ self.client_identifier_url, -+ data=data, -+ headers=headers) -+ res.raise_for_status() -+ response = res.json() -+ self._user_code = response.get("user_code") -+ self._verification_uri = response.get("verification_uri") -+ self._interval = response.get("interval") -+ self.__device_code = response.get("device_code") -+ self._verification_uri_complete = response.get( -+ "verification_uri_complete") -+ except requests.HTTPError as e: -+ raise requests.HTTPError("HTTP request failed " -+ "while attempting to acquire the tokens." -+ f"Error returned was {res.status_code} " -+ f"{e}") -+ -+ def poll_for_auth_completion(self): -+ """ -+ Continuously poll OIDC token endpoint until the user is successfully -+ authenticated or an error occurs. -+ -+ """ -+ token_data = {'grant_type': GRANT_TYPE_DEVICE_CODE, -+ 'client_id': DEVICE_AUTH_CLIENT_ID, -+ 'device_code': self.__device_code} -+ -+ if not REQUESTS_LOADED: -+ raise Exception("python3-requests is not installed and is required" -+ " for obtaining device auth token.") -+ while self._access_token is None: -+ time.sleep(self._interval) -+ try: -+ check_auth_completion = requests.post(self.token_endpoint, -+ data=token_data) -+ -+ status_code = check_auth_completion.status_code -+ -+ if status_code == 200: -+ logger.info("The SSO authentication is successful") -+ self._set_token_data(check_auth_completion.json()) -+ if status_code not in [200, 400]: -+ raise Exception(status_code, check_auth_completion.text) -+ if status_code == 400 and \ -+ check_auth_completion.json()['error'] not in \ -+ ("authorization_pending", "slow_down"): -+ raise Exception(status_code, check_auth_completion.text) -+ except requests.exceptions.RequestException as e: -+ logger.error(f"Error was found while posting a request: {e}") -+ -+ def _set_token_data(self, token_data): -+ """ -+ Set the class attributes as per the input token_data received. -+ In the future we will persist the token data in a local, -+ in-memory keyring, to avoid visting the browser frequently. 
-+ :param token_data: Token data containing access_token, refresh_token -+ and their expiry etc. -+ """ -+ self._access_token = token_data.get("access_token") -+ self._access_expires_at = datetime.utcnow() + \ -+ timedelta(seconds=token_data.get("expires_in")) -+ self._refresh_token = token_data.get("refresh_token") -+ self._refresh_expires_in = token_data.get("refresh_expires_in") -+ if self._refresh_expires_in == 0: -+ self._refresh_expires_at = datetime.max -+ else: -+ self._refresh_expires_at = datetime.utcnow() + \ -+ timedelta(seconds=self._refresh_expires_in) -+ -+ def get_access_token(self): -+ """ -+ Get the valid access_token at any given time. -+ :return: Access_token -+ :rtype: string -+ """ -+ if self.is_access_token_valid(): -+ return self._access_token -+ else: -+ if self.is_refresh_token_valid(): -+ self._use_refresh_token_grant() -+ return self._access_token -+ else: -+ self._use_device_code_grant() -+ return self._access_token -+ -+ def is_access_token_valid(self): -+ """ -+ Check the validity of access_token. We are considering it invalid 180 -+ sec. prior to it's exact expiry time. -+ :return: True/False -+ -+ """ -+ return self._access_token and self._access_expires_at and \ -+ self._access_expires_at - timedelta(seconds=180) > \ -+ datetime.utcnow() -+ -+ def is_refresh_token_valid(self): -+ """ -+ Check the validity of refresh_token. We are considering it invalid -+ 180 sec. prior to it's exact expiry time. -+ -+ :return: True/False -+ -+ """ -+ return self._refresh_token and self._refresh_expires_at and \ -+ self._refresh_expires_at - timedelta(seconds=180) > \ -+ datetime.utcnow() -+ -+ def _use_refresh_token_grant(self, refresh_token=None): -+ """ -+ Fetch the new access_token and refresh_token using the existing -+ refresh_token and persist it. -+ :param refresh_token: optional param for refresh_token -+ -+ """ -+ if not REQUESTS_LOADED: -+ raise Exception("python3-requests is not installed and is required" -+ " for obtaining device auth token.") -+ refresh_token_data = {'client_id': DEVICE_AUTH_CLIENT_ID, -+ 'grant_type': 'refresh_token', -+ 'refresh_token': self._refresh_token if not -+ refresh_token else refresh_token} -+ -+ refresh_token_res = requests.post(self.token_endpoint, -+ data=refresh_token_data) -+ -+ if refresh_token_res.status_code == 200: -+ self._set_token_data(refresh_token_res.json()) -+ -+ elif refresh_token_res.status_code == 400 and 'invalid' in\ -+ refresh_token_res.json()['error']: -+ logger.warning("Problem while fetching the new tokens from refresh" -+ " token grant - {} {}." 
-+ " New Device code will be requested !".format -+ (refresh_token_res.status_code, -+ refresh_token_res.json()['error'])) -+ self._use_device_code_grant() -+ else: -+ raise Exception( -+ "Something went wrong while using the " -+ "Refresh token grant for fetching tokens:" -+ f" Returned status code {refresh_token_res.status_code}" -+ f" and error {refresh_token_res.json()['error']}") -diff --git a/sos/policies/distros/redhat.py b/sos/policies/distros/redhat.py -index bdbe8f952..02cc4cc2f 100644 ---- a/sos/policies/distros/redhat.py -+++ b/sos/policies/distros/redhat.py -@@ -12,6 +12,7 @@ - import os - import sys - import re -+from sos.policies.auth import DeviceAuthorizationClass - - from sos.report.plugins import RedHatPlugin - from sos.presets.redhat import (RHEL_PRESETS, ATOMIC_PRESETS, RHV, RHEL, -@@ -51,6 +52,10 @@ class RedHatPolicy(LinuxPolicy): - default_container_runtime = 'podman' - sos_pkg_name = 'sos' - sos_bin_path = '/usr/sbin' -+ client_identifier_url = "https://sso.redhat.com/auth/"\ -+ "realms/redhat-external/protocol/openid-connect/auth/device" -+ token_endpoint = "https://sso.redhat.com/auth/realms/"\ -+ "redhat-external/protocol/openid-connect/token" - - def __init__(self, sysroot=None, init=None, probe_runtime=True, - remote_exec=None): -@@ -228,6 +233,7 @@ class RHELPolicy(RedHatPolicy): - """ + disclaimer_text + "%(vendor_text)s\n") - _upload_url = RH_SFTP_HOST - _upload_method = 'post' -+ _device_token = None - - def __init__(self, sysroot=None, init=None, probe_runtime=True, - remote_exec=None): -@@ -266,24 +272,23 @@ def check(cls, remote=''): - - def prompt_for_upload_user(self): - if self.commons['cmdlineopts'].upload_user: -- return -- # Not using the default, so don't call this prompt for RHCP -- if self.commons['cmdlineopts'].upload_url: -- super(RHELPolicy, self).prompt_for_upload_user() -- return -- if not self.get_upload_user(): -- if self.case_id: -- self.upload_user = input(_( -- "Enter your Red Hat Customer Portal username for " -- "uploading [empty for anonymous SFTP]: ") -- ) -- else: # no case id provided => failover to SFTP -- self.upload_url = RH_SFTP_HOST -- self.ui_log.info("No case id provided, uploading to SFTP") -- self.upload_user = input(_( -- "Enter your Red Hat Customer Portal username for " -- "uploading to SFTP [empty for anonymous]: ") -- ) -+ self.ui_log.info( -+ _("The option --upload-user has been deprecated in favour" -+ " of device authorization in RHEL") -+ ) -+ if not self.case_id: -+ # no case id provided => failover to SFTP -+ self.upload_url = RH_SFTP_HOST -+ self.ui_log.info("No case id provided, uploading to SFTP") -+ -+ def prompt_for_upload_password(self): -+ # With OIDC we don't ask for user/pass anymore -+ if self.commons['cmdlineopts'].upload_pass: -+ self.ui_log.info( -+ _("The option --upload-pass has been deprecated in favour" -+ " of device authorization in RHEL") -+ ) -+ return - - def get_upload_url(self): - if self.upload_url: -@@ -292,10 +297,42 @@ def get_upload_url(self): - return self.commons['cmdlineopts'].upload_url - elif self.commons['cmdlineopts'].upload_protocol == 'sftp': - return RH_SFTP_HOST -+ elif not self.commons['cmdlineopts'].case_id: -+ self.ui_log.info("No case id provided, uploading to SFTP") -+ return RH_SFTP_HOST - else: - rh_case_api = "/support/v1/cases/%s/attachments" - return RH_API_HOST + rh_case_api % self.case_id - -+ def _get_upload_https_auth(self): -+ str_auth = "Bearer {}".format(self._device_token) -+ return {'Authorization': str_auth} -+ -+ def _upload_https_post(self, 
archive, verify=True): -+ """If upload_https() needs to use requests.post(), use this method. -+ -+ Policies should override this method instead of the base upload_https() -+ -+ :param archive: The open archive file object -+ """ -+ files = { -+ 'file': (archive.name.split('/')[-1], archive, -+ self._get_upload_headers()) -+ } -+ # Get the access token at this point. With this, -+ # we cover the cases where report generation takes -+ # longer than the token timeout -+ RHELAuth = DeviceAuthorizationClass( -+ self.client_identifier_url, -+ self.token_endpoint -+ ) -+ self._device_token = RHELAuth.get_access_token() -+ self.ui_log.info("Device authorized correctly. Uploading file to " -+ f"{self.get_upload_url_string()}") -+ return requests.post(self.get_upload_url(), files=files, -+ headers=self._get_upload_https_auth(), -+ verify=verify) -+ - def _get_upload_headers(self): - if self.get_upload_url().startswith(RH_API_HOST): - return {'isPrivate': 'false', 'cache-control': 'no-cache'} -@@ -332,15 +369,38 @@ def upload_sftp(self): - " for obtaining SFTP auth token.") - _token = None - _user = None -+ -+ # We may have a device token already if we attempted -+ # to upload via http but the upload failed. So -+ # lets check first if there isn't one. -+ if not self._device_token: -+ try: -+ RHELAuth = DeviceAuthorizationClass( -+ self.client_identifier_url, -+ self.token_endpoint -+ ) -+ except Exception as e: -+ # We end up here if the user cancels the device -+ # authentication in the web interface -+ if "end user denied" in str(e): -+ self.ui_log.info( -+ "Device token authorization " -+ "has been cancelled by the user." -+ ) -+ else: -+ self._device_token = RHELAuth.get_access_token() -+ if self._device_token: -+ self.ui_log.info("Device authorized correctly. Uploading file to" -+ f" {self.get_upload_url_string()}") -+ - url = RH_API_HOST + '/support/v2/sftp/token' -- # we have a username and password, but we need to reset the password -- # to be the token returned from the auth endpoint -- if self.get_upload_user() and self.get_upload_password(): -- auth = self.get_upload_https_auth() -- ret = requests.post(url, auth=auth, timeout=10) -+ ret = None -+ if self._device_token: -+ headers = self._get_upload_https_auth() -+ ret = requests.post(url, headers=headers, timeout=10) - if ret.status_code == 200: - # credentials are valid -- _user = self.get_upload_user() -+ _user = json.loads(ret.text)['username'] - _token = json.loads(ret.text)['token'] - else: - self.ui_log.debug( -@@ -351,8 +411,7 @@ def upload_sftp(self): - "Unable to retrieve Red Hat auth token using provided " - "credentials. Will try anonymous." 
- ) -- # we either do not have a username or password/token, or both -- if not _token: -+ else: - adata = {"isAnonymous": True} - anon = requests.post(url, data=json.dumps(adata), timeout=10) - if anon.status_code == 200: -@@ -368,7 +427,6 @@ def upload_sftp(self): - f"DEBUG: anonymous request failed (status: " - f"{anon.status_code}): {anon.json()}" - ) -- - if _user and _token: - return super(RHELPolicy, self).upload_sftp(user=_user, - password=_token) -@@ -380,17 +438,18 @@ def upload_archive(self, archive): - """ - try: - if self.upload_url and self.upload_url.startswith(RH_API_HOST) and\ -- (not self.get_upload_user() or not self.get_upload_password()): -+ (not self.get_upload_user() or -+ not self.get_upload_password()): - self.upload_url = RH_SFTP_HOST - uploaded = super(RHELPolicy, self).upload_archive(archive) -- except Exception: -+ except Exception as e: - uploaded = False - if not self.upload_url.startswith(RH_API_HOST): - raise - else: - self.ui_log.error( -- _(f"Upload to Red Hat Customer Portal failed. Trying " -- f"{RH_SFTP_HOST}") -+ _(f"Upload to Red Hat Customer Portal failed due to " -+ f"{e}. Trying {RH_SFTP_HOST}") - ) - self.upload_url = RH_SFTP_HOST - uploaded = super(RHELPolicy, self).upload_archive(archive) -From d338a232cd7c829ca8ca5e5febef51035d1f7da5 Mon Sep 17 00:00:00 2001 -From: Pavel Moravec -Date: Wed, 10 Jan 2024 16:47:44 +0100 -Subject: [PATCH] [build] Bump version to 4.6.1 - -Signed-off-by: Pavel Moravec ---- - docs/conf.py | 4 ++-- - sos.spec | 5 ++++- - sos/__init__.py | 2 +- - 3 files changed, 7 insertions(+), 4 deletions(-) - -diff --git a/docs/conf.py b/docs/conf.py -index 5f105373e..57d1b9297 100644 ---- a/docs/conf.py -+++ b/docs/conf.py -@@ -59,9 +59,9 @@ - # built documents. - # - # The short X.Y version. --version = '4.6.0' -+version = '4.6.1' - # The full version, including alpha/beta/rc tags. --release = '4.6.0' -+release = '4.6.1' - - # The language for content autogenerated by Sphinx. Refer to documentation - # for a list of supported languages. -diff --git a/sos.spec b/sos.spec -index b575b5232..a08e2857b 100644 ---- a/sos.spec -+++ b/sos.spec -@@ -1,6 +1,6 @@ - Summary: A set of tools to gather troubleshooting information from a system - Name: sos --Version: 4.6.0 -+Version: 4.6.1 - Release: 1%{?dist} - Source0: https://github.com/sosreport/sos/archive/%{name}-%{version}.tar.gz - License: GPL-2.0-or-later -@@ -90,6 +90,9 @@ rm -rf %{buildroot}/usr/config/ - %config(noreplace) %{_sysconfdir}/sos/sos.conf - - %changelog -+* Wed Jan 10 2024 Pavel Moravec = 4.6.1 -+- New upstream release -+ - * Thu Aug 17 2023 Jake Hunsaker = 4.6.0 - - New upstream release - -diff --git a/sos/__init__.py b/sos/__init__.py -index 78e452676..18d18c4c7 100644 ---- a/sos/__init__.py -+++ b/sos/__init__.py -@@ -14,7 +14,7 @@ - This module houses the i18n setup and message function. The default is to use - gettext to internationalize messages. 
- """ --__version__ = "4.6.0" -+__version__ = "4.6.1" - - import os - import sys diff --git a/SPECS/sos.spec b/SPECS/sos.spec index 0962491..864c1a9 100644 --- a/SPECS/sos.spec +++ b/SPECS/sos.spec @@ -4,7 +4,7 @@ Summary: A set of tools to gather troubleshooting information from a system Name: sos -Version: 4.6.1 +Version: 4.7.0 Release: 1%{?dist} Group: Applications/System Source0: https://github.com/sosreport/sos/archive/%{version}/sos-%{version}.tar.gz @@ -22,7 +22,6 @@ Recommends: python3-pexpect Recommends: python3-pyyaml Conflicts: vdsm < 4.40 Obsoletes: sos-collector <= 1.9 -Patch1: sos-RHEL-21178-device-auth.patch %description Sos is a set of tools that gathers information about system @@ -33,7 +32,6 @@ support technicians and developers. %prep %setup -qn %{name}-%{version} %setup -T -D -a1 -q -%patch1 -p1 %build %py3_build @@ -106,6 +104,10 @@ of the system. Currently storage and filesystem commands are audited. %changelog +* Tue Feb 20 2024 Jan Jansky = 4.7.0-1 +- rebase to upstream 4.7.0 + Resolves: RHEL-26115 + * Thu Jan 11 2024 Pavel Moravec = 4.6.1-1 - rebase to upstream 4.6.1 Resolves: RHEL-21174 From 0173d9a8648e4d7a7f45effdf5ad012bc9dbdcd6 Mon Sep 17 00:00:00 2001 From: MSVSphere Packaging Team Date: Mon, 24 Jun 2024 03:30:23 +0300 Subject: [PATCH 6/6] import sos-4.7.1-3.el9 --- .gitignore | 2 +- .sos.metadata | 2 +- .../sos-RHEL-35945-sos-clean-on-archive.patch | 30 +++++++++++++++++++ SPECS/sos.spec | 14 +++++++-- 4 files changed, 44 insertions(+), 4 deletions(-) create mode 100644 SOURCES/sos-RHEL-35945-sos-clean-on-archive.patch diff --git a/.gitignore b/.gitignore index 8f5236b..4aaeab5 100644 --- a/.gitignore +++ b/.gitignore @@ -1,2 +1,2 @@ -SOURCES/sos-4.7.0.tar.gz +SOURCES/sos-4.7.1.tar.gz SOURCES/sos-audit-0.3.tgz diff --git a/.sos.metadata b/.sos.metadata index 67aeec0..273af72 100644 --- a/.sos.metadata +++ b/.sos.metadata @@ -1,2 +1,2 @@ -7d1629848263be2d613983fb15cd418dccdf1c76 SOURCES/sos-4.7.0.tar.gz +9ced981872d308e13c5dc47fee21071592ceefc2 SOURCES/sos-4.7.1.tar.gz 9d478b9f0085da9178af103078bbf2fd77b0175a SOURCES/sos-audit-0.3.tgz diff --git a/SOURCES/sos-RHEL-35945-sos-clean-on-archive.patch b/SOURCES/sos-RHEL-35945-sos-clean-on-archive.patch new file mode 100644 index 0000000..ddfeaca --- /dev/null +++ b/SOURCES/sos-RHEL-35945-sos-clean-on-archive.patch @@ -0,0 +1,30 @@ +From a0c2586e230c9600d3d3f70ab89c9f6eb52ed3ed Mon Sep 17 00:00:00 2001 +From: Pavel Moravec +Date: Tue, 23 Apr 2024 11:00:11 +0200 +Subject: [PATCH] [archive] Fix get_archive_root after files reordering + +Commit d5d8c21 reordered files in the archive, such that the first +member is not the archive root directory further more. Let change the +get_archive_root method accordingly to prevent self.archive_root being +empty. 
+ +Resolves: #3616 + +Signed-off-by: Pavel Moravec +--- + sos/cleaner/archives/__init__.py | 2 +- + 1 file changed, 1 insertion(+), 1 deletion(-) + +diff --git a/sos/cleaner/archives/__init__.py b/sos/cleaner/archives/__init__.py +index f7c5eb587..0fa1ef43f 100644 +--- a/sos/cleaner/archives/__init__.py ++++ b/sos/cleaner/archives/__init__.py +@@ -104,7 +104,7 @@ def get_archive_root(self): + if toplevel.isdir(): + return toplevel.name + else: +- return os.sep ++ return os.path.dirname(toplevel.name) or os.sep + return os.path.abspath(self.archive_path) + + def report_msg(self, msg): diff --git a/SPECS/sos.spec b/SPECS/sos.spec index 864c1a9..2fe65b8 100644 --- a/SPECS/sos.spec +++ b/SPECS/sos.spec @@ -4,8 +4,8 @@ Summary: A set of tools to gather troubleshooting information from a system Name: sos -Version: 4.7.0 -Release: 1%{?dist} +Version: 4.7.1 +Release: 3%{?dist} Group: Applications/System Source0: https://github.com/sosreport/sos/archive/%{version}/sos-%{version}.tar.gz Source1: sos-audit-%{auditversion}.tgz @@ -22,6 +22,7 @@ Recommends: python3-pexpect Recommends: python3-pyyaml Conflicts: vdsm < 4.40 Obsoletes: sos-collector <= 1.9 +Patch1: sos-RHEL-35945-sos-clean-on-archive.patch %description Sos is a set of tools that gathers information about system @@ -32,6 +33,7 @@ support technicians and developers. %prep %setup -qn %{name}-%{version} %setup -T -D -a1 -q +%patch1 -p1 %build %py3_build @@ -104,6 +106,14 @@ of the system. Currently storage and filesystem commands are audited. %changelog +* Thu May 09 2024 Pavel Moravec = 4.7.1-3 +- [archive] Fix get_archive_root after files reordering + Resolves: RHEL-35945 + +* Mon Apr 08 2024 Jan Jansky = 4.7.1-1 +- rebase to upstream 4.7.1 + Resolves: RHEL-32106 + * Tue Feb 20 2024 Jan Jansky = 4.7.0-1 - rebase to upstream 4.7.0 Resolves: RHEL-26115