From 5af0dfb7feab1b6e5ccdb3de31935226bbc314ed45de7e3a7167c8fa5a9b45e5 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Pablo=20Su=C3=A1rez=20Hern=C3=A1ndez?= Date: Thu, 16 Sep 2021 07:59:31 +0000 Subject: [PATCH] osc copypac from project:systemsmanagement:saltstack:testing package:salt revision:418 OBS-URL: https://build.opensuse.org/package/show/systemsmanagement:saltstack/salt?expand=0&rev=189 --- _lastrevision | 2 +- ...h64-to-rpm-package-architectures-405.patch | 58 ++ ...iminary-support-for-rocky.-59682-391.patch | 97 ++ ...rt-of-upstream-pr59492-to-3002.2-404.patch | 227 +++++ backport-thread.is_alive-fix-390.patch | 127 +++ ...of-bad-public-keys-from-minions-bsc-.patch | 218 +++++ ...notify-is-executable-bsc-1186674-376.patch | 23 + ...ices-states-when-running-systemd-in-.patch | 631 ++++++++++++ ...use-shell-sbin-nologin-in-requisites.patch | 39 + ...when-inotify-beacon-is-missing-pyino.patch | 30 + ...nscap-module-add-xccdf_eval-call-386.patch | 514 ++++++++++ ...-path-of-a-download-url-to-prevent-i.patch | 69 ++ ...n-interpreter-to-use-inside-containe.patch | 126 +++ ...ng-in-openscap-module-bsc-1188647-40.patch | 40 + ...-yumpkg.remove-for-not-installed-pac.patch | 64 ++ fix-failing-unit-tests-for-systemd.patch | 80 ++ ...g-errors-in-ansiblegate-state-module.patch | 44 + ...ing-minion-returns-in-batch-mode-360.patch | 30 + ...ptables-state-module-bsc-1185131-372.patch | 433 +++++++++ ...port-old-non-intel-kernels-bsc-11806.patch | 51 + ...ps-data-when-states-are-applied-by-t.patch | 109 +++ ...on-stopped-pools-in-virt.vm_info-373.patch | 152 +++ ...f-held-unheld-functions-for-state-pk.patch | 903 ++++++++++++++++++ ...dor-change-logic-to-zypper-class-355.patch | 843 ++++++++++++++++ ...t-of-version-provided-during-pkg-rem.patch | 165 ++++ ...injection-in-the-snapper-module-bsc-.patch | 43 + salt-tmpfiles.d | 6 +- salt.changes | 188 +++- salt.spec | 409 +++++--- ...he-globals-up-to-the-environment-jin.patch | 112 +++ ...date-detect-recursion-in-the-executo.patch | 52 + transactional_update.conf | 4 + ...or-when-getting-domain-capabilities-.patch | 74 ++ virt-use-dev-kvm-to-detect-kvm-383.patch | 77 ++ 34 files changed, 5884 insertions(+), 156 deletions(-) create mode 100644 add-missing-aarch64-to-rpm-package-architectures-405.patch create mode 100644 adding-preliminary-support-for-rocky.-59682-391.patch create mode 100644 backport-of-upstream-pr59492-to-3002.2-404.patch create mode 100644 backport-thread.is_alive-fix-390.patch create mode 100644 better-handling-of-bad-public-keys-from-minions-bsc-.patch create mode 100644 check-if-dpkgnotify-is-executable-bsc-1186674-376.patch create mode 100644 do-noop-for-services-states-when-running-systemd-in-.patch create mode 100644 don-t-use-shell-sbin-nologin-in-requisites.patch create mode 100644 enhance-logging-when-inotify-beacon-is-missing-pyino.patch create mode 100644 enhance-openscap-module-add-xccdf_eval-call-386.patch create mode 100644 exclude-the-full-path-of-a-download-url-to-prevent-i.patch create mode 100644 figure-out-python-interpreter-to-use-inside-containe.patch create mode 100644 fix-error-handling-in-openscap-module-bsc-1188647-40.patch create mode 100644 fix-exception-in-yumpkg.remove-for-not-installed-pac.patch create mode 100644 fix-failing-unit-tests-for-systemd.patch create mode 100644 fix-issue-parsing-errors-in-ansiblegate-state-module.patch create mode 100644 fix-missing-minion-returns-in-batch-mode-360.patch create mode 100644 fix-save-for-iptables-state-module-bsc-1185131-372.patch create mode 100644 
grains.extra-support-old-non-intel-kernels-bsc-11806.patch create mode 100644 handle-master-tops-data-when-states-are-applied-by-t.patch create mode 100644 handle-volumes-on-stopped-pools-in-virt.vm_info-373.patch create mode 100644 implementation-of-held-unheld-functions-for-state-pk.patch create mode 100644 move-vendor-change-logic-to-zypper-class-355.patch create mode 100644 parsing-epoch-out-of-version-provided-during-pkg-rem.patch create mode 100644 prevent-command-injection-in-the-snapper-module-bsc-.patch create mode 100644 templates-move-the-globals-up-to-the-environment-jin.patch create mode 100644 transactional_update-detect-recursion-in-the-executo.patch create mode 100644 transactional_update.conf create mode 100644 virt-pass-emulator-when-getting-domain-capabilities-.patch create mode 100644 virt-use-dev-kvm-to-detect-kvm-383.patch diff --git a/_lastrevision b/_lastrevision index 463ecd2..3ea8210 100644 --- a/_lastrevision +++ b/_lastrevision @@ -1 +1 @@ -a94708ad2eba9aa15413d989ab3361b2c980589e \ No newline at end of file +71392e10750f7481475066788a23a39ad92d0c64 \ No newline at end of file diff --git a/add-missing-aarch64-to-rpm-package-architectures-405.patch b/add-missing-aarch64-to-rpm-package-architectures-405.patch new file mode 100644 index 0000000..62afcbc --- /dev/null +++ b/add-missing-aarch64-to-rpm-package-architectures-405.patch @@ -0,0 +1,58 @@ +From e7723f081cc79088156a986cf940349fec7f00a3 Mon Sep 17 00:00:00 2001 +From: Victor Zhestkov <35733135+vzhestkov@users.noreply.github.com> +Date: Wed, 18 Aug 2021 15:05:42 +0300 +Subject: [PATCH] Add missing aarch64 to rpm package architectures + (#405) + +Required to prevent false negative results on using pkg.installed +with architecture specification in package name (ex. `bash.aarch64`) +--- + salt/utils/pkg/rpm.py | 2 +- + tests/unit/modules/test_zypperpkg.py | 20 ++++++++++++++++++++ + 2 files changed, 21 insertions(+), 1 deletion(-) + +diff --git a/salt/utils/pkg/rpm.py b/salt/utils/pkg/rpm.py +index d1b149ea0b..8b8ea2e4b1 100644 +--- a/salt/utils/pkg/rpm.py ++++ b/salt/utils/pkg/rpm.py +@@ -33,7 +33,7 @@ ARCHES_ALPHA = ( + "alphaev68", + "alphaev7", + ) +-ARCHES_ARM = ("armv5tel", "armv5tejl", "armv6l", "armv7l") ++ARCHES_ARM = ("armv5tel", "armv5tejl", "armv6l", "armv7l", "aarch64") + ARCHES_SH = ("sh3", "sh4", "sh4a") + + ARCHES = ( +diff --git a/tests/unit/modules/test_zypperpkg.py b/tests/unit/modules/test_zypperpkg.py +index 5c01bbbfbd..d6a6a6d852 100644 +--- a/tests/unit/modules/test_zypperpkg.py ++++ b/tests/unit/modules/test_zypperpkg.py +@@ -2477,3 +2477,23 @@ pattern() = package-c""" + with patch("salt.modules.zypperpkg.__zypper__", zypper_mock): + assert zypper.services_need_restart() == expected + zypper_mock(root=None).nolock.call.assert_called_with("ps", "-sss") ++ ++ def test_normalize_name(self): ++ """ ++ Test that package is normalized only when it should be ++ """ ++ with patch.dict(zypper.__grains__, {"osarch": "x86_64"}): ++ result = zypper.normalize_name("foo") ++ assert result == "foo", result ++ result = zypper.normalize_name("foo.x86_64") ++ assert result == "foo", result ++ result = zypper.normalize_name("foo.noarch") ++ assert result == "foo", result ++ ++ with patch.dict(zypper.__grains__, {"osarch": "aarch64"}): ++ result = zypper.normalize_name("foo") ++ assert result == "foo", result ++ result = zypper.normalize_name("foo.aarch64") ++ assert result == "foo", result ++ result = zypper.normalize_name("foo.noarch") ++ assert result == "foo", result +-- +2.32.0 + + diff --git 
a/adding-preliminary-support-for-rocky.-59682-391.patch b/adding-preliminary-support-for-rocky.-59682-391.patch new file mode 100644 index 0000000..5b2eee1 --- /dev/null +++ b/adding-preliminary-support-for-rocky.-59682-391.patch @@ -0,0 +1,97 @@ +From 34a913b0b54b55edf042dc899250e56ef0eaec77 Mon Sep 17 00:00:00 2001 +From: Victor Zhestkov <35733135+vzhestkov@users.noreply.github.com> +Date: Mon, 5 Jul 2021 18:57:26 +0300 +Subject: [PATCH] Adding preliminary support for Rocky. (#59682) (#391) + +* Adding preliminary support for Rocky. + +* Adding changelog and test per MR guidence. + +* Update test_core.py + +Fix a clean up issue + +Co-authored-by: Megan Wilhite +Co-authored-by: Gareth J. Greenaway + +Co-authored-by: StackKorora <42156355+StackKorora@users.noreply.github.com> +Co-authored-by: Megan Wilhite +Co-authored-by: Gareth J. Greenaway +--- + changelog/59682.added | 1 + + salt/grains/core.py | 2 ++ + tests/unit/grains/test_core.py | 29 +++++++++++++++++++++++++++++ + 3 files changed, 32 insertions(+) + create mode 100644 changelog/59682.added + +diff --git a/changelog/59682.added b/changelog/59682.added +new file mode 100644 +index 0000000000..93b4a3d1fc +--- /dev/null ++++ b/changelog/59682.added +@@ -0,0 +1 @@ ++Rocky Linux has been added to the RedHat os_family. +diff --git a/salt/grains/core.py b/salt/grains/core.py +index 2b965a2a8a..ace0e4bff9 100644 +--- a/salt/grains/core.py ++++ b/salt/grains/core.py +@@ -1547,6 +1547,7 @@ _OS_NAME_MAP = { + "slesexpand": "RES", + "linuxmint": "Mint", + "neon": "KDE neon", ++ "rocky": "Rocky", + "alibabaclo": "Alinux", + } + +@@ -1621,6 +1622,7 @@ _OS_FAMILY_MAP = { + "Funtoo": "Gentoo", + "AIX": "AIX", + "TurnKey": "Debian", ++ "Rocky": "RedHat", + "AstraLinuxCE": "Debian", + "Alinux": "RedHat", + } +diff --git a/tests/unit/grains/test_core.py b/tests/unit/grains/test_core.py +index 8280d6de47..61a6956e32 100644 +--- a/tests/unit/grains/test_core.py ++++ b/tests/unit/grains/test_core.py +@@ -678,6 +678,35 @@ class CoreGrainsTestCase(TestCase, LoaderModuleMockMixin): + } + self._run_os_grains_tests(None, _os_release_map, expectation) + ++ @skipIf(not salt.utils.platform.is_linux(), "System is not Linux") ++ def test_rocky_8_os_grains(self): ++ """ ++ Test if OS grains are parsed correctly in Rocky 8 ++ """ ++ _os_release_map = { ++ "os_release_file": { ++ "NAME": "Rocky", ++ "VERSION_ID": "8.3", ++ "PRETTY_NAME": "Rocky 8", ++ "ID": "Rocky", ++ "ANSI_COLOR": "0;31", ++ "CPE_NAME": "cpe:/o:rocky:rocky:8.3", ++ }, ++ "_linux_distribution": ("rocky", "8.3", ""), ++ } ++ ++ expectation = { ++ "os": "Rocky", ++ "os_family": "RedHat", ++ "oscodename": "Rocky 8", ++ "osfullname": "Rocky", ++ "osrelease": "8.3", ++ "osrelease_info": (8, 3,), ++ "osmajorrelease": 8, ++ "osfinger": "Rocky-8", ++ } ++ self._run_os_grains_tests(None, _os_release_map, expectation) ++ + @skipIf(not salt.utils.platform.is_linux(), "System is not Linux") + def test_almalinux_8_os_grains(self): + """ +-- +2.32.0 + + diff --git a/backport-of-upstream-pr59492-to-3002.2-404.patch b/backport-of-upstream-pr59492-to-3002.2-404.patch new file mode 100644 index 0000000..ffb1cde --- /dev/null +++ b/backport-of-upstream-pr59492-to-3002.2-404.patch @@ -0,0 +1,227 @@ +From fba6631e0a66a5f8ea76a104e9acf385ce06471c Mon Sep 17 00:00:00 2001 +From: Victor Zhestkov <35733135+vzhestkov@users.noreply.github.com> +Date: Wed, 18 Aug 2021 15:05:30 +0300 +Subject: [PATCH] Backport of upstream PR59492 to 3002.2 (#404) + +* Fix failing integration tests + +* Fix unless logic and failing tests + +* 
Revert some of the changes in the onlyif code + +Co-authored-by: twangboy +--- + salt/state.py | 24 +++++++++------ + .../files/file/base/issue-35384.sls | 7 +++++ + tests/unit/test_state.py | 30 ++++++++++++++----- + 3 files changed, 44 insertions(+), 17 deletions(-) + +diff --git a/salt/state.py b/salt/state.py +index 070a914636..64c5225728 100644 +--- a/salt/state.py ++++ b/salt/state.py +@@ -929,7 +929,8 @@ class State: + + def _run_check_onlyif(self, low_data, cmd_opts): + """ +- Check that unless doesn't return 0, and that onlyif returns a 0. ++ Make sure that all commands return True for the state to run. If any ++ command returns False (non 0), the state will not run + """ + ret = {"result": False} + +@@ -938,7 +939,9 @@ class State: + else: + low_data_onlyif = low_data["onlyif"] + ++ # If any are False the state will NOT run + def _check_cmd(cmd): ++ # Don't run condition (False) + if cmd != 0 and ret["result"] is False: + ret.update( + { +@@ -1001,7 +1004,8 @@ class State: + + def _run_check_unless(self, low_data, cmd_opts): + """ +- Check that unless doesn't return 0, and that onlyif returns a 0. ++ Check if any of the commands return False (non 0). If any are False the ++ state will run. + """ + ret = {"result": False} + +@@ -1010,8 +1014,10 @@ class State: + else: + low_data_unless = low_data["unless"] + ++ # If any are False the state will run + def _check_cmd(cmd): +- if cmd == 0 and ret["result"] is False: ++ # Don't run condition ++ if cmd == 0: + ret.update( + { + "comment": "unless condition is true", +@@ -1020,9 +1026,10 @@ class State: + } + ) + return False +- elif cmd != 0: ++ else: ++ ret.pop("skip_watch", None) + ret.update({"comment": "unless condition is false", "result": False}) +- return True ++ return True + + for entry in low_data_unless: + if isinstance(entry, str): +@@ -1034,7 +1041,7 @@ class State: + except CommandExecutionError: + # Command failed, so notify unless to skip the item + cmd = 0 +- if not _check_cmd(cmd): ++ if _check_cmd(cmd): + return ret + elif isinstance(entry, dict): + if "fun" not in entry: +@@ -1047,7 +1054,7 @@ class State: + if get_return: + result = salt.utils.data.traverse_dict_and_list(result, get_return) + if self.state_con.get("retcode", 0): +- if not _check_cmd(self.state_con["retcode"]): ++ if _check_cmd(self.state_con["retcode"]): + return ret + elif result: + ret.update( +@@ -1057,11 +1064,11 @@ class State: + "result": True, + } + ) +- return ret + else: + ret.update( + {"comment": "unless condition is false", "result": False} + ) ++ return ret + else: + ret.update( + { +@@ -1069,7 +1076,6 @@ class State: + "result": False, + } + ) +- return ret + + # No reason to stop, return ret + return ret +diff --git a/tests/integration/files/file/base/issue-35384.sls b/tests/integration/files/file/base/issue-35384.sls +index 3c41617ca8..2aa436bb37 100644 +--- a/tests/integration/files/file/base/issue-35384.sls ++++ b/tests/integration/files/file/base/issue-35384.sls +@@ -2,5 +2,12 @@ cmd_run_unless_multiple: + cmd.run: + - name: echo "hello" + - unless: ++ {% if grains["os"] == "Windows" %} ++ - "exit 0" ++ - "exit 1" ++ - "exit 0" ++ {% else %} + - "$(which true)" + - "$(which false)" ++ - "$(which true)" ++ {% endif %} +diff --git a/tests/unit/test_state.py b/tests/unit/test_state.py +index 95018a9cf3..79a261d837 100644 +--- a/tests/unit/test_state.py ++++ b/tests/unit/test_state.py +@@ -142,7 +142,7 @@ class StateCompilerTestCase(TestCase, AdaptedConfigurationTestCaseMixin): + def test_verify_onlyif_cmd_error(self): + """ + 
Simulates a failure in cmd.retcode from onlyif +- This could occur is runas is specified with a user that does not exist ++ This could occur if runas is specified with a user that does not exist + """ + low_data = { + "onlyif": "somecommand", +@@ -175,7 +175,7 @@ class StateCompilerTestCase(TestCase, AdaptedConfigurationTestCaseMixin): + def test_verify_unless_cmd_error(self): + """ + Simulates a failure in cmd.retcode from unless +- This could occur is runas is specified with a user that does not exist ++ This could occur if runas is specified with a user that does not exist + """ + low_data = { + "unless": "somecommand", +@@ -206,6 +206,10 @@ class StateCompilerTestCase(TestCase, AdaptedConfigurationTestCaseMixin): + self.assertEqual(expected_result, return_result) + + def test_verify_unless_list_cmd(self): ++ """ ++ If any of the unless commands return False (non 0) then the state should ++ run (no skip_watch). ++ """ + low_data = { + "state": "cmd", + "name": 'echo "something"', +@@ -217,9 +221,8 @@ class StateCompilerTestCase(TestCase, AdaptedConfigurationTestCaseMixin): + "fun": "run", + } + expected_result = { +- "comment": "unless condition is true", +- "result": True, +- "skip_watch": True, ++ "comment": "unless condition is false", ++ "result": False, + } + with patch("salt.state.State._gather_pillar") as state_patch: + minion_opts = self.get_temp_config("minion") +@@ -228,6 +231,10 @@ class StateCompilerTestCase(TestCase, AdaptedConfigurationTestCaseMixin): + self.assertEqual(expected_result, return_result) + + def test_verify_unless_list_cmd_different_order(self): ++ """ ++ If any of the unless commands return False (non 0) then the state should ++ run (no skip_watch). The order shouldn't matter. ++ """ + low_data = { + "state": "cmd", + "name": 'echo "something"', +@@ -239,9 +246,8 @@ class StateCompilerTestCase(TestCase, AdaptedConfigurationTestCaseMixin): + "fun": "run", + } + expected_result = { +- "comment": "unless condition is true", +- "result": True, +- "skip_watch": True, ++ "comment": "unless condition is false", ++ "result": False, + } + with patch("salt.state.State._gather_pillar") as state_patch: + minion_opts = self.get_temp_config("minion") +@@ -272,6 +278,10 @@ class StateCompilerTestCase(TestCase, AdaptedConfigurationTestCaseMixin): + self.assertEqual(expected_result, return_result) + + def test_verify_unless_list_cmd_valid(self): ++ """ ++ If any of the unless commands return False (non 0) then the state should ++ run (no skip_watch). This tests all commands return False. ++ """ + low_data = { + "state": "cmd", + "name": 'echo "something"', +@@ -308,6 +318,10 @@ class StateCompilerTestCase(TestCase, AdaptedConfigurationTestCaseMixin): + self.assertEqual(expected_result, return_result) + + def test_verify_unless_list_cmd_invalid(self): ++ """ ++ If any of the unless commands return False (non 0) then the state should ++ run (no skip_watch). 
This tests all commands return True ++ """ + low_data = { + "state": "cmd", + "name": 'echo "something"', +-- +2.32.0 + + diff --git a/backport-thread.is_alive-fix-390.patch b/backport-thread.is_alive-fix-390.patch new file mode 100644 index 0000000..a4bc135 --- /dev/null +++ b/backport-thread.is_alive-fix-390.patch @@ -0,0 +1,127 @@ +From a782af246a2f3d4b91afee2ee847c87f71e8904b Mon Sep 17 00:00:00 2001 +From: Alexander Graul +Date: Fri, 25 Jun 2021 13:34:38 +0200 +Subject: [PATCH] Backport Thread.is_alive fix (#390) + +* Change thread.isAlive() to thread.is_alive() + +(cherry picked from commit b1dc0cee03896c8abad55a609805b0be6c7aaefa) + +* Run pre-commit on salt/utils/timed_subprocess.py + +(cherry picked from commit 178e3b83e6c21abf5d6db454c19c104ceb8bd92c) + +* Fix the six removal made by pre-commit + +(cherry picked from commit aaa8ca3b7f129568637799d6d49d7ad3708f73bc) + +* Remove the PY2 code in salt/utils/timed_subprocess.py + +(cherry picked from commit 3a702a510b965e9af1ad318c953e19114925357e) + +Co-authored-by: Petr Messner +Co-authored-by: Petr Messner +--- + salt/utils/timed_subprocess.py | 39 ++++++++++++++-------------------- + 1 file changed, 16 insertions(+), 23 deletions(-) + +diff --git a/salt/utils/timed_subprocess.py b/salt/utils/timed_subprocess.py +index 5c4ac35ac3..b043a3bde2 100644 +--- a/salt/utils/timed_subprocess.py ++++ b/salt/utils/timed_subprocess.py +@@ -1,8 +1,6 @@ +-# -*- coding: utf-8 -*- + """ + For running command line executables with a timeout + """ +-from __future__ import absolute_import, print_function, unicode_literals + + import shlex + import subprocess +@@ -10,10 +8,9 @@ import threading + + import salt.exceptions + import salt.utils.data +-from salt.ext import six + + +-class TimedProc(object): ++class TimedProc: + """ + Create a TimedProc object, calls subprocess.Popen with passed args and **kwargs + """ +@@ -46,7 +43,7 @@ class TimedProc(object): + + if self.timeout and not isinstance(self.timeout, (int, float)): + raise salt.exceptions.TimedProcTimeoutError( +- "Error: timeout {0} must be a number".format(self.timeout) ++ "Error: timeout {} must be a number".format(self.timeout) + ) + if kwargs.get("shell", False): + args = salt.utils.data.decode(args, to_str=True) +@@ -59,28 +56,24 @@ class TimedProc(object): + try: + args = shlex.split(args) + except AttributeError: +- args = shlex.split(six.text_type(args)) ++ args = shlex.split(str(args)) + str_args = [] + for arg in args: +- if not isinstance(arg, six.string_types): +- str_args.append(six.text_type(arg)) ++ if not isinstance(arg, str): ++ str_args.append(str(arg)) + else: + str_args.append(arg) + args = str_args + else: +- if not isinstance(args, (list, tuple, six.string_types)): ++ if not isinstance(args, (list, tuple, str)): + # Handle corner case where someone does a 'cmd.run 3' +- args = six.text_type(args) ++ args = str(args) + # Ensure that environment variables are strings +- for key, val in six.iteritems(kwargs.get("env", {})): +- if not isinstance(val, six.string_types): +- kwargs["env"][key] = six.text_type(val) +- if not isinstance(key, six.string_types): +- kwargs["env"][six.text_type(key)] = kwargs["env"].pop(key) +- if six.PY2 and "env" in kwargs: +- # Ensure no unicode in custom env dict, as it can cause +- # problems with subprocess. 
+- kwargs["env"] = salt.utils.data.encode_dict(kwargs["env"]) ++ for key, val in kwargs.get("env", {}).items(): ++ if not isinstance(val, str): ++ kwargs["env"][key] = str(val) ++ if not isinstance(key, str): ++ kwargs["env"][str(key)] = kwargs["env"].pop(key) + args = salt.utils.data.decode(args) + self.process = subprocess.Popen(args, **kwargs) + self.command = args +@@ -103,18 +96,18 @@ class TimedProc(object): + rt = threading.Thread(target=receive) + rt.start() + rt.join(self.timeout) +- if rt.isAlive(): ++ if rt.is_alive(): + # Subprocess cleanup (best effort) + self.process.kill() + + def terminate(): +- if rt.isAlive(): ++ if rt.is_alive(): + self.process.terminate() + + threading.Timer(10, terminate).start() + raise salt.exceptions.TimedProcTimeoutError( +- "{0} : Timed out after {1} seconds".format( +- self.command, six.text_type(self.timeout), ++ "{} : Timed out after {} seconds".format( ++ self.command, str(self.timeout), + ) + ) + return self.process.returncode +-- +2.32.0 + + diff --git a/better-handling-of-bad-public-keys-from-minions-bsc-.patch b/better-handling-of-bad-public-keys-from-minions-bsc-.patch new file mode 100644 index 0000000..62f7386 --- /dev/null +++ b/better-handling-of-bad-public-keys-from-minions-bsc-.patch @@ -0,0 +1,218 @@ +From cd64b9a063771829f85d6be0e42259825cfb10c8 Mon Sep 17 00:00:00 2001 +From: "Daniel A. Wozniak" +Date: Mon, 2 Aug 2021 13:50:37 -0700 +Subject: [PATCH] Better handling of bad public keys from minions + (bsc#1189040) + +Add changelog for #57733 + +Fix pre-commit check + +Add missing test + +Fix test on older pythons +--- + changelog/57733.fixed | 1 + + salt/crypt.py | 11 ++++++-- + salt/exceptions.py | 6 ++++ + salt/key.py | 15 ++++++++-- + salt/transport/mixins/auth.py | 12 ++++---- + .../pytests/integration/cli/test_salt_key.py | 28 +++++++++++++++++++ + tests/pytests/unit/test_crypt.py | 20 +++++++++++++ + 7 files changed, 83 insertions(+), 10 deletions(-) + create mode 100644 changelog/57733.fixed + create mode 100644 tests/pytests/unit/test_crypt.py + +diff --git a/changelog/57733.fixed b/changelog/57733.fixed +new file mode 100644 +index 0000000000..0cd55b19a6 +--- /dev/null ++++ b/changelog/57733.fixed +@@ -0,0 +1 @@ ++Better handling of bad RSA public keys from minions +diff --git a/salt/crypt.py b/salt/crypt.py +index 0a8b728f50..e6e4f3181e 100644 +--- a/salt/crypt.py ++++ b/salt/crypt.py +@@ -36,6 +36,7 @@ import salt.utils.verify + import salt.version + from salt.exceptions import ( + AuthenticationError, ++ InvalidKeyError, + MasterExit, + SaltClientError, + SaltReqTimeoutError, +@@ -217,10 +218,16 @@ def get_rsa_pub_key(path): + with salt.utils.files.fopen(path, "rb") as f: + data = f.read().replace(b"RSA ", b"") + bio = BIO.MemoryBuffer(data) +- key = RSA.load_pub_key_bio(bio) ++ try: ++ key = RSA.load_pub_key_bio(bio) ++ except RSA.RSAError: ++ raise InvalidKeyError("Encountered bad RSA public key") + else: + with salt.utils.files.fopen(path) as f: +- key = RSA.importKey(f.read()) ++ try: ++ key = RSA.importKey(f.read()) ++ except (ValueError, IndexError, TypeError): ++ raise InvalidKeyError("Encountered bad RSA public key") + return key + + +diff --git a/salt/exceptions.py b/salt/exceptions.py +index 033a19cc54..1da15f9e69 100644 +--- a/salt/exceptions.py ++++ b/salt/exceptions.py +@@ -111,6 +111,12 @@ class AuthenticationError(SaltException): + """ + + ++class InvalidKeyError(SaltException): ++ """ ++ Raised when we encounter an invalid RSA key. 
++ """ ++ ++ + class CommandNotFoundError(SaltException): + """ + Used in modules or grains when a required binary is not available +diff --git a/salt/key.py b/salt/key.py +index 75777ede06..59090c979c 100644 +--- a/salt/key.py ++++ b/salt/key.py +@@ -11,6 +11,7 @@ import fnmatch + import logging + import os + import shutil ++import sys + + # Import salt libs + import salt.cache +@@ -652,17 +653,27 @@ class Key(object): + keydirs.append(self.REJ) + if include_denied: + keydirs.append(self.DEN) ++ invalid_keys = [] + for keydir in keydirs: + for key in matches.get(keydir, []): ++ key_path = os.path.join(self.opts["pki_dir"], keydir, key) ++ try: ++ salt.crypt.get_rsa_pub_key(key_path) ++ except salt.exceptions.InvalidKeyError: ++ log.error("Invalid RSA public key: %s", key) ++ invalid_keys.append((keydir, key)) ++ continue + try: + shutil.move( +- os.path.join(self.opts["pki_dir"], keydir, key), +- os.path.join(self.opts["pki_dir"], self.ACC, key), ++ key_path, os.path.join(self.opts["pki_dir"], self.ACC, key), + ) + eload = {"result": True, "act": "accept", "id": key} + self.event.fire_event(eload, salt.utils.event.tagify(prefix="key")) + except (IOError, OSError): + pass ++ for keydir, key in invalid_keys: ++ matches[keydir].remove(key) ++ sys.stderr.write("Unable to accept invalid key for {}.\n".format(key)) + return self.name_match(match) if match is not None else self.dict_match(matches) + + def accept_all(self): +diff --git a/salt/transport/mixins/auth.py b/salt/transport/mixins/auth.py +index 003cbd8275..0f0c615408 100644 +--- a/salt/transport/mixins/auth.py ++++ b/salt/transport/mixins/auth.py +@@ -184,11 +184,11 @@ class AESReqServerMixin(object): + tagged "auth" and returns a dict with information about the auth + event + +- # Verify that the key we are receiving matches the stored key +- # Store the key if it is not there +- # Make an RSA key with the pub key +- # Encrypt the AES key as an encrypted salt.payload +- # Package the return and return it ++ - Verify that the key we are receiving matches the stored key ++ - Store the key if it is not there ++ - Make an RSA key with the pub key ++ - Encrypt the AES key as an encrypted salt.payload ++ - Package the return and return it + """ + + if not salt.utils.verify.valid_id(self.opts, load["id"]): +@@ -460,7 +460,7 @@ class AESReqServerMixin(object): + # and an empty request comes in + try: + pub = salt.crypt.get_rsa_pub_key(pubfn) +- except (ValueError, IndexError, TypeError) as err: ++ except salt.crypt.InvalidKeyError as err: + log.error('Corrupt public key "%s": %s', pubfn, err) + return {"enc": "clear", "load": {"ret": False}} + +diff --git a/tests/pytests/integration/cli/test_salt_key.py b/tests/pytests/integration/cli/test_salt_key.py +index 0edb2cf86c..2583348ce6 100644 +--- a/tests/pytests/integration/cli/test_salt_key.py ++++ b/tests/pytests/integration/cli/test_salt_key.py +@@ -328,3 +328,31 @@ def test_keys_generation_keysize_max(salt_key_cli): + ) + assert ret.exitcode != 0 + assert "error: The maximum value for keysize is 32768" in ret.stderr ++ ++def test_keys_generation_keysize_max(salt_key_cli, tmp_path): ++ ret = salt_key_cli.run( ++ "--gen-keys", "minibar", "--gen-keys-dir", str(tmp_path), "--keysize", "32769" ++ ) ++ assert ret.exitcode != 0 ++ assert "error: The maximum value for keysize is 32768" in ret.stderr ++ ++ ++def test_accept_bad_key(salt_master, salt_key_cli): ++ """ ++ test salt-key -d usage ++ """ ++ min_name = random_string("minibar-") ++ pki_dir = salt_master.config["pki_dir"] ++ key = 
os.path.join(pki_dir, "minions_pre", min_name) ++ ++ with salt.utils.files.fopen(key, "w") as fp: ++ fp.write("") ++ ++ try: ++ # Check Key ++ ret = salt_key_cli.run("-y", "-a", min_name) ++ assert ret.exitcode == 0 ++ assert "invalid key for {}".format(min_name) in ret.stderr ++ finally: ++ if os.path.exists(key): ++ os.remove(key) +diff --git a/tests/pytests/unit/test_crypt.py b/tests/pytests/unit/test_crypt.py +new file mode 100644 +index 0000000000..aa8f439b8c +--- /dev/null ++++ b/tests/pytests/unit/test_crypt.py +@@ -0,0 +1,20 @@ ++""" ++tests.pytests.unit.test_crypt ++~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ ++ ++Unit tests for salt's crypt module ++""" ++import pytest ++import salt.crypt ++import salt.utils.files ++ ++ ++def test_get_rsa_pub_key_bad_key(tmp_path): ++ """ ++ get_rsa_pub_key raises InvalidKeyError when encoutering a bad key ++ """ ++ key_path = str(tmp_path / "key") ++ with salt.utils.files.fopen(key_path, "w") as fp: ++ fp.write("") ++ with pytest.raises(salt.crypt.InvalidKeyError): ++ salt.crypt.get_rsa_pub_key(key_path) +-- +2.32.0 + + diff --git a/check-if-dpkgnotify-is-executable-bsc-1186674-376.patch b/check-if-dpkgnotify-is-executable-bsc-1186674-376.patch new file mode 100644 index 0000000..3a69fe6 --- /dev/null +++ b/check-if-dpkgnotify-is-executable-bsc-1186674-376.patch @@ -0,0 +1,23 @@ +From b477b00447b49fc2f221cfb6d2c491bcd1970119 Mon Sep 17 00:00:00 2001 +From: Victor Zhestkov <35733135+vzhestkov@users.noreply.github.com> +Date: Tue, 1 Jun 2021 13:04:43 +0300 +Subject: [PATCH] Check if dpkgnotify is executable (bsc#1186674) + (#376) + +It prevents fails on removing salt-minion package +when the dpkg configuration is still active +--- + scripts/suse/dpkg/99dpkgnotify | 2 +- + 1 file changed, 1 insertion(+), 1 deletion(-) + +diff --git a/scripts/suse/dpkg/99dpkgnotify b/scripts/suse/dpkg/99dpkgnotify +index 8013387a57..f89815f605 100644 +--- a/scripts/suse/dpkg/99dpkgnotify ++++ b/scripts/suse/dpkg/99dpkgnotify +@@ -1 +1 @@ +-DPkg::Post-Invoke {"/usr/bin/dpkgnotify";}; ++DPkg::Post-Invoke {"if [ -x /usr/bin/dpkgnotify ]; then /usr/bin/dpkgnotify; fi;";}; +-- +2.31.1 + + diff --git a/do-noop-for-services-states-when-running-systemd-in-.patch b/do-noop-for-services-states-when-running-systemd-in-.patch new file mode 100644 index 0000000..e05741b --- /dev/null +++ b/do-noop-for-services-states-when-running-systemd-in-.patch @@ -0,0 +1,631 @@ +From 6837044f5a207cf39f3064428b0ed276226a5e39 Mon Sep 17 00:00:00 2001 +From: =?UTF-8?q?Pablo=20Su=C3=A1rez=20Hern=C3=A1ndez?= + +Date: Fri, 9 Jul 2021 09:05:55 +0100 +Subject: [PATCH] Do noop for services states when running systemd in + offline mode (bsc#1187787) + +transactional_updates: do not execute states in parallel but use a queue (bsc#1188170) + +Add changes suggested by pre-commit + +Fix unit tests for transactional_updates module + +Add unit tests to cover queue cases on transaction_update states + +Refactor offline checkers and add unit tests + +Fix regression that always consider offline mode + +Add proper mocking and skip tests when running in offline mode +--- + salt/modules/systemd_service.py | 5 + + salt/modules/transactional_update.py | 56 +++- + salt/states/service.py | 14 + + tests/integration/states/test_service.py | 4 + + .../unit/modules/test_transactional_update.py | 264 +++++++++++++++++- + tests/unit/states/test_service.py | 43 ++- + 6 files changed, 377 insertions(+), 9 deletions(-) + +diff --git a/salt/modules/systemd_service.py b/salt/modules/systemd_service.py +index 49e5bd813f..8d495433f8 100644 
+--- a/salt/modules/systemd_service.py ++++ b/salt/modules/systemd_service.py +@@ -102,6 +102,11 @@ def _check_available(name): + """ + Returns boolean telling whether or not the named service is available + """ ++ if offline(): ++ raise CommandExecutionError( ++ "Cannot run in offline mode. Failed to get information on unit '%s'" % name ++ ) ++ + _status = _systemctl_status(name) + sd_version = salt.utils.systemd.version(__context__) + if sd_version is not None and sd_version >= 231: +diff --git a/salt/modules/transactional_update.py b/salt/modules/transactional_update.py +index 9cdaddb91a..3af9d91822 100644 +--- a/salt/modules/transactional_update.py ++++ b/salt/modules/transactional_update.py +@@ -281,10 +281,14 @@ import os + import sys + import tempfile + ++# required by _check_queue invocation later ++import time # pylint: disable=unused-import ++ + import salt.client.ssh.state + import salt.client.ssh.wrapper.state + import salt.exceptions + import salt.utils.args ++from salt.modules.state import _check_queue, _prior_running_states, _wait, running + + __func_alias__ = {"apply_": "apply"} + +@@ -295,7 +299,14 @@ def __virtual__(): + """ + transactional-update command is required. + """ ++ global _check_queue, _wait, _prior_running_states, running + if __utils__["path.which"]("transactional-update"): ++ _check_queue = salt.utils.functools.namespaced_function(_check_queue, globals()) ++ _wait = salt.utils.functools.namespaced_function(_wait, globals()) ++ _prior_running_states = salt.utils.functools.namespaced_function( ++ _prior_running_states, globals() ++ ) ++ running = salt.utils.functools.namespaced_function(running, globals()) + return True + else: + return (False, "Module transactional_update requires a transactional system") +@@ -1068,7 +1079,13 @@ def _create_and_execute_salt_state( + + + def sls( +- mods, saltenv="base", test=None, exclude=None, activate_transaction=False, **kwargs ++ mods, ++ saltenv="base", ++ test=None, ++ exclude=None, ++ activate_transaction=False, ++ queue=False, ++ **kwargs + ): + """Execute the states in one or more SLS files inside a transaction. + +@@ -1093,6 +1110,13 @@ def sls( + (i.e there is a new snaphot in the system), a new reboot will + be scheduled (default False) + ++ queue ++ Instead of failing immediately when another state run is in progress, ++ queue the new state run to begin running once the other has finished. ++ ++ This option starts a new thread for each queued state run, so use this ++ option sparingly. (Default: False) ++ + For a formal description of the possible parameters accepted in + this function, check `state.sls` documentation. + +@@ -1104,6 +1128,10 @@ def sls( + salt microos transactional_update.sls stuff activate_transaction=True + + """ ++ conflict = _check_queue(queue, kwargs) ++ if conflict is not None: ++ return conflict ++ + # Get a copy of the pillar data, to avoid overwriting the current + # pillar, instead the one delegated + pillar = copy.deepcopy(__pillar__) +@@ -1156,7 +1184,7 @@ def sls( + ) + + +-def highstate(activate_transaction=False, **kwargs): ++def highstate(activate_transaction=False, queue=False, **kwargs): + """Retrieve the state data from the salt master for this minion and + execute it inside a transaction. 
+ +@@ -1168,6 +1196,13 @@ def highstate(activate_transaction=False, **kwargs): + (i.e there is a new snaphot in the system), a new reboot will + be scheduled (default False) + ++ queue ++ Instead of failing immediately when another state run is in progress, ++ queue the new state run to begin running once the other has finished. ++ ++ This option starts a new thread for each queued state run, so use this ++ option sparingly. (Default: False) ++ + CLI Example: + + .. code-block:: bash +@@ -1177,6 +1212,10 @@ def highstate(activate_transaction=False, **kwargs): + salt microos transactional_update.highstate activate_transaction=True + + """ ++ conflict = _check_queue(queue, kwargs) ++ if conflict is not None: ++ return conflict ++ + # Get a copy of the pillar data, to avoid overwriting the current + # pillar, instead the one delegated + pillar = copy.deepcopy(__pillar__) +@@ -1210,7 +1249,7 @@ def highstate(activate_transaction=False, **kwargs): + ) + + +-def single(fun, name, test=None, activate_transaction=False, **kwargs): ++def single(fun, name, test=None, activate_transaction=False, queue=False, **kwargs): + """Execute a single state function with the named kwargs, returns + False if insufficient data is sent to the command + +@@ -1224,6 +1263,13 @@ def single(fun, name, test=None, activate_transaction=False, **kwargs): + (i.e there is a new snaphot in the system), a new reboot will + be scheduled (default False) + ++ queue ++ Instead of failing immediately when another state run is in progress, ++ queue the new state run to begin running once the other has finished. ++ ++ This option starts a new thread for each queued state run, so use this ++ option sparingly. (Default: False) ++ + CLI Example: + + .. code-block:: bash +@@ -1232,6 +1278,10 @@ def single(fun, name, test=None, activate_transaction=False, **kwargs): + salt microos transactional_update.single pkg.installed name=emacs activate_transaction=True + + """ ++ conflict = _check_queue(queue, kwargs) ++ if conflict is not None: ++ return conflict ++ + # Get a copy of the pillar data, to avoid overwriting the current + # pillar, instead the one delegated + pillar = copy.deepcopy(__pillar__) +diff --git a/salt/states/service.py b/salt/states/service.py +index 4ea36a78f6..3a216920f4 100644 +--- a/salt/states/service.py ++++ b/salt/states/service.py +@@ -342,6 +342,10 @@ def _disable(name, started, result=True, **kwargs): + return ret + + ++def _offline(): ++ return "service.offline" in __salt__ and __salt__["service.offline"]() ++ ++ + def _available(name, ret): + """ + Check if the service is available +@@ -436,6 +440,11 @@ def running(name, enable=None, sig=None, init_delay=None, **kwargs): + if isinstance(enable, str): + enable = salt.utils.data.is_true(enable) + ++ if _offline(): ++ ret["result"] = True ++ ret["comment"] = "Running in OFFLINE mode. Nothing to do" ++ return ret ++ + # Check if the service is available + try: + if not _available(name, ret): +@@ -631,6 +640,11 @@ def dead(name, enable=None, sig=None, init_delay=None, **kwargs): + if isinstance(enable, str): + enable = salt.utils.data.is_true(enable) + ++ if _offline(): ++ ret["result"] = True ++ ret["comment"] = "Running in OFFLINE mode. 
Nothing to do" ++ return ret ++ + # Check if the service is available + try: + if not _available(name, ret): +diff --git a/tests/integration/states/test_service.py b/tests/integration/states/test_service.py +index 81359d44ea..9c89d2cfd0 100644 +--- a/tests/integration/states/test_service.py ++++ b/tests/integration/states/test_service.py +@@ -26,6 +26,7 @@ class ServiceTest(ModuleCase, SaltReturnAssertsMixin): + cmd_name = "crontab" + os_family = self.run_function("grains.get", ["os_family"]) + os_release = self.run_function("grains.get", ["osrelease"]) ++ is_systemd = self.run_function("grains.get", ["systemd"]) + self.stopped = False + self.running = True + if os_family == "RedHat": +@@ -53,6 +54,9 @@ class ServiceTest(ModuleCase, SaltReturnAssertsMixin): + if os_family != "Windows" and salt.utils.path.which(cmd_name) is None: + self.skipTest("{} is not installed".format(cmd_name)) + ++ if is_systemd and self.run_function("service.offline"): ++ self.skipTest("systemd is OFFLINE") ++ + def tearDown(self): + if self.post_srv_disable: + self.run_function("service.disable", name=self.service_name) +diff --git a/tests/unit/modules/test_transactional_update.py b/tests/unit/modules/test_transactional_update.py +index 2d30f296d7..6f8587baa0 100644 +--- a/tests/unit/modules/test_transactional_update.py ++++ b/tests/unit/modules/test_transactional_update.py +@@ -1,6 +1,7 @@ + import sys + + import pytest ++import salt.modules.state as statemod + import salt.modules.transactional_update as tu + import salt.utils.platform + from salt.exceptions import CommandExecutionError +@@ -16,7 +17,10 @@ class TransactionalUpdateTestCase(TestCase, LoaderModuleMockMixin): + """ + + def setup_loader_modules(self): +- return {tu: {"__salt__": {}, "__utils__": {}}} ++ return { ++ tu: {"__salt__": {}, "__utils__": {}}, ++ statemod: {"__salt__": {}, "__context__": {}}, ++ } + + def test__global_params_no_self_update(self): + """Test transactional_update._global_params without self_update""" +@@ -643,11 +647,103 @@ class TransactionalUpdateTestCase(TestCase, LoaderModuleMockMixin): + opts_mock = { + "hash_type": "md5", + } ++ salt_mock = { ++ "saltutil.is_running": MagicMock(return_value=[]), ++ } + get_sls_opts.return_value = opts_mock +- with patch.dict(tu.__opts__, opts_mock): ++ with patch.dict(tu.__opts__, opts_mock), patch.dict( ++ statemod.__salt__, salt_mock ++ ): + assert tu.sls("module") == "result" + _create_and_execute_salt_state.assert_called_once() + ++ @patch("salt.modules.transactional_update._create_and_execute_salt_state") ++ @patch("salt.modules.transactional_update.TransactionalUpdateHighstate") ++ @patch("salt.fileclient.get_file_client") ++ @patch("salt.utils.state.get_sls_opts") ++ def test_sls_queue_true( ++ self, ++ get_sls_opts, ++ get_file_client, ++ TransactionalUpdateHighstate, ++ _create_and_execute_salt_state, ++ ): ++ """Test transactional_update.sls""" ++ TransactionalUpdateHighstate.return_value = TransactionalUpdateHighstate ++ TransactionalUpdateHighstate.render_highstate.return_value = (None, []) ++ TransactionalUpdateHighstate.state.reconcile_extend.return_value = (None, []) ++ TransactionalUpdateHighstate.state.requisite_in.return_value = (None, []) ++ TransactionalUpdateHighstate.state.verify_high.return_value = [] ++ ++ _create_and_execute_salt_state.return_value = "result" ++ opts_mock = { ++ "hash_type": "md5", ++ } ++ salt_mock = { ++ "saltutil.is_running": MagicMock( ++ side_effect=[ ++ [ ++ { ++ "fun": "state.running", ++ "pid": "4126", ++ "jid": "20150325123407204096", ++ 
} ++ ], ++ [], ++ ] ++ ), ++ } ++ get_sls_opts.return_value = opts_mock ++ with patch.dict(tu.__opts__, opts_mock), patch.dict( ++ statemod.__salt__, salt_mock ++ ): ++ assert tu.sls("module", queue=True) == "result" ++ _create_and_execute_salt_state.assert_called_once() ++ ++ @patch("salt.modules.transactional_update._create_and_execute_salt_state") ++ @patch("salt.modules.transactional_update.TransactionalUpdateHighstate") ++ @patch("salt.fileclient.get_file_client") ++ @patch("salt.utils.state.get_sls_opts") ++ def test_sls_queue_false_failing( ++ self, ++ get_sls_opts, ++ get_file_client, ++ TransactionalUpdateHighstate, ++ _create_and_execute_salt_state, ++ ): ++ """Test transactional_update.sls""" ++ TransactionalUpdateHighstate.return_value = TransactionalUpdateHighstate ++ TransactionalUpdateHighstate.render_highstate.return_value = (None, []) ++ TransactionalUpdateHighstate.state.reconcile_extend.return_value = (None, []) ++ TransactionalUpdateHighstate.state.requisite_in.return_value = (None, []) ++ TransactionalUpdateHighstate.state.verify_high.return_value = [] ++ ++ _create_and_execute_salt_state.return_value = "result" ++ opts_mock = { ++ "hash_type": "md5", ++ } ++ salt_mock = { ++ "saltutil.is_running": MagicMock( ++ side_effect=[ ++ [ ++ { ++ "fun": "state.running", ++ "pid": "4126", ++ "jid": "20150325123407204096", ++ } ++ ], ++ [], ++ ] ++ ), ++ } ++ get_sls_opts.return_value = opts_mock ++ with patch.dict(tu.__opts__, opts_mock), patch.dict( ++ statemod.__salt__, salt_mock ++ ): ++ assert tu.sls("module", queue=False) == [ ++ 'The function "state.running" is running as PID 4126 and was started at 2015, Mar 25 12:34:07.204096 with jid 20150325123407204096' ++ ] ++ + @patch("salt.modules.transactional_update._create_and_execute_salt_state") + @patch("salt.modules.transactional_update.TransactionalUpdateHighstate") + @patch("salt.fileclient.get_file_client") +@@ -666,11 +762,95 @@ class TransactionalUpdateTestCase(TestCase, LoaderModuleMockMixin): + opts_mock = { + "hash_type": "md5", + } ++ salt_mock = { ++ "saltutil.is_running": MagicMock(return_value=[]), ++ } + get_sls_opts.return_value = opts_mock +- with patch.dict(tu.__opts__, opts_mock): ++ with patch.dict(tu.__opts__, opts_mock), patch.dict( ++ statemod.__salt__, salt_mock ++ ): + assert tu.highstate() == "result" + _create_and_execute_salt_state.assert_called_once() + ++ @patch("salt.modules.transactional_update._create_and_execute_salt_state") ++ @patch("salt.modules.transactional_update.TransactionalUpdateHighstate") ++ @patch("salt.fileclient.get_file_client") ++ @patch("salt.utils.state.get_sls_opts") ++ def test_highstate_queue_true( ++ self, ++ get_sls_opts, ++ get_file_client, ++ TransactionalUpdateHighstate, ++ _create_and_execute_salt_state, ++ ): ++ """Test transactional_update.highstage""" ++ TransactionalUpdateHighstate.return_value = TransactionalUpdateHighstate ++ ++ _create_and_execute_salt_state.return_value = "result" ++ opts_mock = { ++ "hash_type": "md5", ++ } ++ salt_mock = { ++ "saltutil.is_running": MagicMock( ++ side_effect=[ ++ [ ++ { ++ "fun": "state.running", ++ "pid": "4126", ++ "jid": "20150325123407204096", ++ } ++ ], ++ [], ++ ] ++ ), ++ } ++ get_sls_opts.return_value = opts_mock ++ with patch.dict(tu.__opts__, opts_mock), patch.dict( ++ statemod.__salt__, salt_mock ++ ): ++ assert tu.highstate(queue=True) == "result" ++ _create_and_execute_salt_state.assert_called_once() ++ ++ @patch("salt.modules.transactional_update._create_and_execute_salt_state") ++ 
@patch("salt.modules.transactional_update.TransactionalUpdateHighstate") ++ @patch("salt.fileclient.get_file_client") ++ @patch("salt.utils.state.get_sls_opts") ++ def test_highstate_queue_false_failing( ++ self, ++ get_sls_opts, ++ get_file_client, ++ TransactionalUpdateHighstate, ++ _create_and_execute_salt_state, ++ ): ++ """Test transactional_update.highstage""" ++ TransactionalUpdateHighstate.return_value = TransactionalUpdateHighstate ++ ++ _create_and_execute_salt_state.return_value = "result" ++ opts_mock = { ++ "hash_type": "md5", ++ } ++ salt_mock = { ++ "saltutil.is_running": MagicMock( ++ side_effect=[ ++ [ ++ { ++ "fun": "state.running", ++ "pid": "4126", ++ "jid": "20150325123407204096", ++ } ++ ], ++ [], ++ ] ++ ), ++ } ++ get_sls_opts.return_value = opts_mock ++ with patch.dict(tu.__opts__, opts_mock), patch.dict( ++ statemod.__salt__, salt_mock ++ ): ++ assert tu.highstate(queue=False) == [ ++ 'The function "state.running" is running as PID 4126 and was started at 2015, Mar 25 12:34:07.204096 with jid 20150325123407204096' ++ ] ++ + @patch("salt.modules.transactional_update._create_and_execute_salt_state") + @patch("salt.client.ssh.state.SSHState") + @patch("salt.utils.state.get_sls_opts") +@@ -683,7 +863,83 @@ class TransactionalUpdateTestCase(TestCase, LoaderModuleMockMixin): + opts_mock = { + "hash_type": "md5", + } ++ salt_mock = { ++ "saltutil.is_running": MagicMock(return_value=[]), ++ } + get_sls_opts.return_value = opts_mock +- with patch.dict(tu.__opts__, opts_mock): ++ with patch.dict(tu.__opts__, opts_mock), patch.dict( ++ statemod.__salt__, salt_mock ++ ): + assert tu.single("pkg.installed", name="emacs") == "result" + _create_and_execute_salt_state.assert_called_once() ++ ++ @patch("salt.modules.transactional_update._create_and_execute_salt_state") ++ @patch("salt.client.ssh.state.SSHState") ++ @patch("salt.utils.state.get_sls_opts") ++ def test_single_queue_false_failing( ++ self, get_sls_opts, SSHState, _create_and_execute_salt_state ++ ): ++ """Test transactional_update.single""" ++ SSHState.return_value = SSHState ++ SSHState.verify_data.return_value = None ++ ++ _create_and_execute_salt_state.return_value = "result" ++ opts_mock = { ++ "hash_type": "md5", ++ } ++ salt_mock = { ++ "saltutil.is_running": MagicMock( ++ side_effect=[ ++ [ ++ { ++ "fun": "state.running", ++ "pid": "4126", ++ "jid": "20150325123407204096", ++ } ++ ], ++ [], ++ ] ++ ), ++ } ++ get_sls_opts.return_value = opts_mock ++ with patch.dict(tu.__opts__, opts_mock), patch.dict( ++ statemod.__salt__, salt_mock ++ ): ++ assert tu.single("pkg.installed", name="emacs", queue=False) == [ ++ 'The function "state.running" is running as PID 4126 and was started at 2015, Mar 25 12:34:07.204096 with jid 20150325123407204096' ++ ] ++ ++ @patch("salt.modules.transactional_update._create_and_execute_salt_state") ++ @patch("salt.client.ssh.state.SSHState") ++ @patch("salt.utils.state.get_sls_opts") ++ def test_single_queue_true( ++ self, get_sls_opts, SSHState, _create_and_execute_salt_state ++ ): ++ """Test transactional_update.single""" ++ SSHState.return_value = SSHState ++ SSHState.verify_data.return_value = None ++ ++ _create_and_execute_salt_state.return_value = "result" ++ opts_mock = { ++ "hash_type": "md5", ++ } ++ salt_mock = { ++ "saltutil.is_running": MagicMock( ++ side_effect=[ ++ [ ++ { ++ "fun": "state.running", ++ "pid": "4126", ++ "jid": "20150325123407204096", ++ } ++ ], ++ [], ++ ] ++ ), ++ } ++ get_sls_opts.return_value = opts_mock ++ with patch.dict(tu.__opts__, opts_mock), 
patch.dict( ++ statemod.__salt__, salt_mock ++ ): ++ assert tu.single("pkg.installed", name="emacs", queue=True) == "result" ++ _create_and_execute_salt_state.assert_called_once() +diff --git a/tests/unit/states/test_service.py b/tests/unit/states/test_service.py +index 51755fc5a1..de09f2f8ab 100644 +--- a/tests/unit/states/test_service.py ++++ b/tests/unit/states/test_service.py +@@ -304,6 +304,24 @@ class ServiceTestCase(TestCase, LoaderModuleMockMixin): + service.__context__, {"service.state": "running"} + ) + ++ def test_running_in_offline_mode(self): ++ """ ++ Tests the case in which a service.running state is executed on an offline environemnt ++ ++ """ ++ name = "thisisnotarealservice" ++ with patch.object(service, "_offline", MagicMock(return_value=True)): ++ ret = service.running(name=name) ++ self.assertDictEqual( ++ ret, ++ { ++ "changes": {}, ++ "comment": "Running in OFFLINE mode. Nothing to do", ++ "result": True, ++ "name": name, ++ }, ++ ) ++ + def test_dead(self): + """ + Test to ensure that the named service is dead +@@ -443,6 +461,24 @@ class ServiceTestCase(TestCase, LoaderModuleMockMixin): + }, + ) + ++ def test_dead_in_offline_mode(self): ++ """ ++ Tests the case in which a service.dead state is executed on an offline environemnt ++ ++ """ ++ name = "thisisnotarealservice" ++ with patch.object(service, "_offline", MagicMock(return_value=True)): ++ ret = service.dead(name=name) ++ self.assertDictEqual( ++ ret, ++ { ++ "changes": {}, ++ "comment": "Running in OFFLINE mode. Nothing to do", ++ "result": True, ++ "name": name, ++ }, ++ ) ++ + def test_enabled(self): + """ + Test to verify that the service is enabled +@@ -567,8 +603,11 @@ class ServiceTestCaseFunctional(TestCase, LoaderModuleMockMixin): + @slowTest + def test_running_with_reload(self): + with patch.dict(service.__opts__, {"test": False}): +- service.dead(self.service_name, enable=False) +- result = service.running(name=self.service_name, enable=True, reload=False) ++ with patch("salt.utils.systemd.offline", MagicMock(return_value=False)): ++ service.dead(self.service_name, enable=False) ++ result = service.running( ++ name=self.service_name, enable=True, reload=False ++ ) + + if salt.utils.platform.is_windows(): + comment = "Started Service {}".format(self.service_name) +-- +2.32.0 + + diff --git a/don-t-use-shell-sbin-nologin-in-requisites.patch b/don-t-use-shell-sbin-nologin-in-requisites.patch new file mode 100644 index 0000000..f773381 --- /dev/null +++ b/don-t-use-shell-sbin-nologin-in-requisites.patch @@ -0,0 +1,39 @@ +From 9a8ca020a3cacbcfbbc33f209cd0ea6c3da3f788 Mon Sep 17 00:00:00 2001 +From: Alexander Graul +Date: Tue, 17 Aug 2021 11:52:00 +0200 +Subject: [PATCH] Don't use shell="/sbin/nologin" in requisites + +Using shell="/sbin/nologin" in an onlyif/unless requisite does not +really make sense since the condition can't be run. shell=/sbin/nologin +is also a common argument, e.g. for user.present. 
+ +Fixes: bsc#1188259 +--- + salt/state.py | 9 +++++++-- + 1 file changed, 7 insertions(+), 2 deletions(-) + +diff --git a/salt/state.py b/salt/state.py +index 64c5225728..c6742101b2 100644 +--- a/salt/state.py ++++ b/salt/state.py +@@ -889,9 +889,14 @@ class State: + cmd_opts[run_cmd_arg] = low_data.get(run_cmd_arg) + + if "shell" in low_data: +- cmd_opts["shell"] = low_data["shell"] ++ shell = low_data["shell"] + elif "shell" in self.opts["grains"]: +- cmd_opts["shell"] = self.opts["grains"].get("shell") ++ shell = self.opts["grains"].get("shell") ++ else: ++ shell = None ++ # /sbin/nologin always causes the onlyif / unless cmd to fail ++ if shell is not None and shell != "/sbin/nologin": ++ cmd_opts["shell"] = shell + + if "onlyif" in low_data: + _ret = self._run_check_onlyif(low_data, cmd_opts) +-- +2.32.0 + + diff --git a/enhance-logging-when-inotify-beacon-is-missing-pyino.patch b/enhance-logging-when-inotify-beacon-is-missing-pyino.patch new file mode 100644 index 0000000..7b81993 --- /dev/null +++ b/enhance-logging-when-inotify-beacon-is-missing-pyino.patch @@ -0,0 +1,30 @@ +From cde0f9385e1afb9fa97fe2c86cfa77ae3b899aa0 Mon Sep 17 00:00:00 2001 +From: =?UTF-8?q?Pablo=20Su=C3=A1rez=20Hern=C3=A1ndez?= + +Date: Fri, 18 Jun 2021 13:09:22 +0100 +Subject: [PATCH] Enhance logging when inotify beacon is missing + pyinotify (bsc#1186310) + +--- + salt/beacons/inotify.py | 4 +++- + 1 file changed, 3 insertions(+), 1 deletion(-) + +diff --git a/salt/beacons/inotify.py b/salt/beacons/inotify.py +index fa2f73c35f..a6b7548f97 100644 +--- a/salt/beacons/inotify.py ++++ b/salt/beacons/inotify.py +@@ -49,7 +49,9 @@ log = logging.getLogger(__name__) + def __virtual__(): + if HAS_PYINOTIFY: + return __virtualname__ +- return False ++ err_msg = "pyinotify library is missing" ++ log.error("Unable to load inotify beacon: {}".format(err_msg)) ++ return False, err_msg + + + def _get_mask(mask): +-- +2.31.1 + + diff --git a/enhance-openscap-module-add-xccdf_eval-call-386.patch b/enhance-openscap-module-add-xccdf_eval-call-386.patch new file mode 100644 index 0000000..814523d --- /dev/null +++ b/enhance-openscap-module-add-xccdf_eval-call-386.patch @@ -0,0 +1,514 @@ +From 1fd51c17bc03e679a040f2c6d9ac107a2c57b7c8 Mon Sep 17 00:00:00 2001 +From: =?UTF-8?q?Pablo=20Su=C3=A1rez=20Hern=C3=A1ndez?= + +Date: Wed, 7 Jul 2021 15:41:48 +0100 +Subject: [PATCH] Enhance openscap module: add "xccdf_eval" call (#386) + +* Enhance openscap module: add xccdf_eval call + +* Allow 'tailoring_file' and 'tailoring_id' parameters + +* Fix wrong reference to subprocess.PIPE in openscap unit tests + +* Add changes suggested by pre-commit + +Co-authored-by: Michael Calmer +--- + changelog/59756.added | 1 + + salt/modules/openscap.py | 120 ++++++++++++- + tests/unit/modules/test_openscap.py | 262 +++++++++++++++++++++++++--- + 3 files changed, 353 insertions(+), 30 deletions(-) + create mode 100644 changelog/59756.added + +diff --git a/changelog/59756.added b/changelog/59756.added +new file mode 100644 +index 0000000000..a59fb21eef +--- /dev/null ++++ b/changelog/59756.added +@@ -0,0 +1 @@ ++adding new call for openscap xccdf eval supporting new parameters +diff --git a/salt/modules/openscap.py b/salt/modules/openscap.py +index 6f8ff4a76d..f75e1c5e6b 100644 +--- a/salt/modules/openscap.py ++++ b/salt/modules/openscap.py +@@ -1,20 +1,15 @@ +-# -*- coding: utf-8 -*- + """ + Module for OpenSCAP Management + + """ + +-# Import Python libs +-from __future__ import absolute_import, print_function, unicode_literals + ++import os.path + 
import shlex + import shutil + import tempfile + from subprocess import PIPE, Popen + +-# Import Salt libs +-from salt.ext import six +- + ArgumentParser = object + + try: +@@ -44,7 +39,7 @@ def __virtual__(): + + class _ArgumentParser(ArgumentParser): + def __init__(self, action=None, *args, **kwargs): +- super(_ArgumentParser, self).__init__(*args, prog="oscap", **kwargs) ++ super().__init__(*args, prog="oscap", **kwargs) + self.add_argument("action", choices=["eval"]) + add_arg = None + for params, kwparams in _XCCDF_MAP["eval"]["parser_arguments"]: +@@ -61,6 +56,115 @@ _OSCAP_EXIT_CODES_MAP = { + } + + ++def xccdf_eval(xccdffile, ovalfiles=None, **kwargs): ++ """ ++ Run ``oscap xccdf eval`` commands on minions. ++ It uses cp.push_dir to upload the generated files to the salt master ++ in the master's minion files cachedir ++ (defaults to ``/var/cache/salt/master/minions/minion-id/files``) ++ ++ It needs ``file_recv`` set to ``True`` in the master configuration file. ++ ++ xccdffile ++ the path to the xccdf file to evaluate ++ ++ ovalfiles ++ additional oval definition files ++ ++ profile ++ the name of Profile to be evaluated ++ ++ rule ++ the name of a single rule to be evaluated ++ ++ oval_results ++ save OVAL results as well (True or False) ++ ++ results ++ write XCCDF Results into given file ++ ++ report ++ write HTML report into given file ++ ++ fetch_remote_resources ++ download remote content referenced by XCCDF (True or False) ++ ++ tailoring_file ++ use given XCCDF Tailoring file ++ ++ tailoring_id ++ use given DS component as XCCDF Tailoring file ++ ++ remediate ++ automatically execute XCCDF fix elements for failed rules. ++ Use of this option is always at your own risk. (True or False) ++ ++ CLI Example: ++ ++ .. code-block:: bash ++ ++ salt '*' openscap.xccdf_eval /usr/share/openscap/scap-yast2sec-xccdf.xml profile=Default ++ ++ """ ++ success = True ++ error = None ++ upload_dir = None ++ returncode = None ++ if not ovalfiles: ++ ovalfiles = [] ++ ++ cmd_opts = ["oscap", "xccdf", "eval"] ++ if kwargs.get("oval_results"): ++ cmd_opts.append("--oval-results") ++ if "results" in kwargs: ++ cmd_opts.append("--results") ++ cmd_opts.append(kwargs["results"]) ++ if "report" in kwargs: ++ cmd_opts.append("--report") ++ cmd_opts.append(kwargs["report"]) ++ if "profile" in kwargs: ++ cmd_opts.append("--profile") ++ cmd_opts.append(kwargs["profile"]) ++ if "rule" in kwargs: ++ cmd_opts.append("--rule") ++ cmd_opts.append(kwargs["rule"]) ++ if "tailoring_file" in kwargs: ++ cmd_opts.append("--tailoring-file") ++ cmd_opts.append(kwargs["tailoring_file"]) ++ if "tailoring_id" in kwargs: ++ cmd_opts.append("--tailoring-id") ++ cmd_opts.append(kwargs["tailoring_id"]) ++ if kwargs.get("fetch_remote_resources"): ++ cmd_opts.append("--fetch-remote-resources") ++ if kwargs.get("remediate"): ++ cmd_opts.append("--remediate") ++ cmd_opts.append(xccdffile) ++ cmd_opts.extend(ovalfiles) ++ ++ if not os.path.exists(xccdffile): ++ success = False ++ error = "XCCDF File '{}' does not exist".format(xccdffile) ++ for ofile in ovalfiles: ++ if success and not os.path.exists(ofile): ++ success = False ++ error = "Oval File '{}' does not exist".format(ofile) ++ ++ if success: ++ tempdir = tempfile.mkdtemp() ++ proc = Popen(cmd_opts, stdout=PIPE, stderr=PIPE, cwd=tempdir) ++ (stdoutdata, error) = proc.communicate() ++ success = _OSCAP_EXIT_CODES_MAP[proc.returncode] ++ returncode = proc.returncode ++ if success: ++ __salt__["cp.push_dir"](tempdir) ++ upload_dir = tempdir ++ shutil.rmtree(tempdir, 
ignore_errors=True) ++ ++ return dict( ++ success=success, upload_dir=upload_dir, error=error, returncode=returncode ++ ) ++ ++ + def xccdf(params): + """ + Run ``oscap xccdf`` commands on minions. +@@ -91,7 +195,7 @@ def xccdf(params): + args, argv = _ArgumentParser(action=action).parse_known_args(args=params) + except Exception as err: # pylint: disable=broad-except + success = False +- error = six.text_type(err) ++ error = str(err) + + if success: + cmd = _XCCDF_MAP[action]["cmd_pattern"].format(args.profile, policy) +diff --git a/tests/unit/modules/test_openscap.py b/tests/unit/modules/test_openscap.py +index 04cf00a1d3..e5be151bf2 100644 +--- a/tests/unit/modules/test_openscap.py ++++ b/tests/unit/modules/test_openscap.py +@@ -1,18 +1,8 @@ +-# -*- coding: utf-8 -*- +- +-# Import python libs +-from __future__ import absolute_import, print_function, unicode_literals +- + from subprocess import PIPE + +-# Import salt libs + import salt.modules.openscap as openscap +- +-# Import 3rd-party libs + from salt.ext import six + from tests.support.mock import MagicMock, Mock, patch +- +-# Import salt test libs + from tests.support.unit import TestCase + + +@@ -32,6 +22,7 @@ class OpenscapTestCase(TestCase): + "salt.modules.openscap.tempfile.mkdtemp", + Mock(return_value=self.random_temp_dir), + ), ++ patch("salt.modules.openscap.os.path.exists", Mock(return_value=True)), + ] + for patcher in patchers: + self.apply_patch(patcher) +@@ -50,7 +41,7 @@ class OpenscapTestCase(TestCase): + ), + ): + response = openscap.xccdf( +- "eval --profile Default {0}".format(self.policy_file) ++ "eval --profile Default {}".format(self.policy_file) + ) + + self.assertEqual(openscap.tempfile.mkdtemp.call_count, 1) +@@ -97,7 +88,7 @@ class OpenscapTestCase(TestCase): + ), + ): + response = openscap.xccdf( +- "eval --profile Default {0}".format(self.policy_file) ++ "eval --profile Default {}".format(self.policy_file) + ) + + self.assertEqual(openscap.tempfile.mkdtemp.call_count, 1) +@@ -136,10 +127,7 @@ class OpenscapTestCase(TestCase): + + def test_openscap_xccdf_eval_fail_no_profile(self): + response = openscap.xccdf("eval --param Default /unknown/param") +- if six.PY2: +- error = "argument --profile is required" +- else: +- error = "the following arguments are required: --profile" ++ error = "the following arguments are required: --profile" + self.assertEqual( + response, + {"error": error, "upload_dir": None, "success": False, "returncode": None}, +@@ -199,7 +187,7 @@ class OpenscapTestCase(TestCase): + ), + ): + response = openscap.xccdf( +- "eval --profile Default {0}".format(self.policy_file) ++ "eval --profile Default {}".format(self.policy_file) + ) + + self.assertEqual( +@@ -213,11 +201,8 @@ class OpenscapTestCase(TestCase): + ) + + def test_openscap_xccdf_eval_fail_not_implemented_action(self): +- response = openscap.xccdf("info {0}".format(self.policy_file)) +- if six.PY2: +- mock_err = "argument action: invalid choice: 'info' (choose from u'eval')" +- else: +- mock_err = "argument action: invalid choice: 'info' (choose from 'eval')" ++ response = openscap.xccdf("info {}".format(self.policy_file)) ++ mock_err = "argument action: invalid choice: 'info' (choose from 'eval')" + + self.assertEqual( + response, +@@ -228,3 +213,236 @@ class OpenscapTestCase(TestCase): + "returncode": None, + }, + ) ++ ++ def test_new_openscap_xccdf_eval_success(self): ++ with patch( ++ "salt.modules.openscap.Popen", ++ MagicMock( ++ return_value=Mock( ++ **{"returncode": 0, "communicate.return_value": ("", "")} ++ ) ++ ), ++ ): 
++ response = openscap.xccdf_eval( ++ self.policy_file, ++ profile="Default", ++ oval_results=True, ++ results="results.xml", ++ report="report.html", ++ ) ++ ++ self.assertEqual(openscap.tempfile.mkdtemp.call_count, 1) ++ expected_cmd = [ ++ "oscap", ++ "xccdf", ++ "eval", ++ "--oval-results", ++ "--results", ++ "results.xml", ++ "--report", ++ "report.html", ++ "--profile", ++ "Default", ++ self.policy_file, ++ ] ++ openscap.Popen.assert_called_once_with( ++ expected_cmd, ++ cwd=openscap.tempfile.mkdtemp.return_value, ++ stderr=PIPE, ++ stdout=PIPE, ++ ) ++ openscap.__salt__["cp.push_dir"].assert_called_once_with( ++ self.random_temp_dir ++ ) ++ self.assertEqual(openscap.shutil.rmtree.call_count, 1) ++ self.assertEqual( ++ response, ++ { ++ "upload_dir": self.random_temp_dir, ++ "error": "", ++ "success": True, ++ "returncode": 0, ++ }, ++ ) ++ ++ def test_new_openscap_xccdf_eval_success_with_extra_ovalfiles(self): ++ with patch( ++ "salt.modules.openscap.Popen", ++ MagicMock( ++ return_value=Mock( ++ **{"returncode": 0, "communicate.return_value": ("", "")} ++ ) ++ ), ++ ): ++ response = openscap.xccdf_eval( ++ self.policy_file, ++ ["/usr/share/xml/another-oval.xml", "/usr/share/xml/oval.xml"], ++ profile="Default", ++ oval_results=True, ++ results="results.xml", ++ report="report.html", ++ ) ++ ++ self.assertEqual(openscap.tempfile.mkdtemp.call_count, 1) ++ expected_cmd = [ ++ "oscap", ++ "xccdf", ++ "eval", ++ "--oval-results", ++ "--results", ++ "results.xml", ++ "--report", ++ "report.html", ++ "--profile", ++ "Default", ++ self.policy_file, ++ "/usr/share/xml/another-oval.xml", ++ "/usr/share/xml/oval.xml", ++ ] ++ openscap.Popen.assert_called_once_with( ++ expected_cmd, ++ cwd=openscap.tempfile.mkdtemp.return_value, ++ stderr=PIPE, ++ stdout=PIPE, ++ ) ++ openscap.__salt__["cp.push_dir"].assert_called_once_with( ++ self.random_temp_dir ++ ) ++ self.assertEqual(openscap.shutil.rmtree.call_count, 1) ++ self.assertEqual( ++ response, ++ { ++ "upload_dir": self.random_temp_dir, ++ "error": "", ++ "success": True, ++ "returncode": 0, ++ }, ++ ) ++ ++ def test_new_openscap_xccdf_eval_success_with_failing_rules(self): ++ with patch( ++ "salt.modules.openscap.Popen", ++ MagicMock( ++ return_value=Mock( ++ **{"returncode": 2, "communicate.return_value": ("", "some error")} ++ ) ++ ), ++ ): ++ response = openscap.xccdf_eval( ++ self.policy_file, ++ profile="Default", ++ oval_results=True, ++ results="results.xml", ++ report="report.html", ++ ) ++ ++ self.assertEqual(openscap.tempfile.mkdtemp.call_count, 1) ++ expected_cmd = [ ++ "oscap", ++ "xccdf", ++ "eval", ++ "--oval-results", ++ "--results", ++ "results.xml", ++ "--report", ++ "report.html", ++ "--profile", ++ "Default", ++ self.policy_file, ++ ] ++ openscap.Popen.assert_called_once_with( ++ expected_cmd, ++ cwd=openscap.tempfile.mkdtemp.return_value, ++ stderr=PIPE, ++ stdout=PIPE, ++ ) ++ openscap.__salt__["cp.push_dir"].assert_called_once_with( ++ self.random_temp_dir ++ ) ++ self.assertEqual(openscap.shutil.rmtree.call_count, 1) ++ self.assertEqual( ++ response, ++ { ++ "upload_dir": self.random_temp_dir, ++ "error": "some error", ++ "success": True, ++ "returncode": 2, ++ }, ++ ) ++ ++ def test_new_openscap_xccdf_eval_success_ignore_unknown_params(self): ++ with patch( ++ "salt.modules.openscap.Popen", ++ MagicMock( ++ return_value=Mock( ++ **{"returncode": 2, "communicate.return_value": ("", "some error")} ++ ) ++ ), ++ ): ++ response = openscap.xccdf_eval( ++ "/policy/file", ++ param="Default", ++ profile="Default", ++ 
oval_results=True, ++ results="results.xml", ++ report="report.html", ++ ) ++ ++ self.assertEqual( ++ response, ++ { ++ "upload_dir": self.random_temp_dir, ++ "error": "some error", ++ "success": True, ++ "returncode": 2, ++ }, ++ ) ++ expected_cmd = [ ++ "oscap", ++ "xccdf", ++ "eval", ++ "--oval-results", ++ "--results", ++ "results.xml", ++ "--report", ++ "report.html", ++ "--profile", ++ "Default", ++ "/policy/file", ++ ] ++ openscap.Popen.assert_called_once_with( ++ expected_cmd, ++ cwd=openscap.tempfile.mkdtemp.return_value, ++ stderr=PIPE, ++ stdout=PIPE, ++ ) ++ ++ def test_new_openscap_xccdf_eval_evaluation_error(self): ++ with patch( ++ "salt.modules.openscap.Popen", ++ MagicMock( ++ return_value=Mock( ++ **{ ++ "returncode": 1, ++ "communicate.return_value": ("", "evaluation error"), ++ } ++ ) ++ ), ++ ): ++ response = openscap.xccdf_eval( ++ self.policy_file, ++ profile="Default", ++ oval_results=True, ++ results="results.xml", ++ report="report.html", ++ ) ++ ++ self.assertEqual( ++ response, ++ { ++ "upload_dir": None, ++ "error": "evaluation error", ++ "success": False, ++ "returncode": 1, ++ }, ++ ) +-- +2.32.0 + + diff --git a/exclude-the-full-path-of-a-download-url-to-prevent-i.patch b/exclude-the-full-path-of-a-download-url-to-prevent-i.patch new file mode 100644 index 0000000..dfc3a5a --- /dev/null +++ b/exclude-the-full-path-of-a-download-url-to-prevent-i.patch @@ -0,0 +1,69 @@ +From 57ed9c41a177f57e3d56465662750617ac36cc95 Mon Sep 17 00:00:00 2001 +From: Joe Eacott +Date: Mon, 28 Jun 2021 16:46:35 -0600 +Subject: [PATCH] Exclude the full path of a download URL to prevent + injection of malicious code (bsc#1190265) (CVE-2021-21996) + +--- + salt/fileclient.py | 7 +++++++ + tests/unit/test_fileclient.py | 18 ++++++++++++++++++ + 2 files changed, 25 insertions(+) + +diff --git a/salt/fileclient.py b/salt/fileclient.py +index 88dcf1668d..bdf450ffe6 100644 +--- a/salt/fileclient.py ++++ b/salt/fileclient.py +@@ -28,6 +28,7 @@ import salt.utils.platform + import salt.utils.stringutils + import salt.utils.templates + import salt.utils.url ++import salt.utils.verify + import salt.utils.versions + from salt.exceptions import CommandExecutionError, MinionError + +@@ -858,6 +859,12 @@ class Client: + else: + file_name = url_data.path + ++ # clean_path returns an empty string if the check fails ++ root_path = salt.utils.path.join(cachedir, "extrn_files", saltenv, netloc) ++ new_path = os.path.sep.join([root_path, file_name]) ++ if not salt.utils.verify.clean_path(root_path, new_path, subdir=True): ++ return "Invalid path" ++ + if len(file_name) > MAX_FILENAME_LENGTH: + file_name = salt.utils.hashutils.sha256_digest(file_name) + +diff --git a/tests/unit/test_fileclient.py b/tests/unit/test_fileclient.py +index 3aa7b7cf84..b6cc84a871 100644 +--- a/tests/unit/test_fileclient.py ++++ b/tests/unit/test_fileclient.py +@@ -63,6 +63,24 @@ class FileclientTestCase(TestCase): + ) as c_ref_itr: + assert c_ref_itr == "/__test__/files/base/testfile" + ++ def test_cache_extrn_path_valid(self): ++ """ ++ Tests for extrn_filepath for a given url ++ """ ++ file_name = "http://localhost:8000/test/location/src/dev/usr/file" ++ ++ ret = fileclient.Client(self.opts)._extrn_path(file_name, "base") ++ assert ret == os.path.join("__test__", "extrn_files", "base", ret) ++ ++ def test_cache_extrn_path_invalid(self): ++ """ ++ Tests for extrn_filepath for a given url ++ """ ++ file_name = "http://localhost:8000/../../../../../usr/bin/bad" ++ ++ ret = fileclient.Client(self.opts)._extrn_path(file_name, 
"base") ++ assert ret == "Invalid path" ++ + def test_extrn_path_with_long_filename(self): + safe_file_name = os.path.split( + fileclient.Client(self.opts)._extrn_path( +-- +2.33.0 + + diff --git a/figure-out-python-interpreter-to-use-inside-containe.patch b/figure-out-python-interpreter-to-use-inside-containe.patch new file mode 100644 index 0000000..609a338 --- /dev/null +++ b/figure-out-python-interpreter-to-use-inside-containe.patch @@ -0,0 +1,126 @@ +From 271826b0baa6b2281bc2eac9118a0fcc4675f106 Mon Sep 17 00:00:00 2001 +From: =?UTF-8?q?Pablo=20Su=C3=A1rez=20Hern=C3=A1ndez?= + +Date: Wed, 19 May 2021 16:24:27 +0100 +Subject: [PATCH] Figure out Python interpreter to use inside + containers + +Fix unit test for dockermod.call function +--- + salt/modules/dockermod.py | 28 +++++++++++++++++++++++--- + tests/unit/modules/test_dockermod.py | 30 +++++++++++++++------------- + 2 files changed, 41 insertions(+), 17 deletions(-) + +diff --git a/salt/modules/dockermod.py b/salt/modules/dockermod.py +index 176b4db926..cad307e7af 100644 +--- a/salt/modules/dockermod.py ++++ b/salt/modules/dockermod.py +@@ -217,7 +217,6 @@ import re + import shutil + import string + import subprocess +-import sys + import time + import uuid + +@@ -6865,9 +6864,32 @@ def call(name, function, *args, **kwargs): + name, thin_path, os.path.join(thin_dest_path, os.path.basename(thin_path)) + ) + ++ # figure out available python interpreter inside the container ++ pycmds = ( ++ "python3", ++ "/usr/libexec/platform-python", ++ "python27", ++ "python2.7", ++ "python26", ++ "python2.6", ++ "python2", ++ "python", ++ ) ++ container_python_bin = None ++ for py_cmd in pycmds: ++ cmd = [py_cmd] + ["--version"] ++ ret = run_all(name, subprocess.list2cmdline(cmd)) ++ if ret["retcode"] == 0: ++ container_python_bin = py_cmd ++ break ++ if not container_python_bin: ++ raise CommandExecutionError( ++ "Python interpreter cannot be found inside the container. Make sure Python is installed in the container" ++ ) ++ + # untar archive + untar_cmd = [ +- "python", ++ container_python_bin, + "-c", + ("import tarfile; " 'tarfile.open("{0}/{1}").extractall(path="{0}")').format( + thin_dest_path, os.path.basename(thin_path) +@@ -6880,7 +6902,7 @@ def call(name, function, *args, **kwargs): + try: + salt_argv = ( + [ +- "python{0}".format(sys.version_info[0]), ++ container_python_bin, + os.path.join(thin_dest_path, "salt-call"), + "--metadata", + "--local", +diff --git a/tests/unit/modules/test_dockermod.py b/tests/unit/modules/test_dockermod.py +index 48526acb71..ebe97a83f5 100644 +--- a/tests/unit/modules/test_dockermod.py ++++ b/tests/unit/modules/test_dockermod.py +@@ -1049,33 +1049,35 @@ class DockerTestCase(TestCase, LoaderModuleMockMixin): + # [ call(name, [args]), ... 
+ self.maxDiff = None + self.assertIn("mkdir", docker_run_all_mock.mock_calls[0][1][1]) +- self.assertIn("mkdir", docker_run_all_mock.mock_calls[4][1][1]) ++ self.assertIn("mkdir", docker_run_all_mock.mock_calls[5][1][1]) + self.assertNotEqual( + docker_run_all_mock.mock_calls[0][1][1], +- docker_run_all_mock.mock_calls[4][1][1], ++ docker_run_all_mock.mock_calls[5][1][1], + ) + +- self.assertIn("salt-call", docker_run_all_mock.mock_calls[2][1][1]) +- self.assertIn("salt-call", docker_run_all_mock.mock_calls[6][1][1]) ++ self.assertEqual("python3 --version", docker_run_all_mock.mock_calls[1][1][1]) ++ ++ self.assertIn("salt-call", docker_run_all_mock.mock_calls[3][1][1]) ++ self.assertIn("salt-call", docker_run_all_mock.mock_calls[8][1][1]) + self.assertNotEqual( +- docker_run_all_mock.mock_calls[2][1][1], +- docker_run_all_mock.mock_calls[6][1][1], ++ docker_run_all_mock.mock_calls[3][1][1], ++ docker_run_all_mock.mock_calls[8][1][1], + ) + + # check thin untar +- self.assertIn("tarfile", docker_run_all_mock.mock_calls[1][1][1]) +- self.assertIn("tarfile", docker_run_all_mock.mock_calls[5][1][1]) ++ self.assertIn("tarfile", docker_run_all_mock.mock_calls[2][1][1]) ++ self.assertIn("tarfile", docker_run_all_mock.mock_calls[7][1][1]) + self.assertNotEqual( +- docker_run_all_mock.mock_calls[1][1][1], +- docker_run_all_mock.mock_calls[5][1][1], ++ docker_run_all_mock.mock_calls[2][1][1], ++ docker_run_all_mock.mock_calls[7][1][1], + ) + + # check directory cleanup +- self.assertIn("rm -rf", docker_run_all_mock.mock_calls[3][1][1]) +- self.assertIn("rm -rf", docker_run_all_mock.mock_calls[7][1][1]) ++ self.assertIn("rm -rf", docker_run_all_mock.mock_calls[4][1][1]) ++ self.assertIn("rm -rf", docker_run_all_mock.mock_calls[9][1][1]) + self.assertNotEqual( +- docker_run_all_mock.mock_calls[3][1][1], +- docker_run_all_mock.mock_calls[7][1][1], ++ docker_run_all_mock.mock_calls[4][1][1], ++ docker_run_all_mock.mock_calls[9][1][1], + ) + + self.assertEqual({"retcode": 0, "comment": "container cmd"}, ret) +-- +2.31.1 + + diff --git a/fix-error-handling-in-openscap-module-bsc-1188647-40.patch b/fix-error-handling-in-openscap-module-bsc-1188647-40.patch new file mode 100644 index 0000000..7fe2229 --- /dev/null +++ b/fix-error-handling-in-openscap-module-bsc-1188647-40.patch @@ -0,0 +1,40 @@ +From b7d11d8caf3eb4fb39a070201be87bb1b3abd525 Mon Sep 17 00:00:00 2001 +From: Vladimir Nadvornik +Date: Wed, 11 Aug 2021 12:19:09 +0200 +Subject: [PATCH] Fix error handling in openscap module (bsc#1188647) + (#409) + +--- + salt/modules/openscap.py | 8 ++++++-- + 1 file changed, 6 insertions(+), 2 deletions(-) + +diff --git a/salt/modules/openscap.py b/salt/modules/openscap.py +index f75e1c5e6b..216fd89eef 100644 +--- a/salt/modules/openscap.py ++++ b/salt/modules/openscap.py +@@ -153,7 +153,9 @@ def xccdf_eval(xccdffile, ovalfiles=None, **kwargs): + tempdir = tempfile.mkdtemp() + proc = Popen(cmd_opts, stdout=PIPE, stderr=PIPE, cwd=tempdir) + (stdoutdata, error) = proc.communicate() +- success = _OSCAP_EXIT_CODES_MAP[proc.returncode] ++ success = _OSCAP_EXIT_CODES_MAP.get(proc.returncode, False) ++ if proc.returncode < 0: ++ error += "\nKilled by signal {}\n".format(proc.returncode).encode('ascii') + returncode = proc.returncode + if success: + __salt__["cp.push_dir"](tempdir) +@@ -202,7 +204,9 @@ def xccdf(params): + tempdir = tempfile.mkdtemp() + proc = Popen(shlex.split(cmd), stdout=PIPE, stderr=PIPE, cwd=tempdir) + (stdoutdata, error) = proc.communicate() +- success = _OSCAP_EXIT_CODES_MAP[proc.returncode] ++ 
success = _OSCAP_EXIT_CODES_MAP.get(proc.returncode, False) ++ if proc.returncode < 0: ++ error += "\nKilled by signal {}\n".format(proc.returncode).encode('ascii') + returncode = proc.returncode + if success: + __salt__["cp.push_dir"](tempdir) +-- +2.32.0 + + diff --git a/fix-exception-in-yumpkg.remove-for-not-installed-pac.patch b/fix-exception-in-yumpkg.remove-for-not-installed-pac.patch new file mode 100644 index 0000000..19b0de4 --- /dev/null +++ b/fix-exception-in-yumpkg.remove-for-not-installed-pac.patch @@ -0,0 +1,64 @@ +From 30a2c8c042f0fe57253a8ab47220d897bc89bd17 Mon Sep 17 00:00:00 2001 +From: Victor Zhestkov <35733135+vzhestkov@users.noreply.github.com> +Date: Thu, 24 Jun 2021 13:17:13 +0300 +Subject: [PATCH] Fix exception in yumpkg.remove for not installed + package (#380) + +--- + salt/modules/yumpkg.py | 2 ++ + tests/unit/modules/test_yumpkg.py | 25 +++++++++++++++++++++++++ + 2 files changed, 27 insertions(+) + +diff --git a/salt/modules/yumpkg.py b/salt/modules/yumpkg.py +index 0fb41a0400..c9f9f2c2d3 100644 +--- a/salt/modules/yumpkg.py ++++ b/salt/modules/yumpkg.py +@@ -2051,6 +2051,8 @@ def remove(name=None, pkgs=None, **kwargs): # pylint: disable=W0613 + old = list_pkgs() + targets = [] + for target in pkg_params: ++ if target not in old: ++ continue + version_to_remove = pkg_params[target] + installed_versions = old[target].split(",") + +diff --git a/tests/unit/modules/test_yumpkg.py b/tests/unit/modules/test_yumpkg.py +index e22c0b9251..373d2e09cb 100644 +--- a/tests/unit/modules/test_yumpkg.py ++++ b/tests/unit/modules/test_yumpkg.py +@@ -1099,6 +1099,31 @@ class YumTestCase(TestCase, LoaderModuleMockMixin): + call = cmd_mock.mock_calls[0][1][0] + assert call == expected, call + ++ def test_remove_not_existing(self): ++ """ ++ Test if no exception on removing not installed package ++ """ ++ name = "foo" ++ def list_pkgs_mock(): ++ return {} ++ cmd_mock = MagicMock( ++ return_value={"pid": 12345, "retcode": 0, "stdout": "", "stderr": ""} ++ ) ++ salt_mock = { ++ "cmd.run_all": cmd_mock, ++ "lowpkg.version_cmp": rpm.version_cmp, ++ "pkg_resource.parse_targets": MagicMock( ++ return_value=({name: None}, "repository") ++ ), ++ } ++ with patch.object(yumpkg, "list_pkgs", list_pkgs_mock), patch( ++ "salt.utils.systemd.has_scope", MagicMock(return_value=False) ++ ), patch.dict(yumpkg.__salt__, salt_mock): ++ ++ with patch.dict(yumpkg.__grains__, {"os": "CentOS", "osrelease": 7}): ++ yumpkg.remove(name) ++ cmd_mock.assert_not_called() ++ + def test_install_with_epoch(self): + """ + Tests that we properly identify a version containing an epoch as an +-- +2.32.0 + + diff --git a/fix-failing-unit-tests-for-systemd.patch b/fix-failing-unit-tests-for-systemd.patch new file mode 100644 index 0000000..a5159d8 --- /dev/null +++ b/fix-failing-unit-tests-for-systemd.patch @@ -0,0 +1,80 @@ +From 74d8f5f2d896e5e8bbf7d3fb614ae32f2cf489a5 Mon Sep 17 00:00:00 2001 +From: =?UTF-8?q?Pablo=20Su=C3=A1rez=20Hern=C3=A1ndez?= + +Date: Wed, 11 Aug 2021 11:44:54 +0100 +Subject: [PATCH] Fix failing unit tests for systemd + +--- + tests/unit/modules/test_systemd_service.py | 24 ++++++++++++++++------ + 1 file changed, 18 insertions(+), 6 deletions(-) + +diff --git a/tests/unit/modules/test_systemd_service.py b/tests/unit/modules/test_systemd_service.py +index bbd89bb3d0..51be130d29 100644 +--- a/tests/unit/modules/test_systemd_service.py ++++ b/tests/unit/modules/test_systemd_service.py +@@ -165,21 +165,27 @@ class SystemdTestCase(TestCase, LoaderModuleMockMixin): + + # systemd < 231 + with 
patch.dict(systemd.__context__, {"salt.utils.systemd.version": 230}): +- with patch.object(systemd, "_systemctl_status", mock): ++ with patch.object(systemd, "_systemctl_status", mock), patch.object( ++ systemd, "offline", MagicMock(return_value=False) ++ ): + self.assertTrue(systemd.available("sshd.service")) + self.assertFalse(systemd.available("foo.service")) + + # systemd >= 231 + with patch.dict(systemd.__context__, {"salt.utils.systemd.version": 231}): + with patch.dict(_SYSTEMCTL_STATUS, _SYSTEMCTL_STATUS_GTE_231): +- with patch.object(systemd, "_systemctl_status", mock): ++ with patch.object(systemd, "_systemctl_status", mock), patch.object( ++ systemd, "offline", MagicMock(return_value=False) ++ ): + self.assertTrue(systemd.available("sshd.service")) + self.assertFalse(systemd.available("bar.service")) + + # systemd < 231 with retcode/output changes backported (e.g. RHEL 7.3) + with patch.dict(systemd.__context__, {"salt.utils.systemd.version": 219}): + with patch.dict(_SYSTEMCTL_STATUS, _SYSTEMCTL_STATUS_GTE_231): +- with patch.object(systemd, "_systemctl_status", mock): ++ with patch.object(systemd, "_systemctl_status", mock), patch.object( ++ systemd, "offline", MagicMock(return_value=False) ++ ): + self.assertTrue(systemd.available("sshd.service")) + self.assertFalse(systemd.available("bar.service")) + +@@ -191,21 +197,27 @@ class SystemdTestCase(TestCase, LoaderModuleMockMixin): + + # systemd < 231 + with patch.dict(systemd.__context__, {"salt.utils.systemd.version": 230}): +- with patch.object(systemd, "_systemctl_status", mock): ++ with patch.object(systemd, "_systemctl_status", mock), patch.object( ++ systemd, "offline", MagicMock(return_value=False) ++ ): + self.assertFalse(systemd.missing("sshd.service")) + self.assertTrue(systemd.missing("foo.service")) + + # systemd >= 231 + with patch.dict(systemd.__context__, {"salt.utils.systemd.version": 231}): + with patch.dict(_SYSTEMCTL_STATUS, _SYSTEMCTL_STATUS_GTE_231): +- with patch.object(systemd, "_systemctl_status", mock): ++ with patch.object(systemd, "_systemctl_status", mock), patch.object( ++ systemd, "offline", MagicMock(return_value=False) ++ ): + self.assertFalse(systemd.missing("sshd.service")) + self.assertTrue(systemd.missing("bar.service")) + + # systemd < 231 with retcode/output changes backported (e.g. 
RHEL 7.3) + with patch.dict(systemd.__context__, {"salt.utils.systemd.version": 219}): + with patch.dict(_SYSTEMCTL_STATUS, _SYSTEMCTL_STATUS_GTE_231): +- with patch.object(systemd, "_systemctl_status", mock): ++ with patch.object(systemd, "_systemctl_status", mock), patch.object( ++ systemd, "offline", MagicMock(return_value=False) ++ ): + self.assertFalse(systemd.missing("sshd.service")) + self.assertTrue(systemd.missing("bar.service")) + +-- +2.32.0 + + diff --git a/fix-issue-parsing-errors-in-ansiblegate-state-module.patch b/fix-issue-parsing-errors-in-ansiblegate-state-module.patch new file mode 100644 index 0000000..78897c5 --- /dev/null +++ b/fix-issue-parsing-errors-in-ansiblegate-state-module.patch @@ -0,0 +1,44 @@ +From cc017f6ed279af7fe02c890e4a7725e6903f364d Mon Sep 17 00:00:00 2001 +From: =?UTF-8?q?Pablo=20Su=C3=A1rez=20Hern=C3=A1ndez?= + +Date: Mon, 26 Apr 2021 12:13:59 +0100 +Subject: [PATCH] Fix issue parsing errors in ansiblegate state module + +--- + salt/states/ansiblegate.py | 12 ++++++++++-- + 1 file changed, 10 insertions(+), 2 deletions(-) + +diff --git a/salt/states/ansiblegate.py b/salt/states/ansiblegate.py +index 5daba0f37f..bd00653928 100644 +--- a/salt/states/ansiblegate.py ++++ b/salt/states/ansiblegate.py +@@ -183,7 +183,11 @@ def playbooks(name, rundir=None, git_repo=None, git_kwargs=None, ansible_kwargs= + checks = __salt__["ansible.playbooks"]( + name, rundir=rundir, check=True, diff=True, **ansible_kwargs + ) +- if all( ++ if "stats" not in checks: ++ ret["comment"] = checks.get("stderr", checks) ++ ret["result"] = False ++ ret["changes"] = {} ++ elif all( + not check["changed"] + and not check["failures"] + and not check["unreachable"] +@@ -212,7 +216,11 @@ def playbooks(name, rundir=None, git_repo=None, git_kwargs=None, ansible_kwargs= + results = __salt__["ansible.playbooks"]( + name, rundir=rundir, diff=True, **ansible_kwargs + ) +- if all( ++ if "stats" not in results: ++ ret["comment"] = results.get("stderr", results) ++ ret["result"] = False ++ ret["changes"] = {} ++ elif all( + not check["changed"] + and not check["failures"] + and not check["unreachable"] +-- +2.31.1 + + diff --git a/fix-missing-minion-returns-in-batch-mode-360.patch b/fix-missing-minion-returns-in-batch-mode-360.patch new file mode 100644 index 0000000..f379bb0 --- /dev/null +++ b/fix-missing-minion-returns-in-batch-mode-360.patch @@ -0,0 +1,30 @@ +From 83fbfcbf49c98624029f1d215b7ad4d247128d39 Mon Sep 17 00:00:00 2001 +From: =?UTF-8?q?Ond=C5=99ej=20Hole=C4=8Dek?= +Date: Mon, 10 May 2021 16:23:19 +0200 +Subject: [PATCH] Fix missing minion returns in batch mode (#360) + +Don't close pub if there are pending events, otherwise events will be lost +resulting in empty minion returns. + +Co-authored-by: Denis V. 
Meltsaykin +--- + salt/client/__init__.py | 2 +- + 1 file changed, 1 insertion(+), 1 deletion(-) + +diff --git a/salt/client/__init__.py b/salt/client/__init__.py +index ddb437604b..78f4d99e84 100644 +--- a/salt/client/__init__.py ++++ b/salt/client/__init__.py +@@ -920,7 +920,7 @@ class LocalClient: + + self._clean_up_subscriptions(pub_data["jid"]) + finally: +- if not was_listening: ++ if not was_listening and not self.event.pending_events: + self.event.close_pub() + + def cmd_full_return( +-- +2.31.1 + + diff --git a/fix-save-for-iptables-state-module-bsc-1185131-372.patch b/fix-save-for-iptables-state-module-bsc-1185131-372.patch new file mode 100644 index 0000000..0df6e4c --- /dev/null +++ b/fix-save-for-iptables-state-module-bsc-1185131-372.patch @@ -0,0 +1,433 @@ +From 944f2a8e4db522ad32f547cf350a1268caa6de5a Mon Sep 17 00:00:00 2001 +From: Victor Zhestkov <35733135+vzhestkov@users.noreply.github.com> +Date: Thu, 24 Jun 2021 13:18:51 +0300 +Subject: [PATCH] Fix save for iptables state module (bsc#1185131) + (#372) + +--- + salt/states/iptables.py | 86 ++++++++------ + tests/unit/states/test_iptables.py | 184 ++++++++++++++++++++++++++++- + 2 files changed, 227 insertions(+), 43 deletions(-) + +diff --git a/salt/states/iptables.py b/salt/states/iptables.py +index 61dfc7e665..2e81477f18 100644 +--- a/salt/states/iptables.py ++++ b/salt/states/iptables.py +@@ -401,7 +401,7 @@ def append(name, table="filter", family="ipv4", **kwargs): + if save: + if save_file is True: + save_file = None +- __salt__["iptables.save"](save_file, family=family) ++ __salt__["iptables.save"](filename=save_file, family=family) + if not ret["changes"]["locale"]: + del ret["changes"]["locale"] + ret["comment"] = "\n".join(comments) +@@ -426,7 +426,9 @@ def append(name, table="filter", family="ipv4", **kwargs): + filename = kwargs["save"] + else: + filename = None +- saved_rules = __salt__["iptables.get_saved_rules"](family=family) ++ saved_rules = __salt__["iptables.get_saved_rules"]( ++ conf_file=filename, family=family ++ ) + _rules = __salt__["iptables.get_rules"](family=family) + __rules = [] + for table in _rules: +@@ -438,7 +440,7 @@ def append(name, table="filter", family="ipv4", **kwargs): + __saved_rules.append(saved_rules[table][chain].get("rules")) + # Only save if rules in memory are different than saved rules + if __rules != __saved_rules: +- out = __salt__["iptables.save"](filename, family=family) ++ out = __salt__["iptables.save"](filename=filename, family=family) + ret["comment"] += ("\nSaved iptables rule {} for {}\n" "{}\n{}").format( + name, family, command.strip(), out + ) +@@ -454,16 +456,15 @@ def append(name, table="filter", family="ipv4", **kwargs): + ret["comment"] = "Set iptables rule for {} to: {} for {}".format( + name, command.strip(), family + ) +- if "save" in kwargs: +- if kwargs["save"]: +- if kwargs["save"] is not True: +- filename = kwargs["save"] +- else: +- filename = None +- out = __salt__["iptables.save"](filename, family=family) +- ret["comment"] = ( +- "Set and saved iptables rule {} for {}\n" "{}\n{}" +- ).format(name, family, command.strip(), out) ++ if "save" in kwargs and kwargs["save"]: ++ if kwargs["save"] is not True: ++ filename = kwargs["save"] ++ else: ++ filename = None ++ out = __salt__["iptables.save"](filename=filename, family=family) ++ ret["comment"] = ( ++ "Set and saved iptables rule {} for {}\n" "{}\n{}" ++ ).format(name, family, command.strip(), out) + return ret + else: + ret["result"] = False +@@ -527,7 +528,7 @@ def insert(name, table="filter", 
family="ipv4", **kwargs): + if save: + if save_file is True: + save_file = None +- __salt__["iptables.save"](save_file, family=family) ++ __salt__["iptables.save"](filename=save_file, family=family) + if not ret["changes"]["locale"]: + del ret["changes"]["locale"] + ret["comment"] = "\n".join(comments) +@@ -552,7 +553,9 @@ def insert(name, table="filter", family="ipv4", **kwargs): + filename = kwargs["save"] + else: + filename = None +- saved_rules = __salt__["iptables.get_saved_rules"](family=family) ++ saved_rules = __salt__["iptables.get_saved_rules"]( ++ conf_file=filename, family=family ++ ) + _rules = __salt__["iptables.get_rules"](family=family) + __rules = [] + for table in _rules: +@@ -564,7 +567,7 @@ def insert(name, table="filter", family="ipv4", **kwargs): + __saved_rules.append(saved_rules[table][chain].get("rules")) + # Only save if rules in memory are different than saved rules + if __rules != __saved_rules: +- out = __salt__["iptables.save"](filename, family=family) ++ out = __salt__["iptables.save"](filename=filename, family=family) + ret["comment"] += ("\nSaved iptables rule {} for {}\n" "{}\n{}").format( + name, family, command.strip(), out + ) +@@ -582,12 +585,15 @@ def insert(name, table="filter", family="ipv4", **kwargs): + ret["comment"] = "Set iptables rule for {} to: {} for {}".format( + name, command.strip(), family + ) +- if "save" in kwargs: +- if kwargs["save"]: +- out = __salt__["iptables.save"](filename=None, family=family) +- ret["comment"] = ( +- "Set and saved iptables rule {} for {}\n" "{}\n{}" +- ).format(name, family, command.strip(), out) ++ if "save" in kwargs and kwargs["save"]: ++ if kwargs["save"] is not True: ++ filename = kwargs["save"] ++ else: ++ filename = None ++ out = __salt__["iptables.save"](filename=filename, family=family) ++ ret["comment"] = ( ++ "Set and saved iptables rule {} for {}\n" "{}\n{}" ++ ).format(name, family, command.strip(), out) + return ret + else: + ret["result"] = False +@@ -646,7 +652,7 @@ def delete(name, table="filter", family="ipv4", **kwargs): + if save: + if save_file is True: + save_file = None +- __salt__["iptables.save"](save_file, family=family) ++ __salt__["iptables.save"](filename=save_file, family=family) + if not ret["changes"]["locale"]: + del ret["changes"]["locale"] + ret["comment"] = "\n".join(comments) +@@ -688,12 +694,15 @@ def delete(name, table="filter", family="ipv4", **kwargs): + ret["changes"] = {"locale": name} + ret["result"] = True + ret["comment"] = "Delete iptables rule for {} {}".format(name, command.strip()) +- if "save" in kwargs: +- if kwargs["save"]: +- out = __salt__["iptables.save"](filename=None, family=family) +- ret["comment"] = ( +- "Deleted and saved iptables rule {} for {}\n" "{}\n{}" +- ).format(name, family, command.strip(), out) ++ if "save" in kwargs and kwargs["save"]: ++ if kwargs["save"] is not True: ++ filename = kwargs["save"] ++ else: ++ filename = None ++ out = __salt__["iptables.save"](filename=filename, family=family) ++ ret["comment"] = ( ++ "Deleted and saved iptables rule {} for {}\n" "{}\n{}" ++ ).format(name, family, command.strip(), out) + return ret + else: + ret["result"] = False +@@ -751,14 +760,17 @@ def set_policy(name, table="filter", family="ipv4", **kwargs): + ret["comment"] = "Set default policy for {} to {} family {}".format( + kwargs["chain"], kwargs["policy"], family + ) +- if "save" in kwargs: +- if kwargs["save"]: +- __salt__["iptables.save"](filename=None, family=family) +- ret[ +- "comment" +- ] = "Set and saved default policy for {} to {} 
family {}".format( +- kwargs["chain"], kwargs["policy"], family +- ) ++ if "save" in kwargs and kwargs["save"]: ++ if kwargs["save"] is not True: ++ filename = kwargs["save"] ++ else: ++ filename = None ++ __salt__["iptables.save"](filename=filename, family=family) ++ ret[ ++ "comment" ++ ] = "Set and saved default policy for {} to {} family {}".format( ++ kwargs["chain"], kwargs["policy"], family ++ ) + return ret + else: + ret["result"] = False +diff --git a/tests/unit/states/test_iptables.py b/tests/unit/states/test_iptables.py +index c49022c962..975ae49c3e 100644 +--- a/tests/unit/states/test_iptables.py ++++ b/tests/unit/states/test_iptables.py +@@ -135,7 +135,7 @@ class IptablesTestCase(TestCase, LoaderModuleMockMixin): + with patch.object(iptables, "_STATE_INTERNAL_KEYWORDS", mock): + mock = MagicMock(return_value="a") + with patch.dict(iptables.__salt__, {"iptables.build_rule": mock}): +- mock = MagicMock(side_effect=[True, False, False, False]) ++ mock = MagicMock(side_effect=[True, False, False, False, False, True]) + with patch.dict(iptables.__salt__, {"iptables.check": mock}): + ret.update( + { +@@ -161,7 +161,7 @@ class IptablesTestCase(TestCase, LoaderModuleMockMixin): + ) + + with patch.dict(iptables.__opts__, {"test": False}): +- mock = MagicMock(side_effect=[True, False]) ++ mock = MagicMock(side_effect=[True, False, True, True]) + with patch.dict(iptables.__salt__, {"iptables.append": mock}): + ret.update( + { +@@ -188,6 +188,65 @@ class IptablesTestCase(TestCase, LoaderModuleMockMixin): + iptables.append("salt", table="", chain=""), ret + ) + ++ mock_save = MagicMock( ++ side_effect=['Wrote 1 lines to "/tmp/iptables"', ""] ++ ) ++ with patch.dict( ++ iptables.__salt__, {"iptables.save": mock_save} ++ ): ++ mock_get_saved_rules = MagicMock(side_effect=[""]) ++ with patch.dict( ++ iptables.__salt__, ++ {"iptables.get_saved_rules": mock_get_saved_rules}, ++ ): ++ mock = MagicMock(side_effect=[""]) ++ with patch.dict( ++ iptables.__salt__, {"iptables.get_rules": mock} ++ ): ++ ret.update( ++ { ++ "changes": {"locale": "salt"}, ++ "result": True, ++ "comment": "Set and saved iptables rule" ++ ' salt for ipv4\na\nWrote 1 lines to "/tmp/iptables"', ++ } ++ ) ++ self.assertDictEqual( ++ iptables.append( ++ "salt", ++ table="", ++ chain="", ++ save="/tmp/iptables", ++ ), ++ ret, ++ ) ++ ret.update( ++ { ++ "changes": {}, ++ "result": True, ++ "comment": "iptables rule for salt already set (a) for ipv4", ++ } ++ ) ++ self.assertDictEqual( ++ iptables.append( ++ "salt", ++ table="", ++ chain="", ++ save="/tmp/iptables", ++ ), ++ ret, ++ ) ++ self.assertEqual( ++ mock_get_saved_rules.mock_calls[0][2][ ++ "conf_file" ++ ], ++ "/tmp/iptables", ++ ) ++ self.assertEqual( ++ mock_save.mock_calls[0][2]["filename"], ++ "/tmp/iptables", ++ ) ++ + def test_insert(self): + """ + Test to insert a rule into a chain +@@ -200,7 +259,7 @@ class IptablesTestCase(TestCase, LoaderModuleMockMixin): + with patch.object(iptables, "_STATE_INTERNAL_KEYWORDS", mock): + mock = MagicMock(return_value="a") + with patch.dict(iptables.__salt__, {"iptables.build_rule": mock}): +- mock = MagicMock(side_effect=[True, False, False, False]) ++ mock = MagicMock(side_effect=[True, False, False, False, False, True]) + with patch.dict(iptables.__salt__, {"iptables.check": mock}): + ret.update( + { +@@ -226,7 +285,7 @@ class IptablesTestCase(TestCase, LoaderModuleMockMixin): + ) + + with patch.dict(iptables.__opts__, {"test": False}): +- mock = MagicMock(side_effect=[False, True]) ++ mock = MagicMock(side_effect=[False, 
True, False, True]) + with patch.dict(iptables.__salt__, {"iptables.insert": mock}): + ret.update( + { +@@ -258,6 +317,67 @@ class IptablesTestCase(TestCase, LoaderModuleMockMixin): + ret, + ) + ++ mock_save = MagicMock( ++ side_effect=['Wrote 1 lines to "/tmp/iptables"', ""] ++ ) ++ with patch.dict( ++ iptables.__salt__, {"iptables.save": mock_save} ++ ): ++ mock_get_saved_rules = MagicMock(side_effect=[""]) ++ with patch.dict( ++ iptables.__salt__, ++ {"iptables.get_saved_rules": mock_get_saved_rules}, ++ ): ++ mock = MagicMock(side_effect=[""]) ++ with patch.dict( ++ iptables.__salt__, {"iptables.get_rules": mock} ++ ): ++ ret.update( ++ { ++ "changes": {"locale": "salt"}, ++ "result": True, ++ "comment": "Set and saved iptables rule" ++ ' salt for ipv4\na\nWrote 1 lines to "/tmp/iptables"', ++ } ++ ) ++ self.assertDictEqual( ++ iptables.insert( ++ "salt", ++ table="", ++ chain="", ++ position="", ++ save="/tmp/iptables", ++ ), ++ ret, ++ ) ++ ret.update( ++ { ++ "changes": {}, ++ "result": True, ++ "comment": "iptables rule for salt already set for ipv4 (a)", ++ } ++ ) ++ self.assertDictEqual( ++ iptables.insert( ++ "salt", ++ table="", ++ chain="", ++ position="", ++ save="/tmp/iptables", ++ ), ++ ret, ++ ) ++ self.assertEqual( ++ mock_get_saved_rules.mock_calls[0][2][ ++ "conf_file" ++ ], ++ "/tmp/iptables", ++ ) ++ self.assertEqual( ++ mock_save.mock_calls[0][2]["filename"], ++ "/tmp/iptables", ++ ) ++ + def test_delete(self): + """ + Test to delete a rule to a chain +@@ -270,7 +390,7 @@ class IptablesTestCase(TestCase, LoaderModuleMockMixin): + with patch.object(iptables, "_STATE_INTERNAL_KEYWORDS", mock): + mock = MagicMock(return_value="a") + with patch.dict(iptables.__salt__, {"iptables.build_rule": mock}): +- mock = MagicMock(side_effect=[False, True, True, True]) ++ mock = MagicMock(side_effect=[False, True, True, True, True, False]) + with patch.dict(iptables.__salt__, {"iptables.check": mock}): + ret.update( + { +@@ -296,7 +416,7 @@ class IptablesTestCase(TestCase, LoaderModuleMockMixin): + ) + + with patch.dict(iptables.__opts__, {"test": False}): +- mock = MagicMock(side_effect=[False, True]) ++ mock = MagicMock(side_effect=[False, True, False, False]) + with patch.dict(iptables.__salt__, {"iptables.delete": mock}): + ret.update( + { +@@ -327,6 +447,58 @@ class IptablesTestCase(TestCase, LoaderModuleMockMixin): + ret, + ) + ++ mock_save = MagicMock( ++ side_effect=['Wrote 1 lines to "/tmp/iptables"', ""] ++ ) ++ with patch.dict( ++ iptables.__salt__, {"iptables.save": mock_save} ++ ): ++ mock = MagicMock(side_effect=[True, False]) ++ with patch.dict( ++ iptables.__salt__, {"iptables.check": mock} ++ ): ++ mock = MagicMock(side_effect=[""]) ++ with patch.dict( ++ iptables.__salt__, {"iptables.get_rules": mock} ++ ): ++ ret.update( ++ { ++ "changes": {"locale": "salt"}, ++ "result": True, ++ "comment": "Deleted and saved iptables rule" ++ ' salt for ipv4\na\nWrote 1 lines to "/tmp/iptables"', ++ } ++ ) ++ self.assertDictEqual( ++ iptables.delete( ++ "salt", ++ table="", ++ chain="", ++ save="/tmp/iptables", ++ ), ++ ret, ++ ) ++ ret.update( ++ { ++ "changes": {}, ++ "result": True, ++ "comment": "iptables rule for salt already absent for ipv4 (a)", ++ } ++ ) ++ self.assertDictEqual( ++ iptables.delete( ++ "salt", ++ table="", ++ chain="", ++ save="/tmp/iptables", ++ ), ++ ret, ++ ) ++ self.assertEqual( ++ mock_save.mock_calls[0][2]["filename"], ++ "/tmp/iptables", ++ ) ++ + def test_set_policy(self): + """ + Test to sets the default policy for iptables firewall tables +-- 
+2.32.0 + + diff --git a/grains.extra-support-old-non-intel-kernels-bsc-11806.patch b/grains.extra-support-old-non-intel-kernels-bsc-11806.patch new file mode 100644 index 0000000..6000526 --- /dev/null +++ b/grains.extra-support-old-non-intel-kernels-bsc-11806.patch @@ -0,0 +1,51 @@ +From 27c7a9f62b1a589365785c9428293653ac76fee3 Mon Sep 17 00:00:00 2001 +From: Alberto Planas +Date: Mon, 10 May 2021 16:26:02 +0200 +Subject: [PATCH] grains.extra: support old non-intel kernels + (bsc#1180650) (#368) + +--- + salt/grains/extra.py | 16 ++++++++++++---- + 1 file changed, 12 insertions(+), 4 deletions(-) + +diff --git a/salt/grains/extra.py b/salt/grains/extra.py +index 7729a5c0a5..f2abd1281c 100644 +--- a/salt/grains/extra.py ++++ b/salt/grains/extra.py +@@ -71,10 +71,10 @@ def suse_backported_capabilities(): + } + + +-def __secure_boot(): ++def __secure_boot(efivars_dir): + """Detect if secure-boot is enabled.""" + enabled = False +- sboot = glob.glob("/sys/firmware/efi/vars/SecureBoot-*/data") ++ sboot = glob.glob(os.path.join(efivars_dir, "SecureBoot-*/data")) + if len(sboot) == 1: + # The minion is usually running as a privileged user, but is + # not the case for the master. Seems that the master can also +@@ -89,9 +89,17 @@ def __secure_boot(): + + def uefi(): + """Populate UEFI grains.""" ++ efivars_dir = next( ++ iter( ++ filter( ++ os.path.exists, ["/sys/firmware/efi/efivars", "/sys/firmware/efi/vars"] ++ ) ++ ), ++ None, ++ ) + grains = { +- "efi": os.path.exists("/sys/firmware/efi/systab"), +- "efi-secure-boot": __secure_boot(), ++ "efi": bool(efivars_dir), ++ "efi-secure-boot": __secure_boot(efivars_dir) if efivars_dir else False, + } + + return grains +-- +2.31.1 + + diff --git a/handle-master-tops-data-when-states-are-applied-by-t.patch b/handle-master-tops-data-when-states-are-applied-by-t.patch new file mode 100644 index 0000000..da3bb96 --- /dev/null +++ b/handle-master-tops-data-when-states-are-applied-by-t.patch @@ -0,0 +1,109 @@ +From e0b7511e30da289b4100aa156b67b652681afc03 Mon Sep 17 00:00:00 2001 +From: =?UTF-8?q?Pablo=20Su=C3=A1rez=20Hern=C3=A1ndez?= + +Date: Thu, 8 Jul 2021 08:57:13 +0100 +Subject: [PATCH] Handle "master tops" data when states are applied by + "transactional_update" (bsc#1187787) (#398) + +* Handle master tops data when states are applied by transactional_update (bsc#1187787) + +* Fix unit tests for transactional_update module +--- + salt/modules/transactional_update.py | 9 +++++++-- + .../unit/modules/test_transactional_update.py | 20 +++++++++---------- + 2 files changed, 17 insertions(+), 12 deletions(-) + +diff --git a/salt/modules/transactional_update.py b/salt/modules/transactional_update.py +index 7bbdb697b8..9cdaddb91a 100644 +--- a/salt/modules/transactional_update.py ++++ b/salt/modules/transactional_update.py +@@ -301,6 +301,11 @@ def __virtual__(): + return (False, "Module transactional_update requires a transactional system") + + ++class TransactionalUpdateHighstate(salt.client.ssh.state.SSHHighState): ++ def _master_tops(self): ++ return self.client.master_tops() ++ ++ + def _global_params(self_update, snapshot=None, quiet=False): + """Utility function to prepare common global parameters.""" + params = ["--non-interactive", "--drop-if-no-change"] +@@ -1107,7 +1112,7 @@ def sls( + # Clone the options data and apply some default values. 
May not be + # needed, as this module just delegate + opts = salt.utils.state.get_sls_opts(__opts__, **kwargs) +- st_ = salt.client.ssh.state.SSHHighState( ++ st_ = TransactionalUpdateHighstate( + opts, pillar, __salt__, salt.fileclient.get_file_client(__opts__) + ) + +@@ -1180,7 +1185,7 @@ def highstate(activate_transaction=False, **kwargs): + # Clone the options data and apply some default values. May not be + # needed, as this module just delegate + opts = salt.utils.state.get_sls_opts(__opts__, **kwargs) +- st_ = salt.client.ssh.state.SSHHighState( ++ st_ = TransactionalUpdateHighstate( + opts, pillar, __salt__, salt.fileclient.get_file_client(__opts__) + ) + +diff --git a/tests/unit/modules/test_transactional_update.py b/tests/unit/modules/test_transactional_update.py +index 19e477d02f..2d30f296d7 100644 +--- a/tests/unit/modules/test_transactional_update.py ++++ b/tests/unit/modules/test_transactional_update.py +@@ -622,22 +622,22 @@ class TransactionalUpdateTestCase(TestCase, LoaderModuleMockMixin): + utils_mock["files.rm_rf"].assert_called_once() + + @patch("salt.modules.transactional_update._create_and_execute_salt_state") +- @patch("salt.client.ssh.state.SSHHighState") ++ @patch("salt.modules.transactional_update.TransactionalUpdateHighstate") + @patch("salt.fileclient.get_file_client") + @patch("salt.utils.state.get_sls_opts") + def test_sls( + self, + get_sls_opts, + get_file_client, +- SSHHighState, ++ TransactionalUpdateHighstate, + _create_and_execute_salt_state, + ): + """Test transactional_update.sls""" +- SSHHighState.return_value = SSHHighState +- SSHHighState.render_highstate.return_value = (None, []) +- SSHHighState.state.reconcile_extend.return_value = (None, []) +- SSHHighState.state.requisite_in.return_value = (None, []) +- SSHHighState.state.verify_high.return_value = [] ++ TransactionalUpdateHighstate.return_value = TransactionalUpdateHighstate ++ TransactionalUpdateHighstate.render_highstate.return_value = (None, []) ++ TransactionalUpdateHighstate.state.reconcile_extend.return_value = (None, []) ++ TransactionalUpdateHighstate.state.requisite_in.return_value = (None, []) ++ TransactionalUpdateHighstate.state.verify_high.return_value = [] + + _create_and_execute_salt_state.return_value = "result" + opts_mock = { +@@ -649,18 +649,18 @@ class TransactionalUpdateTestCase(TestCase, LoaderModuleMockMixin): + _create_and_execute_salt_state.assert_called_once() + + @patch("salt.modules.transactional_update._create_and_execute_salt_state") +- @patch("salt.client.ssh.state.SSHHighState") ++ @patch("salt.modules.transactional_update.TransactionalUpdateHighstate") + @patch("salt.fileclient.get_file_client") + @patch("salt.utils.state.get_sls_opts") + def test_highstate( + self, + get_sls_opts, + get_file_client, +- SSHHighState, ++ TransactionalUpdateHighstate, + _create_and_execute_salt_state, + ): + """Test transactional_update.highstage""" +- SSHHighState.return_value = SSHHighState ++ TransactionalUpdateHighstate.return_value = TransactionalUpdateHighstate + + _create_and_execute_salt_state.return_value = "result" + opts_mock = { +-- +2.32.0 + + diff --git a/handle-volumes-on-stopped-pools-in-virt.vm_info-373.patch b/handle-volumes-on-stopped-pools-in-virt.vm_info-373.patch new file mode 100644 index 0000000..6a1081c --- /dev/null +++ b/handle-volumes-on-stopped-pools-in-virt.vm_info-373.patch @@ -0,0 +1,152 @@ +From b154f0a17c85c2fe0b85226dfeb3919bd833a85c Mon Sep 17 00:00:00 2001 +From: Cedric Bosdonnat +Date: Fri, 21 May 2021 13:04:46 +0200 +Subject: [PATCH] Handle 
volumes on stopped pools in virt.vm_info + (#373) + +For VMs having at least a disk on a stopped volume, we don't want the +user to get an exception when running virt.vm_info. Instead just provide +less information. +--- + changelog/60132.fixed | 1 + + salt/modules/virt.py | 73 +++++++++++-------- + .../pytests/unit/modules/virt/test_domain.py | 9 ++- + 3 files changed, 50 insertions(+), 33 deletions(-) + create mode 100644 changelog/60132.fixed + +diff --git a/changelog/60132.fixed b/changelog/60132.fixed +new file mode 100644 +index 0000000000..1e3bc96b98 +--- /dev/null ++++ b/changelog/60132.fixed +@@ -0,0 +1 @@ ++Gracefuly handle errors in virt.vm_info +diff --git a/salt/modules/virt.py b/salt/modules/virt.py +index 6409089109..d8a8c51ce5 100644 +--- a/salt/modules/virt.py ++++ b/salt/modules/virt.py +@@ -515,41 +515,50 @@ def _get_disks(conn, dom): + def _get_disk_volume_data(pool_name, volume_name): + qemu_target = "{}/{}".format(pool_name, volume_name) + pool = conn.storagePoolLookupByName(pool_name) +- vol = pool.storageVolLookupByName(volume_name) +- vol_info = vol.info() +- extra_properties = { +- "virtual size": vol_info[1], +- "disk size": vol_info[2], +- } +- +- backing_files = [ +- { +- "file": node.find("source").get("file"), +- "file format": node.find("format").get("type"), ++ extra_properties = {} ++ try: ++ vol = pool.storageVolLookupByName(volume_name) ++ vol_info = vol.info() ++ extra_properties = { ++ "virtual size": vol_info[1], ++ "disk size": vol_info[2], + } +- for node in elem.findall(".//backingStore[source]") +- ] + +- if backing_files: +- # We had the backing files in a flat list, nest them again. +- extra_properties["backing file"] = backing_files[0] +- parent = extra_properties["backing file"] +- for sub_backing_file in backing_files[1:]: +- parent["backing file"] = sub_backing_file +- parent = sub_backing_file ++ backing_files = [ ++ { ++ "file": node.find("source").get("file"), ++ "file format": node.find("format").get("type"), ++ } ++ for node in elem.findall(".//backingStore[source]") ++ ] + +- else: +- # In some cases the backing chain is not displayed by the domain definition +- # Try to see if we have some of it in the volume definition. +- vol_desc = ElementTree.fromstring(vol.XMLDesc()) +- backing_path = vol_desc.find("./backingStore/path") +- backing_format = vol_desc.find("./backingStore/format") +- if backing_path is not None: +- extra_properties["backing file"] = {"file": backing_path.text} +- if backing_format is not None: +- extra_properties["backing file"][ +- "file format" +- ] = backing_format.get("type") ++ if backing_files: ++ # We had the backing files in a flat list, nest them again. ++ extra_properties["backing file"] = backing_files[0] ++ parent = extra_properties["backing file"] ++ for sub_backing_file in backing_files[1:]: ++ parent["backing file"] = sub_backing_file ++ parent = sub_backing_file ++ ++ else: ++ # In some cases the backing chain is not displayed by the domain definition ++ # Try to see if we have some of it in the volume definition. 
++ vol_desc = ElementTree.fromstring(vol.XMLDesc()) ++ backing_path = vol_desc.find("./backingStore/path") ++ backing_format = vol_desc.find("./backingStore/format") ++ if backing_path is not None: ++ extra_properties["backing file"] = { ++ "file": backing_path.text ++ } ++ if backing_format is not None: ++ extra_properties["backing file"][ ++ "file format" ++ ] = backing_format.get("type") ++ except libvirt.libvirtError: ++ # The volume won't be found if the pool is not started, just output less infos ++ log.info( ++ "Couldn't extract all volume informations: pool is likely not running or refreshed" ++ ) + return (qemu_target, extra_properties) + + if disk_type == "file": +diff --git a/tests/pytests/unit/modules/virt/test_domain.py b/tests/pytests/unit/modules/virt/test_domain.py +index 76433eaef4..a9453e4a66 100644 +--- a/tests/pytests/unit/modules/virt/test_domain.py ++++ b/tests/pytests/unit/modules/virt/test_domain.py +@@ -192,6 +192,11 @@ def test_get_disks(make_mock_vm, make_mock_storage_pool): + +
+ ++ ++ ++ ++ ++ + + + +@@ -205,11 +210,12 @@ def test_get_disks(make_mock_vm, make_mock_storage_pool): + + + """ +- domain_mock = make_mock_vm(vm_def) ++ make_mock_vm(vm_def) + + pool_mock = make_mock_storage_pool( + "default", "dir", ["srv01_system", "srv01_data", "vm05_system"] + ) ++ make_mock_storage_pool("stopped", "dir", []) + + # Append backing store to srv01_data volume XML description + srv1data_mock = pool_mock.storageVolLookupByName("srv01_data") +@@ -256,6 +262,7 @@ def test_get_disks(make_mock_vm, make_mock_storage_pool): + }, + }, + }, ++ "vdd": {"type": "disk", "file": "stopped/vm05_data", "file format": "qcow2"}, + "hda": { + "type": "cdrom", + "file format": "raw", +-- +2.31.1 + + diff --git a/implementation-of-held-unheld-functions-for-state-pk.patch b/implementation-of-held-unheld-functions-for-state-pk.patch new file mode 100644 index 0000000..f8d08a0 --- /dev/null +++ b/implementation-of-held-unheld-functions-for-state-pk.patch @@ -0,0 +1,903 @@ +From 2ee360753c8fa937d9c81bf7da24f457041650bc Mon Sep 17 00:00:00 2001 +From: Victor Zhestkov <35733135+vzhestkov@users.noreply.github.com> +Date: Mon, 5 Jul 2021 18:39:26 +0300 +Subject: [PATCH] Implementation of held/unheld functions for state pkg + (#387) + +* Implementation of held/unheld functions for state pkg +--- + salt/modules/zypperpkg.py | 201 +++++++++--- + salt/states/pkg.py | 310 +++++++++++++++++++ + tests/pytests/unit/modules/test_zypperpkg.py | 142 +++++++++ + tests/pytests/unit/states/test_pkg.py | 155 ++++++++++ + 4 files changed, 760 insertions(+), 48 deletions(-) + create mode 100644 tests/pytests/unit/modules/test_zypperpkg.py + create mode 100644 tests/pytests/unit/states/test_pkg.py + +diff --git a/salt/modules/zypperpkg.py b/salt/modules/zypperpkg.py +index e064e2cb4e..932b30bac5 100644 +--- a/salt/modules/zypperpkg.py ++++ b/salt/modules/zypperpkg.py +@@ -2071,6 +2071,76 @@ def purge( + return _uninstall(inclusion_detection, name=name, pkgs=pkgs, root=root) + + ++def list_holds(pattern=None, full=True, root=None, **kwargs): ++ """ ++ List information on locked packages. ++ ++ .. note:: ++ This function returns the computed output of ``list_locks`` ++ to show exact locked packages. ++ ++ pattern ++ Regular expression used to match the package name ++ ++ full : True ++ Show the full hold definition including version and epoch. Set to ++ ``False`` to return just the name of the package(s) being held. ++ ++ root ++ Operate on a different root directory. ++ ++ ++ CLI Example: ++ ++ .. 
code-block:: bash ++ ++ salt '*' pkg.list_holds ++ salt '*' pkg.list_holds full=False ++ """ ++ locks = list_locks(root=root) ++ ret = [] ++ inst_pkgs = {} ++ for solv_name, lock in locks.items(): ++ if lock.get("type", "package") != "package": ++ continue ++ try: ++ found_pkgs = search( ++ solv_name, ++ root=root, ++ match=None if "*" in solv_name else "exact", ++ case_sensitive=(lock.get("case_sensitive", "on") == "on"), ++ installed_only=True, ++ details=True, ++ all_versions=True, ++ ignore_no_matching_item=True, ++ ) ++ except CommandExecutionError: ++ continue ++ if found_pkgs: ++ for pkg in found_pkgs: ++ if pkg not in inst_pkgs: ++ inst_pkgs.update( ++ info_installed( ++ pkg, root=root, attr="edition,epoch", all_versions=True ++ ) ++ ) ++ ++ ptrn_re = re.compile(r"{}-\S+".format(pattern)) if pattern else None ++ for pkg_name, pkg_editions in inst_pkgs.items(): ++ for pkg_info in pkg_editions: ++ pkg_ret = ( ++ "{}-{}:{}.*".format( ++ pkg_name, pkg_info.get("epoch", 0), pkg_info.get("edition") ++ ) ++ if full ++ else pkg_name ++ ) ++ if pkg_ret not in ret and (not ptrn_re or ptrn_re.match(pkg_ret)): ++ ret.append(pkg_ret) ++ ++ return ret ++ ++ + def list_locks(root=None): + """ + List current package locks. +@@ -2141,43 +2211,68 @@ def clean_locks(root=None): + return out + + +-def unhold(name=None, pkgs=None, **kwargs): ++def unhold(name=None, pkgs=None, root=None, **kwargs): + """ +- Remove specified package lock. ++ Remove a package hold. ++ ++ name ++ A package name to unhold, or a comma-separated list of package names to ++ unhold. ++ ++ pkgs ++ A list of packages to unhold. The ``name`` parameter will be ignored if ++ this option is passed. + + root +- operate on a different root directory. ++ Operate on a different root directory. + + CLI Example: + + .. 
code-block:: bash + +- salt '*' pkg.remove_lock +- salt '*' pkg.remove_lock ,, +- salt '*' pkg.remove_lock pkgs='["foo", "bar"]' ++ salt '*' pkg.unhold ++ salt '*' pkg.unhold ,, ++ salt '*' pkg.unhold pkgs='["foo", "bar"]' + """ + ret = {} +- root = kwargs.get("root") +- if (not name and not pkgs) or (name and pkgs): ++ if not name and not pkgs: + raise CommandExecutionError("Name or packages must be specified.") +- elif name: +- pkgs = [name] + +- locks = list_locks(root) +- try: +- pkgs = list(__salt__["pkg_resource.parse_targets"](pkgs)[0].keys()) +- except MinionError as exc: +- raise CommandExecutionError(exc) ++ targets = [] ++ if pkgs: ++ targets.extend(pkgs) ++ else: ++ targets.append(name) + ++ locks = list_locks() + removed = [] +- missing = [] +- for pkg in pkgs: +- if locks.get(pkg): +- removed.append(pkg) +- ret[pkg]["comment"] = "Package {} is no longer held.".format(pkg) ++ ++ for target in targets: ++ version = None ++ if isinstance(target, dict): ++ (target, version) = next(iter(target.items())) ++ ret[target] = {"name": target, "changes": {}, "result": True, "comment": ""} ++ if locks.get(target): ++ lock_ver = None ++ if "version" in locks.get(target): ++ lock_ver = locks.get(target)["version"] ++ lock_ver = lock_ver.lstrip("= ") ++ if version and lock_ver != version: ++ ret[target]["result"] = False ++ ret[target][ ++ "comment" ++ ] = "Unable to unhold package {} as it is held with the other version.".format( ++ target ++ ) ++ else: ++ removed.append( ++ target if not lock_ver else "{}={}".format(target, lock_ver) ++ ) ++ ret[target]["changes"]["new"] = "" ++ ret[target]["changes"]["old"] = "hold" ++ ret[target]["comment"] = "Package {} is no longer held.".format(target) + else: +- missing.append(pkg) +- ret[pkg]["comment"] = "Package {} unable to be unheld.".format(pkg) ++ ret[target]["comment"] = "Package {} was already unheld.".format(target) + + if removed: + __zypper__(root=root).call("rl", *removed) +@@ -2223,47 +2318,57 @@ def remove_lock(packages, root=None, **kwargs): # pylint: disable=unused-argume + return {"removed": len(removed), "not_found": missing} + + +-def hold(name=None, pkgs=None, **kwargs): ++def hold(name=None, pkgs=None, root=None, **kwargs): + """ +- Add a package lock. Specify packages to lock by exact name. ++ Add a package hold. Specify one of ``name`` and ``pkgs``. ++ ++ name ++ A package name to hold, or a comma-separated list of package names to ++ hold. ++ ++ pkgs ++ A list of packages to hold. The ``name`` parameter will be ignored if ++ this option is passed. + + root +- operate on a different root directory. ++ Operate on a different root directory. ++ + + CLI Example: + + .. 
code-block:: bash + +- salt '*' pkg.add_lock +- salt '*' pkg.add_lock ,, +- salt '*' pkg.add_lock pkgs='["foo", "bar"]' +- +- :param name: +- :param pkgs: +- :param kwargs: +- :return: ++ salt '*' pkg.hold ++ salt '*' pkg.hold ,, ++ salt '*' pkg.hold pkgs='["foo", "bar"]' + """ + ret = {} +- root = kwargs.get("root") +- if (not name and not pkgs) or (name and pkgs): ++ if not name and not pkgs: + raise CommandExecutionError("Name or packages must be specified.") +- elif name: +- pkgs = [name] + +- locks = list_locks(root=root) ++ targets = [] ++ if pkgs: ++ targets.extend(pkgs) ++ else: ++ targets.append(name) ++ ++ locks = list_locks() + added = [] +- try: +- pkgs = list(__salt__["pkg_resource.parse_targets"](pkgs)[0].keys()) +- except MinionError as exc: +- raise CommandExecutionError(exc) + +- for pkg in pkgs: +- ret[pkg] = {"name": pkg, "changes": {}, "result": False, "comment": ""} +- if not locks.get(pkg): +- added.append(pkg) +- ret[pkg]["comment"] = "Package {} is now being held.".format(pkg) ++ for target in targets: ++ version = None ++ if isinstance(target, dict): ++ (target, version) = next(iter(target.items())) ++ ret[target] = {"name": target, "changes": {}, "result": True, "comment": ""} ++ if not locks.get(target): ++ added.append(target if not version else "{}={}".format(target, version)) ++ ret[target]["changes"]["new"] = "hold" ++ ret[target]["changes"]["old"] = "" ++ ret[target]["comment"] = "Package {} is now being held.".format(target) + else: +- ret[pkg]["comment"] = "Package {} is already set to be held.".format(pkg) ++ ret[target]["comment"] = "Package {} is already set to be held.".format( ++ target ++ ) + + if added: + __zypper__(root=root).call("al", *added) +diff --git a/salt/states/pkg.py b/salt/states/pkg.py +index f7327a33e3..0ef3f056c5 100644 +--- a/salt/states/pkg.py ++++ b/salt/states/pkg.py +@@ -3550,3 +3550,313 @@ def mod_watch(name, **kwargs): + "comment": "pkg.{} does not work with the watch requisite".format(sfun), + "result": False, + } ++ ++ ++def held(name, version=None, pkgs=None, replace=False, **kwargs): ++ """ ++ Set package in 'hold' state, meaning it will not be changed. ++ ++ :param str name: ++ The name of the package to be held. This parameter is ignored ++ if ``pkgs`` is used. ++ ++ :param str version: ++ Hold a specific version of a package. ++ Full description of this parameter is in `installed` function. ++ ++ .. note:: ++ ++ This parameter make sense for Zypper-based systems. ++ Ignored for YUM/DNF and APT ++ ++ :param list pkgs: ++ A list of packages to be held. All packages listed under ``pkgs`` ++ will be held. ++ ++ .. code-block:: yaml ++ ++ mypkgs: ++ pkg.held: ++ - pkgs: ++ - foo ++ - bar: 1.2.3-4 ++ - baz ++ ++ .. note:: ++ ++ For Zypper-based systems the package could be held for ++ the version specified. YUM/DNF and APT ingore it. ++ ++ :param bool replace: ++ Force replacement of existings holds with specified. ++ By default, this parameter is set to ``False``. ++ """ ++ ++ if isinstance(pkgs, list) and len(pkgs) == 0 and not replace: ++ return { ++ "name": name, ++ "changes": {}, ++ "result": True, ++ "comment": "No packages to be held provided", ++ } ++ ++ # If just a name (and optionally a version) is passed, just pack them into ++ # the pkgs argument. 
++ if name and pkgs is None: ++ if version: ++ pkgs = [{name: version}] ++ version = None ++ else: ++ pkgs = [name] ++ ++ locks = {} ++ vr_lock = False ++ if "pkg.list_locks" in __salt__: ++ locks = __salt__["pkg.list_locks"]() ++ vr_lock = True ++ elif "pkg.list_holds" in __salt__: ++ _locks = __salt__["pkg.list_holds"](full=True) ++ lock_re = re.compile(r"^(.+)-(\d+):(.*)\.\*") ++ for lock in _locks: ++ match = lock_re.match(lock) ++ if match: ++ epoch = match.group(2) ++ if epoch == "0": ++ epoch = "" ++ else: ++ epoch = "{}:".format(epoch) ++ locks.update( ++ {match.group(1): {"version": "{}{}".format(epoch, match.group(3))}} ++ ) ++ else: ++ locks.update({lock: {}}) ++ elif "pkg.get_selections" in __salt__: ++ _locks = __salt__["pkg.get_selections"](state="hold") ++ for lock in _locks.get("hold", []): ++ locks.update({lock: {}}) ++ else: ++ return { ++ "name": name, ++ "changes": {}, ++ "result": False, ++ "comment": "No any function to get the list of held packages available.\n" ++ "Check if the package manager supports package locking.", ++ } ++ ++ if "pkg.hold" not in __salt__: ++ return { ++ "name": name, ++ "changes": {}, ++ "result": False, ++ "comment": "`hold` function is not implemented for the package manager.", ++ } ++ ++ ret = {"name": name, "changes": {}, "result": True, "comment": ""} ++ comments = [] ++ ++ held_pkgs = set() ++ for pkg in pkgs: ++ if isinstance(pkg, dict): ++ (pkg_name, pkg_ver) = next(iter(pkg.items())) ++ else: ++ pkg_name = pkg ++ pkg_ver = None ++ lock_ver = None ++ if pkg_name in locks and "version" in locks[pkg_name]: ++ lock_ver = locks[pkg_name]["version"] ++ lock_ver = lock_ver.lstrip("= ") ++ held_pkgs.add(pkg_name) ++ if pkg_name not in locks or (vr_lock and lock_ver != pkg_ver): ++ if __opts__["test"]: ++ if pkg_name in locks: ++ comments.append( ++ "The following package's hold rule would be updated: {}{}".format( ++ pkg_name, ++ "" if not pkg_ver else " (version = {})".format(pkg_ver), ++ ) ++ ) ++ else: ++ comments.append( ++ "The following package would be held: {}{}".format( ++ pkg_name, ++ "" if not pkg_ver else " (version = {})".format(pkg_ver), ++ ) ++ ) ++ else: ++ unhold_ret = None ++ if pkg_name in locks: ++ unhold_ret = __salt__["pkg.unhold"](name=name, pkgs=[pkg_name]) ++ hold_ret = __salt__["pkg.hold"](name=name, pkgs=[pkg]) ++ if not hold_ret.get(pkg_name, {}).get("result", False): ++ ret["result"] = False ++ if ( ++ unhold_ret ++ and unhold_ret.get(pkg_name, {}).get("result", False) ++ and hold_ret ++ and hold_ret.get(pkg_name, {}).get("result", False) ++ ): ++ comments.append( ++ "Package {} was updated with hold rule".format(pkg_name) ++ ) ++ elif hold_ret and hold_ret.get(pkg_name, {}).get("result", False): ++ comments.append("Package {} is now being held".format(pkg_name)) ++ else: ++ comments.append("Package {} was not held".format(pkg_name)) ++ ret["changes"].update(hold_ret) ++ ++ if replace: ++ for pkg_name in locks: ++ if locks[pkg_name].get("type", "package") != "package": ++ continue ++ if __opts__["test"]: ++ if pkg_name not in held_pkgs: ++ comments.append( ++ "The following package would be unheld: {}".format(pkg_name) ++ ) ++ else: ++ if pkg_name not in held_pkgs: ++ unhold_ret = __salt__["pkg.unhold"](name=name, pkgs=[pkg_name]) ++ if not unhold_ret.get(pkg_name, {}).get("result", False): ++ ret["result"] = False ++ if unhold_ret and unhold_ret.get(pkg_name, {}).get("comment"): ++ comments.append(unhold_ret.get(pkg_name).get("comment")) ++ ret["changes"].update(unhold_ret) ++ ++ ret["comment"] = 
"\n".join(comments) ++ if not (ret["changes"] or ret["comment"]): ++ ret["comment"] = "No changes made" ++ ++ return ret ++ ++ ++def unheld(name, version=None, pkgs=None, all=False, **kwargs): ++ """ ++ Unset package from 'hold' state, to allow operations with the package. ++ ++ :param str name: ++ The name of the package to be unheld. This parameter is ignored if "pkgs" ++ is used. ++ ++ :param str version: ++ Unhold a specific version of a package. ++ Full description of this parameter is in `installed` function. ++ ++ .. note:: ++ ++ This parameter make sense for Zypper-based systems. ++ Ignored for YUM/DNF and APT. ++ ++ :param list pkgs: ++ A list of packages to be unheld. All packages listed under ``pkgs`` ++ will be unheld. ++ ++ .. code-block:: yaml ++ ++ mypkgs: ++ pkg.unheld: ++ - pkgs: ++ - foo ++ - bar: 1.2.3-4 ++ - baz ++ ++ .. note:: ++ ++ For Zypper-based systems the package could be held for ++ the version specified. YUM/DNF and APT ingore it. ++ For ``unheld`` there is no need to specify the exact version ++ to be unheld. ++ ++ :param bool all: ++ Force removing of all existings locks. ++ By default, this parameter is set to ``False``. ++ """ ++ ++ if isinstance(pkgs, list) and len(pkgs) == 0 and not all: ++ return { ++ "name": name, ++ "changes": {}, ++ "result": True, ++ "comment": "No packages to be unheld provided", ++ } ++ ++ # If just a name (and optionally a version) is passed, just pack them into ++ # the pkgs argument. ++ if name and pkgs is None: ++ pkgs = [{name: version}] ++ version = None ++ ++ locks = {} ++ vr_lock = False ++ if "pkg.list_locks" in __salt__: ++ locks = __salt__["pkg.list_locks"]() ++ vr_lock = True ++ elif "pkg.list_holds" in __salt__: ++ _locks = __salt__["pkg.list_holds"](full=True) ++ lock_re = re.compile(r"^(.+)-(\d+):(.*)\.\*") ++ for lock in _locks: ++ match = lock_re.match(lock) ++ if match: ++ epoch = match.group(2) ++ if epoch == "0": ++ epoch = "" ++ else: ++ epoch = "{}:".format(epoch) ++ locks.update( ++ {match.group(1): {"version": "{}{}".format(epoch, match.group(3))}} ++ ) ++ else: ++ locks.update({lock: {}}) ++ elif "pkg.get_selections" in __salt__: ++ _locks = __salt__["pkg.get_selections"](state="hold") ++ for lock in _locks.get("hold", []): ++ locks.update({lock: {}}) ++ else: ++ return { ++ "name": name, ++ "changes": {}, ++ "result": False, ++ "comment": "No any function to get the list of held packages available.\n" ++ "Check if the package manager supports package locking.", ++ } ++ ++ dpkgs = {} ++ for pkg in pkgs: ++ if isinstance(pkg, dict): ++ (pkg_name, pkg_ver) = next(iter(pkg.items())) ++ dpkgs.update({pkg_name: pkg_ver}) ++ else: ++ dpkgs.update({pkg: None}) ++ ++ ret = {"name": name, "changes": {}, "result": True, "comment": ""} ++ comments = [] ++ ++ for pkg_name in locks: ++ if locks[pkg_name].get("type", "package") != "package": ++ continue ++ lock_ver = None ++ if vr_lock and "version" in locks[pkg_name]: ++ lock_ver = locks[pkg_name]["version"] ++ lock_ver = lock_ver.lstrip("= ") ++ if all or (pkg_name in dpkgs and (not lock_ver or lock_ver == dpkgs[pkg_name])): ++ if __opts__["test"]: ++ comments.append( ++ "The following package would be unheld: {}{}".format( ++ pkg_name, ++ "" ++ if not dpkgs.get(pkg_name) ++ else " (version = {})".format(lock_ver), ++ ) ++ ) ++ else: ++ unhold_ret = __salt__["pkg.unhold"](name=name, pkgs=[pkg_name]) ++ if not unhold_ret.get(pkg_name, {}).get("result", False): ++ ret["result"] = False ++ if unhold_ret and unhold_ret.get(pkg_name, {}).get("comment"): ++ 
comments.append(unhold_ret.get(pkg_name).get("comment")) ++ ret["changes"].update(unhold_ret) ++ ++ ret["comment"] = "\n".join(comments) ++ if not (ret["changes"] or ret["comment"]): ++ ret["comment"] = "No changes made" ++ ++ return ret +diff --git a/tests/pytests/unit/modules/test_zypperpkg.py b/tests/pytests/unit/modules/test_zypperpkg.py +new file mode 100644 +index 0000000000..464fae1f47 +--- /dev/null ++++ b/tests/pytests/unit/modules/test_zypperpkg.py +@@ -0,0 +1,142 @@ ++import pytest ++import salt.modules.pkg_resource as pkg_resource ++import salt.modules.zypperpkg as zypper ++from tests.support.mock import MagicMock, patch ++ ++ ++@pytest.fixture ++def configure_loader_modules(): ++ return {zypper: {"rpm": None}, pkg_resource: {}} ++ ++ ++def test_pkg_hold(): ++ """ ++ Tests holding packages with Zypper ++ """ ++ ++ # Test openSUSE 15.3 ++ list_locks_mock = { ++ "bar": {"type": "package", "match_type": "glob", "case_sensitive": "on"}, ++ "minimal_base": { ++ "type": "pattern", ++ "match_type": "glob", ++ "case_sensitive": "on", ++ }, ++ "baz": {"type": "package", "match_type": "glob", "case_sensitive": "on"}, ++ } ++ ++ cmd = MagicMock( ++ return_value={ ++ "pid": 1234, ++ "retcode": 0, ++ "stdout": "Specified lock has been successfully added.", ++ "stderr": "", ++ } ++ ) ++ with patch.object( ++ zypper, "list_locks", MagicMock(return_value=list_locks_mock) ++ ), patch.dict(zypper.__salt__, {"cmd.run_all": cmd}): ++ ret = zypper.hold("foo") ++ assert ret["foo"]["changes"]["old"] == "" ++ assert ret["foo"]["changes"]["new"] == "hold" ++ assert ret["foo"]["comment"] == "Package foo is now being held." ++ cmd.assert_called_once_with( ++ ["zypper", "--non-interactive", "--no-refresh", "al", "foo"], ++ env={}, ++ output_loglevel="trace", ++ python_shell=False, ++ ) ++ cmd.reset_mock() ++ ret = zypper.hold(pkgs=["foo", "bar"]) ++ assert ret["foo"]["changes"]["old"] == "" ++ assert ret["foo"]["changes"]["new"] == "hold" ++ assert ret["foo"]["comment"] == "Package foo is now being held." ++ assert ret["bar"]["changes"] == {} ++ assert ret["bar"]["comment"] == "Package bar is already set to be held." ++ cmd.assert_called_once_with( ++ ["zypper", "--non-interactive", "--no-refresh", "al", "foo"], ++ env={}, ++ output_loglevel="trace", ++ python_shell=False, ++ ) ++ ++ ++def test_pkg_unhold(): ++ """ ++ Tests unholding packages with Zypper ++ """ ++ ++ # Test openSUSE 15.3 ++ list_locks_mock = { ++ "bar": {"type": "package", "match_type": "glob", "case_sensitive": "on"}, ++ "minimal_base": { ++ "type": "pattern", ++ "match_type": "glob", ++ "case_sensitive": "on", ++ }, ++ "baz": {"type": "package", "match_type": "glob", "case_sensitive": "on"}, ++ } ++ ++ cmd = MagicMock( ++ return_value={ ++ "pid": 1234, ++ "retcode": 0, ++ "stdout": "1 lock has been successfully removed.", ++ "stderr": "", ++ } ++ ) ++ with patch.object( ++ zypper, "list_locks", MagicMock(return_value=list_locks_mock) ++ ), patch.dict(zypper.__salt__, {"cmd.run_all": cmd}): ++ ret = zypper.unhold("foo") ++ assert ret["foo"]["comment"] == "Package foo was already unheld." ++ cmd.assert_not_called() ++ cmd.reset_mock() ++ ret = zypper.unhold(pkgs=["foo", "bar"]) ++ assert ret["foo"]["changes"] == {} ++ assert ret["foo"]["comment"] == "Package foo was already unheld." ++ assert ret["bar"]["changes"]["old"] == "hold" ++ assert ret["bar"]["changes"]["new"] == "" ++ assert ret["bar"]["comment"] == "Package bar is no longer held." 
++ cmd.assert_called_once_with( ++ ["zypper", "--non-interactive", "--no-refresh", "rl", "bar"], ++ env={}, ++ output_loglevel="trace", ++ python_shell=False, ++ ) ++ ++ ++def test_pkg_list_holds(): ++ """ ++ Tests listing of calculated held packages with Zypper ++ """ ++ ++ # Test openSUSE 15.3 ++ list_locks_mock = { ++ "bar": {"type": "package", "match_type": "glob", "case_sensitive": "on"}, ++ "minimal_base": { ++ "type": "pattern", ++ "match_type": "glob", ++ "case_sensitive": "on", ++ }, ++ "baz": {"type": "package", "match_type": "glob", "case_sensitive": "on"}, ++ } ++ installed_pkgs = { ++ "foo": [{"edition": "1.2.3-1.1"}], ++ "bar": [{"edition": "2.3.4-2.1", "epoch": "2"}], ++ } ++ ++ def zypper_search_mock(name, *_args, **_kwargs): ++ if name in installed_pkgs: ++ return {name: installed_pkgs.get(name)} ++ ++ with patch.object( ++ zypper, "list_locks", MagicMock(return_value=list_locks_mock) ++ ), patch.object( ++ zypper, "search", MagicMock(side_effect=zypper_search_mock) ++ ), patch.object( ++ zypper, "info_installed", MagicMock(side_effect=zypper_search_mock) ++ ): ++ ret = zypper.list_holds() ++ assert len(ret) == 1 ++ assert "bar-2:2.3.4-2.1.*" in ret +diff --git a/tests/pytests/unit/states/test_pkg.py b/tests/pytests/unit/states/test_pkg.py +new file mode 100644 +index 0000000000..faf42c4681 +--- /dev/null ++++ b/tests/pytests/unit/states/test_pkg.py +@@ -0,0 +1,155 @@ ++import pytest ++import salt.states.pkg as pkg ++from tests.support.mock import MagicMock, patch ++ ++ ++@pytest.fixture ++def configure_loader_modules(): ++ return { ++ pkg: { ++ "__env__": "base", ++ "__salt__": {}, ++ "__grains__": {"os": "CentOS"}, ++ "__opts__": {"test": False, "cachedir": ""}, ++ "__instance_id__": "", ++ "__low__": {}, ++ "__utils__": {}, ++ }, ++ } ++ ++ ++@pytest.mark.parametrize( ++ "package_manager", [("Zypper"), ("YUM/DNF"), ("APT")], ++) ++def test_held_unheld(package_manager): ++ """ ++ Test pkg.held and pkg.unheld with Zypper, YUM/DNF and APT ++ """ ++ ++ if package_manager == "Zypper": ++ list_holds_func = "pkg.list_locks" ++ list_holds_mock = MagicMock( ++ return_value={ ++ "bar": { ++ "type": "package", ++ "match_type": "glob", ++ "case_sensitive": "on", ++ }, ++ "minimal_base": { ++ "type": "pattern", ++ "match_type": "glob", ++ "case_sensitive": "on", ++ }, ++ "baz": { ++ "type": "package", ++ "match_type": "glob", ++ "case_sensitive": "on", ++ }, ++ } ++ ) ++ elif package_manager == "YUM/DNF": ++ list_holds_func = "pkg.list_holds" ++ list_holds_mock = MagicMock( ++ return_value=["bar-0:1.2.3-1.1.*", "baz-0:2.3.4-2.1.*"] ++ ) ++ elif package_manager == "APT": ++ list_holds_func = "pkg.get_selections" ++ list_holds_mock = MagicMock(return_value={"hold": ["bar", "baz"]}) ++ ++ def pkg_hold(name, pkgs=None, *_args, **__kwargs): ++ if name and pkgs is None: ++ pkgs = [name] ++ ret = {} ++ for pkg in pkgs: ++ ret.update( ++ { ++ pkg: { ++ "name": pkg, ++ "changes": {"new": "hold", "old": ""}, ++ "result": True, ++ "comment": "Package {} is now being held.".format(pkg), ++ } ++ } ++ ) ++ return ret ++ ++ def pkg_unhold(name, pkgs=None, *_args, **__kwargs): ++ if name and pkgs is None: ++ pkgs = [name] ++ ret = {} ++ for pkg in pkgs: ++ ret.update( ++ { ++ pkg: { ++ "name": pkg, ++ "changes": {"new": "", "old": "hold"}, ++ "result": True, ++ "comment": "Package {} is no longer held.".format(pkg), ++ } ++ } ++ ) ++ return ret ++ ++ hold_mock = MagicMock(side_effect=pkg_hold) ++ unhold_mock = MagicMock(side_effect=pkg_unhold) ++ ++ # Testing with Zypper ++ with patch.dict( ++ 
pkg.__salt__, ++ { ++ list_holds_func: list_holds_mock, ++ "pkg.hold": hold_mock, ++ "pkg.unhold": unhold_mock, ++ }, ++ ): ++ # Holding one of two packages ++ ret = pkg.held("held-test", pkgs=["foo", "bar"]) ++ assert "foo" in ret["changes"] ++ assert len(ret["changes"]) == 1 ++ hold_mock.assert_called_once_with(name="held-test", pkgs=["foo"]) ++ unhold_mock.assert_not_called() ++ ++ hold_mock.reset_mock() ++ unhold_mock.reset_mock() ++ ++ # Holding one of two packages and replacing all the rest held packages ++ ret = pkg.held("held-test", pkgs=["foo", "bar"], replace=True) ++ assert "foo" in ret["changes"] ++ assert "baz" in ret["changes"] ++ assert len(ret["changes"]) == 2 ++ hold_mock.assert_called_once_with(name="held-test", pkgs=["foo"]) ++ unhold_mock.assert_called_once_with(name="held-test", pkgs=["baz"]) ++ ++ hold_mock.reset_mock() ++ unhold_mock.reset_mock() ++ ++ # Remove all holds ++ ret = pkg.held("held-test", pkgs=[], replace=True) ++ assert "bar" in ret["changes"] ++ assert "baz" in ret["changes"] ++ assert len(ret["changes"]) == 2 ++ hold_mock.assert_not_called() ++ unhold_mock.assert_any_call(name="held-test", pkgs=["baz"]) ++ unhold_mock.assert_any_call(name="held-test", pkgs=["bar"]) ++ ++ hold_mock.reset_mock() ++ unhold_mock.reset_mock() ++ ++ # Unolding one of two packages ++ ret = pkg.unheld("held-test", pkgs=["foo", "bar"]) ++ assert "bar" in ret["changes"] ++ assert len(ret["changes"]) == 1 ++ unhold_mock.assert_called_once_with(name="held-test", pkgs=["bar"]) ++ hold_mock.assert_not_called() ++ ++ hold_mock.reset_mock() ++ unhold_mock.reset_mock() ++ ++ # Remove all holds ++ ret = pkg.unheld("held-test", all=True) ++ assert "bar" in ret["changes"] ++ assert "baz" in ret["changes"] ++ assert len(ret["changes"]) == 2 ++ hold_mock.assert_not_called() ++ unhold_mock.assert_any_call(name="held-test", pkgs=["baz"]) ++ unhold_mock.assert_any_call(name="held-test", pkgs=["bar"]) +-- +2.32.0 + + diff --git a/move-vendor-change-logic-to-zypper-class-355.patch b/move-vendor-change-logic-to-zypper-class-355.patch new file mode 100644 index 0000000..c5b1e07 --- /dev/null +++ b/move-vendor-change-logic-to-zypper-class-355.patch @@ -0,0 +1,843 @@ +From a6f8803f6374f646802a898e43bc772d05960d89 Mon Sep 17 00:00:00 2001 +From: Martin Seidl +Date: Thu, 24 Jun 2021 10:08:06 +0200 +Subject: [PATCH] Move vendor change logic to zypper class (#355) +MIME-Version: 1.0 +Content-Type: text/plain; charset=UTF-8 +Content-Transfer-Encoding: 8bit + +* move vendor change logic to zypper class + +* fix thing in zypperkg + +* refactor unit tests + +* Fix for syntax error + +* Fix mocking issue in unit test + +* fix issues with pr + +* Fix for zypperpkg unit test after refactor of vendorchangeflags + +Co-authored-by: Pablo Suárez Hernández + +* fix docs for vendor change options + +* Fix doc strings, and clean up tests + +Co-authored-by: Jochen Breuer +Co-authored-by: Pablo Suárez Hernández +--- + salt/modules/zypperpkg.py | 110 +++--- + tests/unit/modules/test_zypperpkg.py | 513 +++++++++++++++++++-------- + 2 files changed, 428 insertions(+), 195 deletions(-) + +diff --git a/salt/modules/zypperpkg.py b/salt/modules/zypperpkg.py +index b35792237c..e064e2cb4e 100644 +--- a/salt/modules/zypperpkg.py ++++ b/salt/modules/zypperpkg.py +@@ -105,10 +105,6 @@ class _Zypper: + ZYPPER_LOCK = "/var/run/zypp.pid" + TAG_RELEASED = "zypper/released" + TAG_BLOCKED = "zypper/blocked" +- # Dist upgrade vendor change support (SLE12+) +- dup_avc = False +- # Install/Patch/Upgrade vendor change support (SLE15+) +- 
inst_avc = False + + def __init__(self): + """ +@@ -138,6 +134,13 @@ class _Zypper: + self.__systemd_scope = False + self.__root = None + ++ # Dist upgrade vendor change support (SLE12+) ++ self.dup_avc = False ++ # Install/Patch/Upgrade vendor change support (SLE15+) ++ self.inst_avc = False ++ # Flag if allow vendor change should be allowed ++ self.avc = False ++ + # Call status + self.__called = False + +@@ -182,6 +185,8 @@ class _Zypper: + self.__no_raise = True + elif item == "refreshable": + self.__refresh = True ++ elif item == "allow_vendor_change": ++ return self.__allow_vendor_change + elif item == "call": + return self.__call + else: +@@ -222,15 +227,27 @@ class _Zypper: + def pid(self): + return self.__call_result.get("pid", "") + ++ def __allow_vendor_change(self, allowvendorchange, novendorchange): ++ if allowvendorchange or not novendorchange: ++ self.refresh_zypper_flags() ++ if self.dup_avc or self.inst_avc: ++ log.info("Enabling vendor change") ++ self.avc = True ++ else: ++ log.warning( ++ "Enabling/Disabling vendor changes is not supported on this Zypper version" ++ ) ++ return self ++ + def refresh_zypper_flags(self): + try: +- zypp_version = version('zypper') ++ zypp_version = version("zypper") + # zypper version 1.11.34 in SLE12 update supports vendor change for only dist upgrade +- if version_cmp(zypp_version, '1.11.34') >= 0: ++ if version_cmp(zypp_version, "1.11.34") >= 0: + # zypper version supports vendor change for dist upgrade + self.dup_avc = True + # zypper version 1.14.8 in SLE15 update supports vendor change in install/patch/upgrading +- if version_cmp(zypp_version, '1.14.8') >= 0: ++ if version_cmp(zypp_version, "1.14.8") >= 0: + self.inst_avc = True + else: + log.error("Failed to compare Zypper version") +@@ -351,6 +368,15 @@ class _Zypper: + if self.__systemd_scope: + cmd.extend(["systemd-run", "--scope"]) + cmd.extend(self.__cmd) ++ ++ if self.avc: ++ for i in ["install", "upgrade", "dist-upgrade"]: ++ if i in cmd: ++ if i == "install" and self.inst_avc: ++ cmd.insert(cmd.index(i) + 1, "--allow-vendor-change") ++ elif i in ["upgrade", "dist-upgrade"] and self.dup_avc: ++ cmd.insert(cmd.index(i) + 1, "--allow-vendor-change") ++ + log.debug("Calling Zypper: %s", " ".join(cmd)) + self.__call_result = __salt__["cmd.run_all"](cmd, **kwargs) + if self._check_result(): +@@ -1451,6 +1477,7 @@ def install( + root=None, + inclusion_detection=False, + novendorchange=True, ++ allowvendorchange=False, + **kwargs + ): + """ +@@ -1499,7 +1526,11 @@ def install( + Skip the GPG verification check (e.g., ``--no-gpg-checks``) + + novendorchange +- Disallow vendor change ++ DEPRECATED(use allowvendorchange): If set to True, do not allow vendor changes. Default: True ++ ++ allowvendorchange ++ If set to True, vendor change is allowed. Default: False ++ If both allowvendorchange and novendorchange are passed, only allowvendorchange is used. 
+ + version + Can be either a version number, or the combination of a comparison +@@ -1662,14 +1693,6 @@ def install( + kwargs.get("resolve_capabilities") and "--capability" or "--name" + ) + # Install / patching / upgrade with vendor change support is only in SLE 15+ opensuse Leap 15+ +- if not novendorchange: +- __zypper__(root=root).refresh_zypper_flags() +- if __zypper__(root=root).inst_avc: +- cmd_install.append("--allow-vendor-change") +- log.info("Enabling vendor changes") +- else: +- log.warning("Enabling/Disabling vendor changes is not supported on this Zypper version") +- + + if not refresh: + cmd_install.insert(0, "--no-refresh") +@@ -1696,6 +1719,7 @@ def install( + systemd_scope=systemd_scope, + root=root, + ) ++ .allow_vendor_change(allowvendorchange, novendorchange) + .call(*cmd) + .splitlines() + ): +@@ -1708,7 +1732,9 @@ def install( + while downgrades: + cmd = cmd_install + ["--force"] + downgrades[:500] + downgrades = downgrades[500:] +- __zypper__(no_repo_failure=ignore_repo_failure, root=root).call(*cmd) ++ __zypper__(no_repo_failure=ignore_repo_failure, root=root).allow_vendor_change( ++ allowvendorchange, novendorchange ++ ).call(*cmd) + + _clean_cache() + new = ( +@@ -1740,6 +1766,7 @@ def upgrade( + dist_upgrade=False, + fromrepo=None, + novendorchange=True, ++ allowvendorchange=False, + skip_verify=False, + no_recommends=False, + root=None, +@@ -1778,7 +1805,11 @@ def upgrade( + Specify a list of package repositories to upgrade from. Default: None + + novendorchange +- If set to True, no allow vendor changes. Default: False ++ DEPRECATED(use allowvendorchange): If set to True, do not allow vendor changes. Default: True ++ ++ allowvendorchange ++ If set to True, vendor change is allowed. Default: False ++ If both allowvendorchange and novendorchange are passed, only allowvendorchange is used. + + skip_verify + Skip the GPG verification check (e.g., ``--no-gpg-checks``) +@@ -1825,40 +1856,21 @@ def upgrade( + cmd_update.extend(["--from" if dist_upgrade else "--repo", repo]) + log.info("Targeting repos: %s", fromrepo) + +- if not novendorchange: +- __zypper__(root=root).refresh_zypper_flags() +- if dist_upgrade: +- if __zypper__(root=root).dup_avc: +- cmd_update.append("--allow-vendor-change") +- log.info("Enabling vendor changes") +- else: +- log.warning( +- "Enabling/Disabling vendor changes is not supported on this Zypper version" +- ) +- else: +- # Install / patching / upgrade with vendor change support is only in SLE 15+ opensuse Leap 15+ +- if __zypper__(root=root).inst_avc: +- cmd_update.append("--allow-vendor-change") +- log.info("Enabling vendor changes") +- else: +- log.warning( +- "Enabling/Disabling vendor changes is not supported on this Zypper version" +- ) +- +- if no_recommends: +- cmd_update.append("--no-recommends") +- log.info("Disabling recommendations") ++ if no_recommends: ++ cmd_update.append("--no-recommends") ++ log.info("Disabling recommendations") + +- if dryrun: +- # Creates a solver test case for debugging. +- log.info("Executing debugsolver and performing a dry-run dist-upgrade") +- __zypper__(systemd_scope=_systemd_scope(), root=root).noraise.call( +- *cmd_update + ["--debug-solver"] +- ) ++ if dryrun: ++ # Creates a solver test case for debugging. 
++ log.info("Executing debugsolver and performing a dry-run dist-upgrade") ++ __zypper__(systemd_scope=_systemd_scope(), root=root).allow_vendor_change( ++ allowvendorchange, novendorchange ++ ).noraise.call(*cmd_update + ["--debug-solver"]) + + old = list_pkgs(root=root) +- +- __zypper__(systemd_scope=_systemd_scope(), root=root).noraise.call(*cmd_update) ++ __zypper__(systemd_scope=_systemd_scope(), root=root).allow_vendor_change( ++ allowvendorchange, novendorchange ++ ).noraise.call(*cmd_update) + _clean_cache() + new = list_pkgs(root=root) + ret = salt.utils.data.compare_dicts(old, new) +diff --git a/tests/unit/modules/test_zypperpkg.py b/tests/unit/modules/test_zypperpkg.py +index f32c382d7f..5c01bbbfbd 100644 +--- a/tests/unit/modules/test_zypperpkg.py ++++ b/tests/unit/modules/test_zypperpkg.py +@@ -14,7 +14,7 @@ from salt.exceptions import CommandExecutionError, SaltInvocationError + from salt.ext import six + from salt.ext.six.moves import configparser + from tests.support.mixins import LoaderModuleMockMixin +-from tests.support.mock import MagicMock, Mock, call, patch ++from tests.support.mock import MagicMock, Mock, call, mock_open, patch + from tests.support.unit import TestCase + + +@@ -135,6 +135,7 @@ class ZypperTestCase(TestCase, LoaderModuleMockMixin): + + stdout_xml_snippet = '' + sniffer = RunSniffer(stdout=stdout_xml_snippet) ++ zypper.__zypper__._reset() + with patch.dict("salt.modules.zypperpkg.__salt__", {"cmd.run_all": sniffer}): + self.assertEqual(zypper.__zypper__.call("foo"), stdout_xml_snippet) + self.assertEqual(len(sniffer.calls), 1) +@@ -590,13 +591,373 @@ class ZypperTestCase(TestCase, LoaderModuleMockMixin): + {"vim": "7.4.326-2.62", "fakepkg": ""}, + ) + ++ def test_upgrade_without_vendor_change(self): ++ """ ++ Dist-upgrade without vendor change option. 
++ """ ++ with patch( ++ "salt.modules.zypperpkg.refresh_db", MagicMock(return_value=True) ++ ), patch( ++ "salt.modules.zypperpkg._systemd_scope", MagicMock(return_value=False) ++ ): ++ with patch( ++ "salt.modules.zypperpkg.__zypper__.noraise.call", MagicMock() ++ ) as zypper_mock: ++ with patch( ++ "salt.modules.zypperpkg.list_pkgs", ++ MagicMock(side_effect=[{"vim": "1.1"}, {"vim": "1.2"}]), ++ ): ++ ret = zypper.upgrade(dist_upgrade=True) ++ self.assertDictEqual(ret, {"vim": {"old": "1.1", "new": "1.2"}}) ++ zypper_mock.assert_any_call( ++ "dist-upgrade", "--auto-agree-with-licenses", ++ ) ++ ++ def test_refresh_zypper_flags(self): ++ zypper.__zypper__._reset() ++ with patch( ++ "salt.modules.zypperpkg.version", MagicMock(return_value="0.5") ++ ), patch.dict( ++ zypper.__salt__, {"lowpkg.version_cmp": MagicMock(side_effect=[-1, -1])} ++ ): ++ zypper.__zypper__.refresh_zypper_flags() ++ assert zypper.__zypper__.inst_avc == False ++ assert zypper.__zypper__.dup_avc == False ++ with patch( ++ "salt.modules.zypperpkg.version", MagicMock(return_value="1.11.34") ++ ), patch.dict( ++ zypper.__salt__, {"lowpkg.version_cmp": MagicMock(side_effect=[0, -1])} ++ ): ++ zypper.__zypper__.refresh_zypper_flags() ++ assert zypper.__zypper__.inst_avc == False ++ assert zypper.__zypper__.dup_avc == True ++ with patch( ++ "salt.modules.zypperpkg.version", MagicMock(return_value="1.14.8") ++ ), patch.dict( ++ zypper.__salt__, {"lowpkg.version_cmp": MagicMock(side_effect=[0, 0])} ++ ): ++ zypper.__zypper__.refresh_zypper_flags() ++ assert zypper.__zypper__.inst_avc == True ++ assert zypper.__zypper__.dup_avc == True ++ ++ @patch("salt.modules.zypperpkg.__zypper__.refresh_zypper_flags", MagicMock()) ++ def test_allow_vendor_change_function(self): ++ zypper.__zypper__._reset() ++ zypper.__zypper__.inst_avc = True ++ zypper.__zypper__.dup_avc = True ++ zypper.__zypper__.avc = False ++ zypper.__zypper__.allow_vendor_change(False, False) ++ assert zypper.__zypper__.avc == True ++ zypper.__zypper__.avc = False ++ zypper.__zypper__.allow_vendor_change(True, False) ++ assert zypper.__zypper__.avc == True ++ zypper.__zypper__.avc = False ++ zypper.__zypper__.allow_vendor_change(False, True) ++ assert zypper.__zypper__.avc == False ++ zypper.__zypper__.avc = False ++ zypper.__zypper__.allow_vendor_change(True, True) ++ assert zypper.__zypper__.avc == True ++ ++ zypper.__zypper__._reset() ++ zypper.__zypper__.inst_avc = False ++ zypper.__zypper__.dup_avc = True ++ zypper.__zypper__.avc = False ++ zypper.__zypper__.allow_vendor_change(False, False) ++ assert zypper.__zypper__.avc == True ++ zypper.__zypper__.avc = False ++ zypper.__zypper__.allow_vendor_change(True, False) ++ assert zypper.__zypper__.avc == True ++ zypper.__zypper__.avc = False ++ zypper.__zypper__.allow_vendor_change(False, True) ++ assert zypper.__zypper__.avc == False ++ zypper.__zypper__.avc = False ++ zypper.__zypper__.allow_vendor_change(True, True) ++ assert zypper.__zypper__.avc == True ++ ++ zypper.__zypper__._reset() ++ zypper.__zypper__.inst_avc = False ++ zypper.__zypper__.dup_avc = False ++ zypper.__zypper__.avc = False ++ zypper.__zypper__.allow_vendor_change(False, False) ++ assert zypper.__zypper__.avc == False ++ zypper.__zypper__.avc = False ++ zypper.__zypper__.allow_vendor_change(True, False) ++ assert zypper.__zypper__.avc == False ++ zypper.__zypper__.avc = False ++ zypper.__zypper__.allow_vendor_change(False, True) ++ assert zypper.__zypper__.avc == False ++ zypper.__zypper__.avc = False ++ 
zypper.__zypper__.allow_vendor_change(True, True) ++ assert zypper.__zypper__.avc == False ++ ++ @patch( ++ "salt.utils.environment.get_module_environment", ++ MagicMock(return_value={"SALT_RUNNING": "1"}), ++ ) ++ def test_zypper_call_dist_upgrade_with_avc_true(self): ++ cmd_run_mock = MagicMock(return_value={"retcode": 0, "stdout": None}) ++ zypper.__zypper__._reset() ++ with patch.dict(zypper.__salt__, {"cmd.run_all": cmd_run_mock}), patch( ++ "salt.modules.zypperpkg.__zypper__.refresh_zypper_flags", MagicMock() ++ ), patch("salt.modules.zypperpkg.__zypper__._reset", MagicMock()): ++ zypper.__zypper__.dup_avc = True ++ zypper.__zypper__.avc = True ++ zypper.__zypper__.call("dist-upgrade") ++ cmd_run_mock.assert_any_call( ++ [ ++ "zypper", ++ "--non-interactive", ++ "--no-refresh", ++ "dist-upgrade", ++ "--allow-vendor-change", ++ ], ++ output_loglevel="trace", ++ python_shell=False, ++ env={"SALT_RUNNING": "1"}, ++ ) ++ ++ @patch( ++ "salt.utils.environment.get_module_environment", ++ MagicMock(return_value={"SALT_RUNNING": "1"}), ++ ) ++ def test_zypper_call_dist_upgrade_with_avc_false(self): ++ cmd_run_mock = MagicMock(return_value={"retcode": 0, "stdout": None}) ++ zypper.__zypper__._reset() ++ with patch.dict(zypper.__salt__, {"cmd.run_all": cmd_run_mock}), patch( ++ "salt.modules.zypperpkg.__zypper__.refresh_zypper_flags", MagicMock() ++ ), patch("salt.modules.zypperpkg.__zypper__._reset", MagicMock()): ++ zypper.__zypper__.dup_avc = False ++ zypper.__zypper__.avc = False ++ zypper.__zypper__.call("dist-upgrade") ++ cmd_run_mock.assert_any_call( ++ ["zypper", "--non-interactive", "--no-refresh", "dist-upgrade",], ++ output_loglevel="trace", ++ python_shell=False, ++ env={"SALT_RUNNING": "1"}, ++ ) ++ ++ @patch( ++ "salt.utils.environment.get_module_environment", ++ MagicMock(return_value={"SALT_RUNNING": "1"}), ++ ) ++ def test_zypper_call_install_with_avc_true(self): ++ cmd_run_mock = MagicMock(return_value={"retcode": 0, "stdout": None}) ++ zypper.__zypper__._reset() ++ with patch.dict(zypper.__salt__, {"cmd.run_all": cmd_run_mock}), patch( ++ "salt.modules.zypperpkg.__zypper__.refresh_zypper_flags", MagicMock() ++ ), patch("salt.modules.zypperpkg.__zypper__._reset", MagicMock()): ++ zypper.__zypper__.inst_avc = True ++ zypper.__zypper__.avc = True ++ zypper.__zypper__.call("install") ++ cmd_run_mock.assert_any_call( ++ [ ++ "zypper", ++ "--non-interactive", ++ "--no-refresh", ++ "install", ++ "--allow-vendor-change", ++ ], ++ output_loglevel="trace", ++ python_shell=False, ++ env={"SALT_RUNNING": "1"}, ++ ) ++ ++ @patch( ++ "salt.utils.environment.get_module_environment", ++ MagicMock(return_value={"SALT_RUNNING": "1"}), ++ ) ++ def test_zypper_call_install_with_avc_false(self): ++ cmd_run_mock = MagicMock(return_value={"retcode": 0, "stdout": None}) ++ zypper.__zypper__._reset() ++ with patch.dict(zypper.__salt__, {"cmd.run_all": cmd_run_mock}), patch( ++ "salt.modules.zypperpkg.__zypper__.refresh_zypper_flags", MagicMock() ++ ), patch("salt.modules.zypperpkg.__zypper__._reset", MagicMock()): ++ zypper.__zypper__.inst_avc = False ++ zypper.__zypper__.dup_avc = True ++ zypper.__zypper__.avc = True ++ zypper.__zypper__.call("install") ++ cmd_run_mock.assert_any_call( ++ ["zypper", "--non-interactive", "--no-refresh", "install",], ++ output_loglevel="trace", ++ python_shell=False, ++ env={"SALT_RUNNING": "1"}, ++ ) ++ ++ def test_upgrade_with_novendorchange_true(self): ++ """ ++ Dist-upgrade without vendor change option. 
++ """ ++ zypper.__zypper__._reset() ++ with patch( ++ "salt.modules.zypperpkg.refresh_db", MagicMock(return_value=True) ++ ), patch( ++ "salt.modules.zypperpkg.__zypper__.refresh_zypper_flags", MagicMock() ++ ) as refresh_flags_mock, patch( ++ "salt.modules.zypperpkg._systemd_scope", MagicMock(return_value=False) ++ ): ++ with patch( ++ "salt.modules.zypperpkg.__zypper__.noraise.call", MagicMock() ++ ) as zypper_mock: ++ with patch( ++ "salt.modules.zypperpkg.list_pkgs", ++ MagicMock(side_effect=[{"vim": "1.1"}, {"vim": "1.2"}]), ++ ): ++ ret = zypper.upgrade(dist_upgrade=True, novendorchange=True) ++ refresh_flags_mock.assert_not_called() ++ zypper_mock.assert_any_call( ++ "dist-upgrade", "--auto-agree-with-licenses", ++ ) ++ ++ def test_upgrade_with_novendorchange_false(self): ++ """ ++ Perform dist-upgrade with novendorchange set to False. ++ """ ++ zypper.__zypper__._reset() ++ with patch( ++ "salt.modules.zypperpkg.refresh_db", MagicMock(return_value=True) ++ ), patch( ++ "salt.modules.zypperpkg.__zypper__.refresh_zypper_flags", MagicMock() ++ ), patch( ++ "salt.modules.zypperpkg._systemd_scope", MagicMock(return_value=False) ++ ): ++ with patch( ++ "salt.modules.zypperpkg.__zypper__.noraise.call", MagicMock() ++ ) as zypper_mock: ++ with patch( ++ "salt.modules.zypperpkg.list_pkgs", ++ MagicMock(side_effect=[{"vim": "1.1"}, {"vim": "1.1"}]), ++ ): ++ zypper.__zypper__.inst_avc = True ++ zypper.__zypper__.dup_avc = True ++ with patch.dict( ++ zypper.__salt__, ++ { ++ "pkg_resource.version": MagicMock(return_value="1.15"), ++ "lowpkg.version_cmp": MagicMock(return_value=1), ++ }, ++ ): ++ ret = zypper.upgrade( ++ dist_upgrade=True, ++ dryrun=True, ++ fromrepo=["Dummy", "Dummy2"], ++ novendorchange=False, ++ ) ++ assert zypper.__zypper__.avc == True ++ ++ def test_upgrade_with_allowvendorchange_true(self): ++ """ ++ Perform dist-upgrade with allowvendorchange set to True. ++ """ ++ zypper.__zypper__._reset() ++ with patch( ++ "salt.modules.zypperpkg.refresh_db", MagicMock(return_value=True) ++ ), patch( ++ "salt.modules.zypperpkg.__zypper__.refresh_zypper_flags", MagicMock() ++ ), patch( ++ "salt.modules.zypperpkg._systemd_scope", MagicMock(return_value=False) ++ ): ++ with patch( ++ "salt.modules.zypperpkg.__zypper__.noraise.call", MagicMock() ++ ) as zypper_mock: ++ with patch( ++ "salt.modules.zypperpkg.list_pkgs", ++ MagicMock(side_effect=[{"vim": "1.1"}, {"vim": "1.1"}]), ++ ): ++ with patch.dict( ++ zypper.__salt__, ++ { ++ "pkg_resource.version": MagicMock(return_value="1.15"), ++ "lowpkg.version_cmp": MagicMock(return_value=1), ++ }, ++ ): ++ ++ zypper.__zypper__.inst_avc = True ++ zypper.__zypper__.dup_avc = True ++ ret = zypper.upgrade( ++ dist_upgrade=True, ++ dryrun=True, ++ fromrepo=["Dummy", "Dummy2"], ++ allowvendorchange=True, ++ ) ++ assert zypper.__zypper__.avc == True ++ ++ def test_upgrade_with_allowvendorchange_false(self): ++ """ ++ Perform dist-upgrade with allowvendorchange set to False. 
++ """ ++ zypper.__zypper__._reset() ++ with patch( ++ "salt.modules.zypperpkg.refresh_db", MagicMock(return_value=True) ++ ), patch( ++ "salt.modules.zypperpkg.__zypper__.refresh_zypper_flags", MagicMock() ++ ), patch( ++ "salt.modules.zypperpkg._systemd_scope", MagicMock(return_value=False) ++ ): ++ with patch( ++ "salt.modules.zypperpkg.__zypper__.noraise.call", MagicMock() ++ ) as zypper_mock: ++ with patch( ++ "salt.modules.zypperpkg.list_pkgs", ++ MagicMock(side_effect=[{"vim": "1.1"}, {"vim": "1.1"}]), ++ ): ++ with patch.dict( ++ zypper.__salt__, ++ { ++ "pkg_resource.version": MagicMock(return_value="1.15"), ++ "lowpkg.version_cmp": MagicMock(return_value=1), ++ }, ++ ): ++ ++ zypper.__zypper__.inst_avc = True ++ zypper.__zypper__.dup_avc = True ++ ret = zypper.upgrade( ++ dist_upgrade=True, ++ dryrun=True, ++ fromrepo=["Dummy", "Dummy2"], ++ allowvendorchange=False, ++ ) ++ assert zypper.__zypper__.avc == False ++ ++ def test_upgrade_old_zypper(self): ++ zypper.__zypper__._reset() ++ with patch( ++ "salt.modules.zypperpkg.refresh_db", MagicMock(return_value=True) ++ ), patch( ++ "salt.modules.zypperpkg.__zypper__.refresh_zypper_flags", MagicMock() ++ ) as refresh_flags_mock, patch( ++ "salt.modules.zypperpkg._systemd_scope", MagicMock(return_value=False) ++ ): ++ with patch( ++ "salt.modules.zypperpkg.__zypper__.noraise.call", MagicMock() ++ ) as zypper_mock: ++ with patch( ++ "salt.modules.zypperpkg.list_pkgs", ++ MagicMock(side_effect=[{"vim": "1.1"}, {"vim": "1.1"}]), ++ ): ++ with patch.dict( ++ zypper.__salt__, ++ { ++ "pkg_resource.version": MagicMock(return_value="1.11"), ++ "lowpkg.version_cmp": MagicMock(return_value=-1), ++ }, ++ ): ++ zypper.__zypper__.inst_avc = False ++ zypper.__zypper__.dup_avc = False ++ ret = zypper.upgrade( ++ dist_upgrade=True, ++ dryrun=True, ++ fromrepo=["Dummy", "Dummy2"], ++ novendorchange=False, ++ ) ++ zypper.__zypper__.avc = False ++ + def test_upgrade_success(self): + """ + Test system upgrade and dist-upgrade success. 
+ + :return: + """ +- with patch.dict(zypper.__grains__, {"osrelease_info": [12, 1]}), patch( ++ with patch( + "salt.modules.zypperpkg.refresh_db", MagicMock(return_value=True) + ), patch( + "salt.modules.zypperpkg._systemd_scope", MagicMock(return_value=False) +@@ -635,17 +996,6 @@ class ZypperTestCase(TestCase, LoaderModuleMockMixin): + self.assertDictEqual(ret, {"vim": {"old": "1.1", "new": "1.1,1.2"}}) + zypper_mock.assert_any_call("update", "--auto-agree-with-licenses") + +- with patch( +- "salt.modules.zypperpkg.list_pkgs", +- MagicMock(side_effect=[{"vim": "1.1"}, {"vim": "1.2"}]), +- ): +- ret = zypper.upgrade(dist_upgrade=True) +- self.assertDictEqual(ret, {"vim": {"old": "1.1", "new": "1.2"}}) +- zypper_mock.assert_any_call( +- "dist-upgrade", +- "--auto-agree-with-licenses", +- ) +- + with patch( + "salt.modules.zypperpkg.list_pkgs", + MagicMock(side_effect=[{"vim": "1.1"}, {"vim": "1.1"}]), +@@ -677,94 +1027,6 @@ class ZypperTestCase(TestCase, LoaderModuleMockMixin): + "Dummy2", + ) + +- with patch( +- "salt.modules.zypperpkg.list_pkgs", +- MagicMock(side_effect=[{"vim": "1.1"}, {"vim": "1.2"}]), +- ): +- ret = zypper.upgrade( +- dist_upgrade=True, +- fromrepo=["Dummy", "Dummy2"], +- novendorchange=True, +- ) +- zypper_mock.assert_any_call( +- "dist-upgrade", +- "--auto-agree-with-licenses", +- "--dry-run", +- ) +- zypper_mock.assert_any_call( +- "dist-upgrade", +- "--auto-agree-with-licenses", +- "--dry-run", +- ) +- +- with patch( +- "salt.modules.zypperpkg.list_pkgs", +- MagicMock(side_effect=[{"vim": "1.1"}, {"vim": "1.1"}]) +- ): +- with patch.dict(zypper.__salt__, +- {'pkg_resource.version': MagicMock(return_value='1.15'), +- 'lowpkg.version_cmp': MagicMock(return_value=1)}): +- ret = zypper.upgrade( +- dist_upgrade=True, +- dryrun=True, +- fromrepo=["Dummy", "Dummy2"], +- novendorchange=False, +- ) +- zypper_mock.assert_any_call( +- "dist-upgrade", +- "--auto-agree-with-licenses", +- "--dry-run", +- "--from", +- "Dummy", +- "--from", +- "Dummy2", +- "--allow-vendor-change", +- ) +- zypper_mock.assert_any_call( +- "dist-upgrade", +- "--auto-agree-with-licenses", +- "--dry-run", +- "--from", +- "Dummy", +- "--from", +- "Dummy2", +- "--allow-vendor-change", +- "--debug-solver", +- ) +- +- with patch( +- "salt.modules.zypperpkg.list_pkgs", +- MagicMock(side_effect=[{"vim": "1.1"}, {"vim": "1.1"}]) +- ): +- with patch.dict(zypper.__salt__, +- {'pkg_resource.version': MagicMock(return_value='1.11'), +- 'lowpkg.version_cmp': MagicMock(return_value=1)}): +- ret = zypper.upgrade( +- dist_upgrade=True, +- dryrun=True, +- fromrepo=["Dummy", "Dummy2"], +- novendorchange=False, +- ) +- zypper_mock.assert_any_call( +- "dist-upgrade", +- "--auto-agree-with-licenses", +- "--dry-run", +- "--from", +- "Dummy", +- "--from", +- "Dummy2", +- ) +- zypper_mock.assert_any_call( +- "dist-upgrade", +- "--auto-agree-with-licenses", +- "--dry-run", +- "--from", +- "Dummy", +- "--from", +- "Dummy2", +- "--debug-solver", +- ) +- + with patch( + "salt.modules.zypperpkg.list_pkgs", + MagicMock(side_effect=[{"vim": "1.1"}, {"vim": "1.1"}]), +@@ -811,52 +1073,13 @@ class ZypperTestCase(TestCase, LoaderModuleMockMixin): + "Dummy2", + ) + +- with patch( +- "salt.modules.zypperpkg.list_pkgs", +- MagicMock(side_effect=[{"vim": "1.1"}, {"vim": "1.2"}]), +- ): +- ret = zypper.upgrade( +- dist_upgrade=True, +- fromrepo=["Dummy", "Dummy2"], +- novendorchange=True, +- ) +- self.assertDictEqual(ret, {"vim": {"old": "1.1", "new": "1.2"}}) +- zypper_mock.assert_any_call( +- "dist-upgrade", +- 
"--auto-agree-with-licenses", +- "--from", +- "Dummy", +- "--from", +- "Dummy2", +- ) +- +- with patch( +- "salt.modules.zypperpkg.list_pkgs", +- MagicMock(side_effect=[{"vim": "1.1"}, {"vim": "1.2"}]), +- ): +- ret = zypper.upgrade( +- dist_upgrade=True, +- fromrepo=["Dummy", "Dummy2"], +- novendorchange=False, +- ) +- self.assertDictEqual(ret, {"vim": {"old": "1.1", "new": "1.2"}}) +- zypper_mock.assert_any_call( +- "dist-upgrade", +- "--auto-agree-with-licenses", +- "--from", +- "Dummy", +- "--from", +- "Dummy2", +- "--allow-vendor-change", +- ) +- + def test_upgrade_kernel(self): + """ + Test kernel package upgrade success. + + :return: + """ +- with patch.dict(zypper.__grains__, {"osrelease_info": [12, 1]}), patch( ++ with patch( + "salt.modules.zypperpkg.refresh_db", MagicMock(return_value=True) + ), patch( + "salt.modules.zypperpkg._systemd_scope", MagicMock(return_value=False) +@@ -915,12 +1138,13 @@ Repository 'DUMMY' not found by its alias, number, or URI. + self.pid = 1234 + self.exit_code = 555 + self.noraise = MagicMock() ++ self.allow_vendor_change = self + self.SUCCESS_EXIT_CODES = [0] + + def __call__(self, *args, **kwargs): + return self + +- with patch.dict(zypper.__grains__, {"osrelease_info": [12, 1]}), patch( ++ with patch( + "salt.modules.zypperpkg.__zypper__", FailingZypperDummy() + ) as zypper_mock, patch( + "salt.modules.zypperpkg.refresh_db", MagicMock(return_value=True) +@@ -937,10 +1161,7 @@ Repository 'DUMMY' not found by its alias, number, or URI. + self.assertEqual(cmd_exc.exception.info["changes"], {}) + self.assertEqual(cmd_exc.exception.info["result"]["stdout"], zypper_out) + zypper_mock.noraise.call.assert_called_with( +- "dist-upgrade", +- "--auto-agree-with-licenses", +- "--from", +- "DUMMY", ++ "dist-upgrade", "--auto-agree-with-licenses", "--from", "DUMMY", + ) + + def test_upgrade_available(self): +-- +2.31.1 + + diff --git a/parsing-epoch-out-of-version-provided-during-pkg-rem.patch b/parsing-epoch-out-of-version-provided-during-pkg-rem.patch new file mode 100644 index 0000000..3387e55 --- /dev/null +++ b/parsing-epoch-out-of-version-provided-during-pkg-rem.patch @@ -0,0 +1,165 @@ +From d1a8a0d724ee272953bb4615869d9fe468d28e98 Mon Sep 17 00:00:00 2001 +From: Jochen Breuer +Date: Mon, 3 May 2021 17:20:54 +0200 +Subject: [PATCH] Parsing Epoch out of version provided during pkg + remove (bsc#1173692) + +yum doesn't seem to like the epoch information provided within the +version. Therefore it's removed before passing it to yum. + +* Introducing `ignore_epoch` to pkg.remove + Just like pkg.install pkg.remove now also has ignore_epoch. With + this it is possible to ignore the epoch information completely + during version comparison. +* No epoch regardless of arch +* Added tests for cases with and without arch. +* Epoch information is now skipped in all cases. +* Removes ignore_epoch from pkg state +--- + changelog/57881.changed | 1 + + salt/modules/yumpkg.py | 14 +++-- + tests/unit/modules/test_yumpkg.py | 85 +++++++++++++++++++++++++++++++ + 3 files changed, 96 insertions(+), 4 deletions(-) + create mode 100644 changelog/57881.changed + +diff --git a/changelog/57881.changed b/changelog/57881.changed +new file mode 100644 +index 0000000000..e2ae2f4653 +--- /dev/null ++++ b/changelog/57881.changed +@@ -0,0 +1 @@ ++Parsing Epoch out of version during pkg remove, since yum can't handle that in all of the cases. 
+diff --git a/salt/modules/yumpkg.py b/salt/modules/yumpkg.py +index 82adbbd59d..0fb41a0400 100644 +--- a/salt/modules/yumpkg.py ++++ b/salt/modules/yumpkg.py +@@ -2051,11 +2051,13 @@ def remove(name=None, pkgs=None, **kwargs): # pylint: disable=W0613 + old = list_pkgs() + targets = [] + for target in pkg_params: ++ version_to_remove = pkg_params[target] ++ installed_versions = old[target].split(",") ++ + # Check if package version set to be removed is actually installed: +- # old[target] contains a comma-separated list of installed versions +- if target in old and not pkg_params[target]: ++ if target in old and not version_to_remove: + targets.append(target) +- elif target in old and pkg_params[target] in old[target].split(","): ++ elif target in old and version_to_remove in installed_versions: + arch = "" + pkgname = target + try: +@@ -2066,7 +2068,11 @@ def remove(name=None, pkgs=None, **kwargs): # pylint: disable=W0613 + if archpart in salt.utils.pkg.rpm.ARCHES: + arch = "." + archpart + pkgname = namepart +- targets.append("{}-{}{}".format(pkgname, pkg_params[target], arch)) ++ # Since we don't always have the arch info, epoch information has to parsed out. But ++ # a version check was already performed, so we are removing the right version. ++ targets.append( ++ "{}-{}{}".format(pkgname, version_to_remove.split(":", 1)[-1], arch) ++ ) + if not targets: + return {} + +diff --git a/tests/unit/modules/test_yumpkg.py b/tests/unit/modules/test_yumpkg.py +index 96d3f12b17..e22c0b9251 100644 +--- a/tests/unit/modules/test_yumpkg.py ++++ b/tests/unit/modules/test_yumpkg.py +@@ -1014,6 +1014,91 @@ class YumTestCase(TestCase, LoaderModuleMockMixin): + redirect_stderr=True, + ) + ++ def test_remove_with_epoch(self): ++ """ ++ Tests that we properly identify a version containing an epoch for ++ deinstallation. ++ ++ You can deinstall pkgs only without the epoch if no arch is provided: ++ ++ .. code-block:: bash ++ ++ yum remove PackageKit-yum-1.1.10-2.el7.centos ++ """ ++ name = "foo" ++ installed = "8:3.8.12-4.n.el7" ++ list_pkgs_mock = MagicMock( ++ side_effect=lambda **kwargs: { ++ name: [installed] ++ if kwargs.get("versions_as_list", False) ++ else installed ++ } ++ ) ++ cmd_mock = MagicMock( ++ return_value={"pid": 12345, "retcode": 0, "stdout": "", "stderr": ""} ++ ) ++ salt_mock = { ++ "cmd.run_all": cmd_mock, ++ "lowpkg.version_cmp": rpm.version_cmp, ++ "pkg_resource.parse_targets": MagicMock( ++ return_value=({name: installed}, "repository") ++ ), ++ } ++ full_pkg_string = "-".join((name, installed[2:])) ++ with patch.object(yumpkg, "list_pkgs", list_pkgs_mock), patch( ++ "salt.utils.systemd.has_scope", MagicMock(return_value=False) ++ ), patch.dict(yumpkg.__salt__, salt_mock): ++ ++ with patch.dict(yumpkg.__grains__, {"os": "CentOS", "osrelease": 7}): ++ expected = ["yum", "-y", "remove", full_pkg_string] ++ yumpkg.remove(name) ++ call = cmd_mock.mock_calls[0][1][0] ++ assert call == expected, call ++ ++ def test_remove_with_epoch_and_arch_info(self): ++ """ ++ Tests that we properly identify a version containing an epoch and arch ++ deinstallation. ++ ++ You can deinstall pkgs with or without epoch in combination with the arch. ++ Here we test for the absence of the epoch, but the presence for the arch: ++ ++ .. code-block:: bash ++ ++ yum remove PackageKit-yum-1.1.10-2.el7.centos.x86_64 ++ """ ++ arch = "x86_64" ++ name = "foo" ++ name_and_arch = name + "." 
+ arch ++ installed = "8:3.8.12-4.n.el7" ++ list_pkgs_mock = MagicMock( ++ side_effect=lambda **kwargs: { ++ name_and_arch: [installed] ++ if kwargs.get("versions_as_list", False) ++ else installed ++ } ++ ) ++ cmd_mock = MagicMock( ++ return_value={"pid": 12345, "retcode": 0, "stdout": "", "stderr": ""} ++ ) ++ salt_mock = { ++ "cmd.run_all": cmd_mock, ++ "lowpkg.version_cmp": rpm.version_cmp, ++ "pkg_resource.parse_targets": MagicMock( ++ return_value=({name_and_arch: installed}, "repository") ++ ), ++ } ++ full_pkg_string = "-".join((name, installed[2:])) ++ with patch.object(yumpkg, "list_pkgs", list_pkgs_mock), patch( ++ "salt.utils.systemd.has_scope", MagicMock(return_value=False) ++ ), patch.dict(yumpkg.__salt__, salt_mock): ++ ++ with patch.dict(yumpkg.__grains__, {"os": "CentOS", "osrelease": 7}): ++ expected = ["yum", "-y", "remove", full_pkg_string + "." + arch] ++ yumpkg.remove(name) ++ call = cmd_mock.mock_calls[0][1][0] ++ assert call == expected, call ++ + def test_install_with_epoch(self): + """ + Tests that we properly identify a version containing an epoch as an +-- +2.31.1 + + diff --git a/prevent-command-injection-in-the-snapper-module-bsc-.patch b/prevent-command-injection-in-the-snapper-module-bsc-.patch new file mode 100644 index 0000000..0e06ce3 --- /dev/null +++ b/prevent-command-injection-in-the-snapper-module-bsc-.patch @@ -0,0 +1,43 @@ +From ea02e9398160fad03dd662635ec038b95db2c04a Mon Sep 17 00:00:00 2001 +From: =?UTF-8?q?Pablo=20Su=C3=A1rez=20Hern=C3=A1ndez?= + +Date: Tue, 27 Apr 2021 11:14:20 +0100 +Subject: [PATCH] Prevent command injection in the snapper module + (bsc#1185281) (CVE-2021-31607) + +--- + salt/modules/snapper.py | 10 ++++++++-- + 1 file changed, 8 insertions(+), 2 deletions(-) + +diff --git a/salt/modules/snapper.py b/salt/modules/snapper.py +index 1df3ce9368..6954c3b544 100644 +--- a/salt/modules/snapper.py ++++ b/salt/modules/snapper.py +@@ -18,6 +18,7 @@ from __future__ import absolute_import, print_function, unicode_literals + import difflib + import logging + import os ++import subprocess + import time + + import salt.utils.files +@@ -561,8 +562,13 @@ def _is_text_file(filename): + """ + Checks if a file is a text file + """ +- type_of_file = os.popen("file -bi {0}".format(filename), "r").read() +- return type_of_file.startswith("text") ++ type_of_file = subprocess.run( ++ ["file", "-bi", filename], ++ check=False, ++ stdout=subprocess.PIPE, ++ universal_newlines=True, ++ ).stdout ++ return type_of_file.startswith('text') + + + def run(function, *args, **kwargs): +-- +2.31.1 + + diff --git a/salt-tmpfiles.d b/salt-tmpfiles.d index f64b559..291b4d6 100644 --- a/salt-tmpfiles.d +++ b/salt-tmpfiles.d @@ -1,5 +1,5 @@ # Type Path Mode UID GID Age Argument -d /var/run/salt 0750 root salt -d /var/run/salt/master 0750 salt salt -d /var/run/salt/minion 0750 root root +d /run/salt 0750 root salt +d /run/salt/master 0750 salt salt +d /run/salt/minion 0750 root root diff --git a/salt.changes b/salt.changes index a527fb0..95b6da4 100644 --- a/salt.changes +++ b/salt.changes @@ -1,7 +1,191 @@ +------------------------------------------------------------------- +Wed Sep 15 11:18:58 UTC 2021 - Pablo Suárez Hernández + +- Exclude the full path of a download URL to prevent injection of + malicious code (bsc#1190265) (CVE-2021-21996) + +- Added: + * exclude-the-full-path-of-a-download-url-to-prevent-i.patch + +------------------------------------------------------------------- +Tue Aug 31 11:28:13 UTC 2021 - Victor Zhestkov + +- Fix wrong relative paths 
resolution with Jinja renderer when importing subdirectories + +- Added: + * templates-move-the-globals-up-to-the-environment-jin.patch + +------------------------------------------------------------------- +Thu Aug 19 14:41:12 UTC 2021 - Victor Zhestkov + +- Don't pass shell="/sbin/nologin" to onlyif/unless checks (bsc#1188259) +- Add missing aarch64 to rpm package architectures +- Backport of upstream PR#59492 + +- Added: + * backport-of-upstream-pr59492-to-3002.2-404.patch + * don-t-use-shell-sbin-nologin-in-requisites.patch + * add-missing-aarch64-to-rpm-package-architectures-405.patch + +------------------------------------------------------------------- +Wed Aug 11 12:22:24 UTC 2021 - Pablo Suárez Hernández + +- Fix failing unit test for systemd +- Fix error handling in openscap module (bsc#1188647) +- Better handling of bad public keys from minions (bsc#1189040) + +- Added: + * better-handling-of-bad-public-keys-from-minions-bsc-.patch + * fix-error-handling-in-openscap-module-bsc-1188647-40.patch + * fix-failing-unit-tests-for-systemd.patch + +------------------------------------------------------------------- +Tue Aug 10 12:59:25 UTC 2021 - Pablo Suárez Hernández + +- Define license macro as doc in spec file if not existing +- Add standalone formulas configuration for salt minion and remove salt-master requirement (bsc#1168327) + +------------------------------------------------------------------- +Fri Jul 16 15:35:10 UTC 2021 - Pablo Suárez Hernández + +- Do noop for services states when running systemd in offline mode (bsc#1187787) +- transactional_updates: do not execute states in parallel but use a queue (bsc#1188170) + +- Added: + * do-noop-for-services-states-when-running-systemd-in-.patch + +------------------------------------------------------------------- +Thu Jul 8 08:06:40 UTC 2021 - Pablo Suárez Hernández + +- Handle "master tops" data when states are applied by "transactional_update" (bsc#1187787) +- Enhance openscap module: add "xccdf_eval" call + +- Added: + * enhance-openscap-module-add-xccdf_eval-call-386.patch + * handle-master-tops-data-when-states-are-applied-by-t.patch + +------------------------------------------------------------------- +Tue Jul 6 08:00:23 UTC 2021 - Victor Zhestkov + +- virt: pass emulator when getting domain capabilities from libvirt +- Adding preliminary support for Rocky Linux +- Implementation of held/unheld functions for state pkg (bsc#1187813) + +- Added: + * implementation-of-held-unheld-functions-for-state-pk.patch + * adding-preliminary-support-for-rocky.-59682-391.patch + * virt-pass-emulator-when-getting-domain-capabilities-.patch + +------------------------------------------------------------------- +Fri Jun 25 11:54:13 UTC 2021 - Alexander Graul + +- Replace deprecated Thread.isAlive() with Thread.is_alive() + +- Added: + * backport-thread.is_alive-fix-390.patch + +------------------------------------------------------------------- +Thu Jun 24 12:41:03 UTC 2021 - Victor Zhestkov + +- Fix exception in yumpkg.remove for not installed package +- Fix save for iptables state module (bsc#1185131) + +- Added: + * fix-exception-in-yumpkg.remove-for-not-installed-pac.patch + * fix-save-for-iptables-state-module-bsc-1185131-372.patch + +------------------------------------------------------------------- +Thu Jun 24 09:44:36 UTC 2021 - Pablo Suárez Hernández + +- virt: use /dev/kvm to detect KVM + +- Added: + * virt-use-dev-kvm-to-detect-kvm-383.patch + +------------------------------------------------------------------- +Thu Jun 24 
08:41:31 UTC 2021 - Pablo Suárez Hernández + +- zypperpkg: improve logic for handling vendorchange flags + +- Added: + * move-vendor-change-logic-to-zypper-class-355.patch + +------------------------------------------------------------------- +Mon Jun 21 14:57:02 UTC 2021 - Pablo Suárez Hernández + +- Add bundled provides for tornado to the spec file +- Enhance logging when inotify beacon is missing pyinotify (bsc#1186310) +- Add "python3-pyinotify" as a recommended package for Salt in SUSE/OpenSUSE distros + +- Added: + * enhance-logging-when-inotify-beacon-is-missing-pyino.patch + +------------------------------------------------------------------- +Fri Jun 4 09:00:07 UTC 2021 - Pablo Suárez Hernández + +- Fix tmpfiles.d configuration for salt to not use legacy paths (bsc#1173103) + +------------------------------------------------------------------- +Tue Jun 1 12:05:20 UTC 2021 - Pablo Suárez Hernández + +- Check if dpkgnotify is executable (bsc#1186674) + +- Added: + * check-if-dpkgnotify-is-executable-bsc-1186674-376.patch + +------------------------------------------------------------------- +Fri May 21 15:01:10 UTC 2021 - Pablo Suárez Hernández + +- Detect Python version to use inside container (bsc#1167586) (bsc#1164192) +- Handle volumes on stopped pools in virt.vm_info (bsc#1186287) +- Drop support for Python2. Obsoletes "python2-salt" package + +- Added: + * handle-volumes-on-stopped-pools-in-virt.vm_info-373.patch + * figure-out-python-interpreter-to-use-inside-containe.patch + +------------------------------------------------------------------- +Mon May 10 14:45:26 UTC 2021 - Pablo Suárez Hernández + +- grains.extra: support old non-intel kernels (bsc#1180650) +- Fix missing minion returns in batch mode (bsc#1184659) + +- Added: + * fix-missing-minion-returns-in-batch-mode-360.patch + * grains.extra-support-old-non-intel-kernels-bsc-11806.patch + +------------------------------------------------------------------- +Tue May 4 13:44:13 UTC 2021 - Jochen Breuer + +- Parsing Epoch out of version provided during pkg remove (bsc#1173692) + +- Added: + * parsing-epoch-out-of-version-provided-during-pkg-rem.patch + +------------------------------------------------------------------- +Tue Apr 27 15:02:30 UTC 2021 - Pablo Suárez Hernández + +- Fix issue parsing errors in ansiblegate state module + +- Added: + * fix-issue-parsing-errors-in-ansiblegate-state-module.patch + +------------------------------------------------------------------- +Tue Apr 27 12:27:17 UTC 2021 - Pablo Suárez Hernández + +- Prevent command injection in the snapper module (bsc#1185281) (CVE-2021-31607) +- transactional_update: detect recursion in the executor +- Add subpackage salt-transactional-update (jsc#SLE-18028) +- Remove duplicate directories from specfile + +- Added: + * transactional_update-detect-recursion-in-the-executo.patch + * prevent-command-injection-in-the-snapper-module-bsc-.patch + ------------------------------------------------------------------- Tue Apr 20 12:18:06 UTC 2021 - Pablo Suárez Hernández -- Improvements on "ansiblegate" module: +- Improvements on "ansiblegate" module (bsc#1185092): * New methods: ansible.targets / ansible.discover_playbooks * General bugfixes @@ -220,7 +404,7 @@ Tue Jan 5 10:15:08 UTC 2021 - Pablo Suárez Hernández -- Update to Salt release version 3002.2 +- Update to Salt release version 3002.2 (jsc#ECO-3212) (jsc#SLE-18033) - See release notes: https://docs.saltstack.com/en/latest/topics/releases/3002.2.html - Modified: diff --git a/salt.spec b/salt.spec index 
7e8d74d..c8125d0 100644 --- a/salt.spec +++ b/salt.spec @@ -1,7 +1,7 @@ # # spec file for package salt # -# Copyright (c) 2019 SUSE LINUX GmbH, Nuernberg, Germany. +# Copyright (c) 2021 SUSE LLC # # All modifications and additions to the file contributed by third parties # remain the property of their copyright owners, unless otherwise agreed @@ -48,6 +48,7 @@ Source2: salt-tmpfiles.d Source3: html.tar.bz2 Source4: update-documentation.sh Source5: travis.yml +Source6: transactional_update.conf Patch1: run-salt-master-as-dedicated-salt-user.patch Patch2: run-salt-api-as-user-salt-bsc-1064520.patch @@ -131,271 +132,331 @@ Patch40: async-batch-implementation.patch # PATCH-FIX_UPSTREAM https://github.com/saltstack/salt/pull/52527 Patch41: calculate-fqdns-in-parallel-to-avoid-blockings-bsc-1.patch #PATCH-FIX_OPENSUSE: https://github.com/openSUSE/salt/pull/139 -Patch42: fix-async-batch-race-conditions.patch +Patch42: fix-async-batch-race-conditions.patch #PATCH-FIX_OPENSUSE: https://github.com/openSUSE/salt/pull/141 -Patch43: add-batch_presence_ping_timeout-and-batch_presence_p.patch +Patch43: add-batch_presence_ping_timeout-and-batch_presence_p.patch # PATCH-FIX_UPSTREAM https://github.com/saltstack/salt/pull/52527 -Patch44: use-threadpool-from-multiprocessing.pool-to-avoid-le.patch +Patch44: use-threadpool-from-multiprocessing.pool-to-avoid-le.patch # PATCH-FIX_UPSTREAM https://github.com/saltstack/salt/pull/52888 -Patch45: do-not-crash-when-there-are-ipv6-established-connect.patch +Patch45: do-not-crash-when-there-are-ipv6-established-connect.patch # PATCH-FIX_OPENSUSE: https://github.com/openSUSE/salt/pull/144 # PATCH-FIX_UPSTREAM: https://github.com/saltstack/salt/pull/52855 -Patch46: fix-async-batch-multiple-done-events.patch +Patch46: fix-async-batch-multiple-done-events.patch # PATCH-FIX_UPSTREAM: https://github.com/saltstack/salt/pull/52743 -Patch47: switch-firewalld-state-to-use-change_interface.patch +Patch47: switch-firewalld-state-to-use-change_interface.patch # PATCH-FIX_OPENSUSE -Patch48: add-standalone-configuration-file-for-enabling-packa.patch +Patch48: add-standalone-configuration-file-for-enabling-packa.patch # PATCH-FIX_UPSTREAM: https://github.com/saltstack/salt/pull/53293 -Patch49: do-not-break-repo-files-with-multiple-line-values-on.patch +Patch49: do-not-break-repo-files-with-multiple-line-values-on.patch # PATCH-FIX_UPSTREAM: https://github.com/saltstack/salt/pull/53159 -Patch50: batch.py-avoid-exception-when-minion-does-not-respon.patch +Patch50: batch.py-avoid-exception-when-minion-does-not-respon.patch # PATCH_FIX_UPSTREAM: https://github.com/saltstack/salt/pull/53471 -Patch51: fix-zypper-pkg.list_pkgs-expectation-and-dpkg-mockin.patch +Patch51: fix-zypper-pkg.list_pkgs-expectation-and-dpkg-mockin.patch # PATCH-FIX_OPENSUSE: https://github.com/openSUSE/salt/pull/161 -Patch52: provide-the-missing-features-required-for-yomi-yet-o.patch +Patch52: provide-the-missing-features-required-for-yomi-yet-o.patch # PATCH_FIX_UPSTREAM: https://github.com/saltstack/salt/pull/53661 -Patch53: do-not-make-ansiblegate-to-crash-on-python3-minions.patch +Patch53: do-not-make-ansiblegate-to-crash-on-python3-minions.patch # PATCH_FIX_UPSTREAM: https://github.com/saltstack/salt/pull/53693 -Patch54: allow-passing-kwargs-to-pkg.list_downloaded-bsc-1140.patch +Patch54: allow-passing-kwargs-to-pkg.list_downloaded-bsc-1140.patch # PATCH_FIX_UPSTREAM: https://github.com/saltstack/salt/pull/53661 -Patch55: prevent-ansiblegate-unit-tests-to-fail-on-ubuntu.patch +Patch55: 
prevent-ansiblegate-unit-tests-to-fail-on-ubuntu.patch # PATCH_FIX_UPSTREAM: https://github.com/saltstack/salt/pull/54048 -Patch56: avoid-traceback-when-http.query-request-cannot-be-pe.patch +Patch56: avoid-traceback-when-http.query-request-cannot-be-pe.patch # PATCH_FIX_UPSTREAM: https://github.com/saltstack/salt/pull/53992 # https://github.com/saltstack/salt/pull/53996 # https://github.com/saltstack/salt/pull/54022 # https://github.com/saltstack/salt/pull/54024 -Patch57: accumulated-changes-required-for-yomi-165.patch +Patch57: accumulated-changes-required-for-yomi-165.patch # PATCH-FIX_OPENSUSE: https://github.com/openSUSE/salt/pull/159 -Patch58: move-server_id-deprecation-warning-to-reduce-log-spa.patch +Patch58: move-server_id-deprecation-warning-to-reduce-log-spa.patch # PATCH_FIX_UPSTREAM: https://github.com/saltstack/salt/pull/54077 # PATCH-FIX_OPENSUSE: https://github.com/openSUSE/salt/pull/166 -Patch59: fix-aptpkg-systemd-call-bsc-1143301.patch +Patch59: fix-aptpkg-systemd-call-bsc-1143301.patch # PATCH-FIX_OPENSUSE: https://github.com/openSUSE/salt/pull/170 -Patch60: strip-trailing-from-repo.uri-when-comparing-repos-in.patch +Patch60: strip-trailing-from-repo.uri-when-comparing-repos-in.patch # PATCH-FIX_OPENSUSE: https://github.com/openSUSE/salt/pull/177 -Patch61: restore-default-behaviour-of-pkg-list-return.patch +Patch61: restore-default-behaviour-of-pkg-list-return.patch # PATCH-FIX_OPENSUSE: https://github.com/openSUSE/salt/pull/172 -Patch62: implement-network.fqdns-module-function-bsc-1134860-.patch +Patch62: implement-network.fqdns-module-function-bsc-1134860-.patch # PATCH-FIX_OPENSUSE: https://github.com/openSUSE/salt/commit/6af07030a502c427781991fc9a2b994fa04ef32e -Patch63: fix-memory-leak-produced-by-batch-async-find_jobs-me.patch +Patch63: fix-memory-leak-produced-by-batch-async-find_jobs-me.patch # PATCH-FIX_OPENSUSE: https://github.com/openSUSE/salt/commit/002543df392f65d95dbc127dc058ac897f2035ed -Patch64: improve-batch_async-to-release-consumed-memory-bsc-1.patch +Patch64: improve-batch_async-to-release-consumed-memory-bsc-1.patch # PATCH_FIX_UPSTREAM: https://github.com/saltstack/salt/pull/54077 # PATCH_FIX_OPENSUSE: https://github.com/openSUSE/salt/commit/44a91c2ce6df78d93ce0ef659dedb0e41b1c2e04 -Patch65: prevent-systemd-run-description-issue-when-running-a.patch +Patch65: prevent-systemd-run-description-issue-when-running-a.patch # PATCH_FIX_OPENSUSE: https://github.com/openSUSE/salt/commit/55d8a777d6a9b19c959e14a4060e5579e92cd106 -Patch66: use-current-ioloop-for-the-localclient-instance-of-b.patch +Patch66: use-current-ioloop-for-the-localclient-instance-of-b.patch # PATCH_FIX_OPENSUSE: https://github.com/openSUSE/salt/commit/8378bb24a5a53973e8dba7658b8b3465d967329f -Patch67: fix-failing-unit-tests-for-batch-async.patch +Patch67: fix-failing-unit-tests-for-batch-async.patch # PATCH_FIX_UPSTREAM: https://github.com/saltstack/salt/pull/53326 # PATCH_FIX_UPSTREAM: https://github.com/saltstack/salt/pull/54954 -Patch68: accumulated-changes-from-yomi-167.patch +Patch68: accumulated-changes-from-yomi-167.patch # PATCH_FIX_OPENSUSE: https://github.com/openSUSE/salt/pull/180 -Patch69: fix-a-wrong-rebase-in-test_core.py-180.patch +Patch69: fix-a-wrong-rebase-in-test_core.py-180.patch # PATCH_FIX_OPENSUSE: https://github.com/openSUSE/salt/pull/182 -Patch70: remove-unnecessary-yield-causing-badyielderror-bsc-1.patch +Patch70: remove-unnecessary-yield-causing-badyielderror-bsc-1.patch # PATCH_FIX_OPENSUSE: https://github.com/openSUSE/salt/pull/186 -Patch71: 
read-repo-info-without-using-interpolation-bsc-11356.patch +Patch71: read-repo-info-without-using-interpolation-bsc-11356.patch # PATCH-FIX_UPSTREAM: https://github.com/saltstack/salt/pull/53293 -Patch72: prevent-test_mod_del_repo_multiline_values-to-fail.patch -Patch73: fix-for-log-checking-in-x509-test.patch +Patch72: prevent-test_mod_del_repo_multiline_values-to-fail.patch +Patch73: fix-for-log-checking-in-x509-test.patch # PATCH_FIX_OPENSUSE: https://github.com/openSUSE/salt/pull/190 -Patch74: fixing-streamclosed-issue.patch -Patch75: fix-batch_async-obsolete-test.patch +Patch74: fixing-streamclosed-issue.patch +Patch75: fix-batch_async-obsolete-test.patch # PATCH_FIX_OPENSUSE: https://github.com/openSUSE/salt/pull/191 -Patch76: let-salt-ssh-use-platform-python-binary-in-rhel8-191.patch +Patch76: let-salt-ssh-use-platform-python-binary-in-rhel8-191.patch # PATCH_FIX_OPENSUSE: https://github.com/openSUSE/salt/pull/193 -Patch77: xfs-do-not-fails-if-type-is-not-present.patch +Patch77: xfs-do-not-fails-if-type-is-not-present.patch # PATCH-FIX_UPSTREAM: https://github.com/saltstack/salt/pull/55245 -Patch78: virt-adding-kernel-boot-parameters-to-libvirt-xml-55.patch +Patch78: virt-adding-kernel-boot-parameters-to-libvirt-xml-55.patch # PATCH_FIX_OPENSUSE: https://github.com/openSUSE/salt/pull/200 -Patch79: support-for-btrfs-and-xfs-in-parted-and-mkfs.patch +Patch79: support-for-btrfs-and-xfs-in-parted-and-mkfs.patch # PATCH-FIX_UPSTREAM: https://github.com/saltstack/salt/pull/56125 -Patch80: add-astra-linux-common-edition-to-the-os-family-list.patch +Patch80: add-astra-linux-common-edition-to-the-os-family-list.patch # PATCH_FIX_OPENSUSE: https://github.com/openSUSE/salt/pull/211 -Patch81: apply-patch-from-upstream-to-support-python-3.8.patch +Patch81: apply-patch-from-upstream-to-support-python-3.8.patch # PATCH_FIX_OPENSUSE: https://github.com/openSUSE/salt/pull/217 -Patch82: batch_async-avoid-using-fnmatch-to-match-event-217.patch +Patch82: batch_async-avoid-using-fnmatch-to-match-event-217.patch # PATCH_FIX_OPENSUSE: https://github.com/openSUSE/salt/commit/8a23030d347b7487328c0395f5e30ef29daf1455 -Patch83: batch-async-catch-exceptions-and-safety-unregister-a.patch +Patch83: batch-async-catch-exceptions-and-safety-unregister-a.patch # PATCH_FIX_OPENSUSE: https://github.com/openSUSE/salt/commit/a38adfa2efe40c2b1508b685af0b5d28a6bbcfc8 -Patch84: fix-unit-tests-for-batch-async-after-refactor.patch +Patch84: fix-unit-tests-for-batch-async-after-refactor.patch # PATCH_FIX_OPENSUSE: https://github.com/openSUSE/salt/commit/b4c401cfe6031b61e27f7795bfa1aca6e8341e52 -Patch85: changed-imports-to-vendored-tornado.patch +Patch85: changed-imports-to-vendored-tornado.patch # PATCH_FIX_OPENSUSE: https://github.com/openSUSE/salt/commit/25b4e3ea983b2606b2fb3d3c0e42f9840208bf84 -Patch86: remove-deprecated-usage-of-no_mock-and-no_mock_reaso.patch +Patch86: remove-deprecated-usage-of-no_mock-and-no_mock_reaso.patch # PATCH_FIX_OPENSUSE: https://github.com/openSUSE/salt/commit/a8f0a15e4067ec278c8a2d690e3bf815523286ca -Patch87: fix-wrong-test_mod_del_repo_multiline_values-test-af.patch +Patch87: fix-wrong-test_mod_del_repo_multiline_values-test-af.patch # PATCH-FIX_UPSTREAM: https://github.com/saltstack/salt/pull/55814 -Patch88: opensuse-3000-virt-defined-states-222.patch +Patch88: opensuse-3000-virt-defined-states-222.patch # PATCH_FIX_OPENSUSE: https://github.com/openSUSE/salt/pull/223 -Patch89: fix-for-temp-folder-definition-in-loader-unit-test.patch +Patch89: 
fix-for-temp-folder-definition-in-loader-unit-test.patch # PATCH-FIX_UPSTREAM: https://github.com/saltstack/salt/pull/56392 -Patch90: virt._get_domain-don-t-raise-an-exception-if-there-i.patch +Patch90: virt._get_domain-don-t-raise-an-exception-if-there-i.patch # PATCH-FIX_OPENSUSE: https://github.com/openSUSE/salt/pull/226 -Patch91: re-adding-function-to-test-for-root.patch +Patch91: re-adding-function-to-test-for-root.patch # PATCH-FIX_OPENSUSE: https://github.com/openSUSE/salt/pull/227 -Patch92: loop-fix-variable-names-for-until_no_eval.patch +Patch92: loop-fix-variable-names-for-until_no_eval.patch # PATCH-FIX_OPENSUSE: https://github.com/openSUSE/salt/pull/226 -Patch93: make-setup.py-script-to-not-require-setuptools-9.1.patch +Patch93: make-setup.py-script-to-not-require-setuptools-9.1.patch # PATCH-FIX_UPSTREAM: https://github.com/saltstack/salt/pull/50453 # https://github.com/saltstack/salt/commit/e20362f6f053eaa4144583604e6aac3d62838419 # Can be dropped one pull/50453 is in released version. -Patch94: reintroducing-reverted-changes.patch +Patch94: reintroducing-reverted-changes.patch # PATCH_FIX_OPENSUSE: https://github.com/openSUSE/salt/commit/b713d0b3031faadc17cd9cf09977ccc19e50bef7 -Patch95: add-new-custom-suse-capability-for-saltutil-state-mo.patch +Patch95: add-new-custom-suse-capability-for-saltutil-state-mo.patch # PATCH-FIX_UPSTREAM: https://github.com/saltstack/salt/pull/56491 -Patch96: sanitize-grains-loaded-from-roster_grains.json.patch +Patch96: sanitize-grains-loaded-from-roster_grains.json.patch # PATCH-FIX_OPENSUSE: https://github.com/openSUSE/salt/pull/228 -Patch97: adds-explicit-type-cast-for-port.patch +Patch97: adds-explicit-type-cast-for-port.patch # PATCH-FIX_UPSTREAM: https://github.com/saltstack/salt/pull/53882 -Patch98: fixed-bug-lvm-has-no-parttion-type.-the-scipt-later-.patch +Patch98: fixed-bug-lvm-has-no-parttion-type.-the-scipt-later-.patch # PATCH-FIX_OPENSUSE: https://github.com/openSUSE/salt/commit/4f80e969e31247a4755d98d25f29b5d8b1b916c3 -Patch99: remove-vendored-backports-abc-from-requirements.patch +Patch99: remove-vendored-backports-abc-from-requirements.patch # PATCH-FIX_UPSTREAM: https://github.com/saltstack/salt/pull/57123 -Patch100: prevent-logging-deadlock-on-salt-api-subprocesses-bs.patch +Patch100: prevent-logging-deadlock-on-salt-api-subprocesses-bs.patch # PATCH-FIX_OPENSUSE: https://github.com/openSUSE/salt/pull/235 -Patch101: python3.8-compatibility-pr-s-235.patch +Patch101: python3.8-compatibility-pr-s-235.patch # PATCH-FIX_UPSTREAM: https://github.com/saltstack/salt/pull/56419 -Patch102: option-to-en-disable-force-refresh-in-zypper-215.patch +Patch102: option-to-en-disable-force-refresh-in-zypper-215.patch # PATCH-FIX_OPENSUSE: https://github.com/openSUSE/salt/pull/229 -Patch103: fix-a-test-and-some-variable-names-229.patch +Patch103: fix-a-test-and-some-variable-names-229.patch # PATCH-FIX_UPSTREAM: https://github.com/saltstack/salt/pull/56439 -Patch104: add-docker-logout-237.patch +Patch104: add-docker-logout-237.patch # PATCH-FIX_UPSTREAM: https://github.com/saltstack/salt/pull/57392 -Patch105: zypperpkg-filter-patterns-that-start-with-dot-244.patch +Patch105: zypperpkg-filter-patterns-that-start-with-dot-244.patch # PATCH-FIX_OPENSUSE: hhttps://github.com/openSUSE/salt/commit/da936daeebd701e147707ad814c07bfc259d4be -Patch106: add-publish_batch-to-clearfuncs-exposed-methods.patch +Patch106: add-publish_batch-to-clearfuncs-exposed-methods.patch # PATCH-FIX_UPSTREAM: https://github.com/saltstack/salt/pull/57779 -Patch107: 
info_installed-works-without-status-attr-now.patch +Patch107: info_installed-works-without-status-attr-now.patch # PATCH-FIX_UPSTREAM: https://github.com/saltstack/salt/pull/57491 -Patch108: opensuse-3000.3-spacewalk-runner-parse-command-250.patch +Patch108: opensuse-3000.3-spacewalk-runner-parse-command-250.patch # PATCH-FIX_UPSTREAM: https://github.com/openSUSE/salt/pull/251 -Patch109: opensuse-3000-libvirt-engine-fixes-251.patch +Patch109: opensuse-3000-libvirt-engine-fixes-251.patch # PATCH-FIX_UPSTREAM: https://github.com/saltstack/salt/pull/58013 -Patch110: fix-__mount_device-wrapper-254.patch +Patch110: fix-__mount_device-wrapper-254.patch # PATCH-FIX_UPSTREAM: https://github.com/saltstack/salt/pull/58214 -Patch111: ansiblegate-take-care-of-failed-skipped-and-unreacha.patch +Patch111: ansiblegate-take-care-of-failed-skipped-and-unreacha.patch # PATCH-FIX_UPSTREAM: https://github.com/saltstack/salt/pull/58301 -Patch112: do-not-raise-streamclosederror-traceback-but-only-lo.patch +Patch112: do-not-raise-streamclosederror-traceback-but-only-lo.patch # PATCH-FIX_OPENSUSE: https://github.com/openSUSE/salt/pull/257 -Patch113: opensuse-3000.2-virt-backports-236-257.patch +Patch113: opensuse-3000.2-virt-backports-236-257.patch # PATCH-FIX_OPENSUSE: https://github.com/openSUSE/salt/pull/256 -Patch114: backport-virt-patches-from-3001-256.patch +Patch114: backport-virt-patches-from-3001-256.patch # PATCH-FIX_OPENSUSE: https://github.com/openSUSE/salt/pull/262 -Patch115: fix-the-removed-six.itermitems-and-six.-_type-262.patch +Patch115: fix-the-removed-six.itermitems-and-six.-_type-262.patch # PATCH-FIX_OPENSUSE: https://github.com/openSUSE/salt/pull/263 -Patch116: fix-virt.update-with-cpu-defined-263.patch +Patch116: fix-virt.update-with-cpu-defined-263.patch # PATCH-FIX_OPENSUSE: https://github.com/openSUSE/salt/pull/261 # PATCH-FIX_UPSTREAM: https://github.com/saltstack/salt/pull/57571 -Patch117: remove-msgpack-1.0.0-requirement-in-the-installed-me.patch +Patch117: remove-msgpack-1.0.0-requirement-in-the-installed-me.patch # PATCH-FIX_OPENSUSE: https://github.com/openSUSE/salt/commit/bc20f38d0fa492af70321fef7fe2530937dfc86a -Patch118: prevent-import-errors-when-running-test_btrfs-unit-t.patch +Patch118: prevent-import-errors-when-running-test_btrfs-unit-t.patch # PATCH-FIX_UPSTREAM: https://github.com/saltstack/salt/pull/58529 -Patch119: invalidate-file-list-cache-when-cache-file-modified-.patch +Patch119: invalidate-file-list-cache-when-cache-file-modified-.patch # PATCH-FIX_UPSTREAM: https://github.com/saltstack/salt/pull/58400 -Patch120: xen-disk-fixes-264.patch +Patch120: xen-disk-fixes-264.patch # PATCH-FIX_UPSTREAM: https://github.com/saltstack/salt/pull/58552 -Patch121: zypperpkg-ignore-retcode-104-for-search-bsc-1176697-.patch +Patch121: zypperpkg-ignore-retcode-104-for-search-bsc-1176697-.patch # PATCH-FIX_UPSTREAM: https://github.com/saltstack/salt/pull/58520 -Patch122: support-transactional-systems-microos-271.patch +Patch122: support-transactional-systems-microos-271.patch # PATCH-FIX_OPENSUSE: https://github.com/openSUSE/salt/pull/272 -Patch123: backport-a-few-virt-prs-272.patch +Patch123: backport-a-few-virt-prs-272.patch # PATCH-FIX_OPENSUSE: https://github.com/openSUSE/salt/pull/275 -Patch124: bsc-1176024-fix-file-directory-user-and-group-owners.patch +Patch124: bsc-1176024-fix-file-directory-user-and-group-owners.patch # PATCH-FIX_OPENSUSE: https://github.com/openSUSE/salt/pull/277 -Patch125: fix-grains.test_core-unit-test-277.patch +Patch125: 
fix-grains.test_core-unit-test-277.patch # PATCH-FIX_OPENSUSE: https://github.com/openSUSE/salt/commit/e2c3b1cb72b796fe12f94af64baa2e64cbe5db0b -Patch126: drop-wrong-mock-from-chroot-unit-test.patch +Patch126: drop-wrong-mock-from-chroot-unit-test.patch # PATCH-FIX_OPENSUSE: https://github.com/openSUSE/salt/pull/280 -Patch127: ensure-virt.update-stop_on_reboot-is-updated-with-it.patch +Patch127: ensure-virt.update-stop_on_reboot-is-updated-with-it.patch # PATCH-FIX_OPENSUSE: https://github.com/openSUSE/salt/pull/281 -Patch128: path-replace-functools.wraps-with-six.wraps-bsc-1177.patch +Patch128: path-replace-functools.wraps-with-six.wraps-bsc-1177.patch # PATCH-FIX_UPSTREAM: https://github.com/saltstack/salt/pull/58560 -Patch129: fix-novendorchange-option-284.patch +Patch129: fix-novendorchange-option-284.patch # PATCH-FIX_UPSTREAM: https://github.com/saltstack/salt/pull/58871 -Patch130: fix-cve-2020-25592-and-add-tests-bsc-1178319.patch +Patch130: fix-cve-2020-25592-and-add-tests-bsc-1178319.patch # PATCH-FIX_UPSTREAM: https://github.com/saltstack/salt/pull/58520 # PATCH-FIX_OPENSUSE: https://github.com/openSUSE/salt/pull/286 -Patch131: grains-master-can-read-grains.patch +Patch131: grains-master-can-read-grains.patch # PATCH-FIX_UPSTREAM: https://github.com/saltstack/salt/pull/58784 -Patch132: add-migrated-state-and-gpg-key-management-functions-.patch +Patch132: add-migrated-state-and-gpg-key-management-functions-.patch # PATCH-FIX_OPENSUSE: https://github.com/openSUSE/salt/pull/292 -Patch133: transactional_update-unify-with-chroot.call.patch +Patch133: transactional_update-unify-with-chroot.call.patch # PATCH-FIX_OPENSUSE: https://github.com/openSUSE/salt/pull/295 -Patch134: pkgrepo-support-python-2.7-function-call-295.patch +Patch134: pkgrepo-support-python-2.7-function-call-295.patch # PATCH-FIX_OPENSUSE: https://github.com/openSUSE/salt/commit/79ae019ac7515614c6fbc620e66575f015bc447 -Patch135: drop-wrong-virt-capabilities-code-after-rebasing-pat.patch +Patch135: drop-wrong-virt-capabilities-code-after-rebasing-pat.patch # PATCH-FIX_OPENSUSE: https://github.com/openSUSE/salt/commit/5ea2f10b15684dd417bad858642faafc92cd382 # (revert https://github.com/saltstack/salt/pull/58655) -Patch136: revert-fixing-a-use-case-when-multiple-inotify-beaco.patch +Patch136: revert-fixing-a-use-case-when-multiple-inotify-beaco.patch # PATCH-FIX_UPSTREAM: https://github.com/saltstack/salt/pull/59269 -Patch137: fix-aptpkg.normalize_name-when-package-arch-is-all.patch +Patch137: fix-aptpkg.normalize_name-when-package-arch-is-all.patch # PATCH-FIX_OPENSUSE: https://github.com/openSUSE/salt/pull/310 -Patch138: open-suse-3002.2-bigvm-310.patch +Patch138: open-suse-3002.2-bigvm-310.patch # PATCH-FIX_UPSTREAM: https://github.com/saltstack/salt/pull/59146 -Patch139: open-suse-3002.2-virt-network-311.patch +Patch139: open-suse-3002.2-virt-network-311.patch # PATCH-FIX_OPENSUSE: https://github.com/openSUSE/salt/pull/298 -Patch140: fix-salt.utils.stringutils.to_str-calls-to-make-it-w.patch +Patch140: fix-salt.utils.stringutils.to_str-calls-to-make-it-w.patch # PATCH-FIX_UPSTREAM: https://github.com/saltstack/salt/pull/58262 -Patch141: add-pkg.services_need_restart-302.patch +Patch141: add-pkg.services_need_restart-302.patch # PATCH-FIX_OPENSUSE: https://github.com/openSUSE/salt/pull/301 -Patch142: add-patch-support-for-allow-vendor-change-option-wit.patch +Patch142: add-patch-support-for-allow-vendor-change-option-wit.patch # PATCH-FIX_OPENSUSE: https://github.com/openSUSE/salt/pull/304 -Patch143: 
force-zyppnotify-to-prefer-packages.db-than-packages.patch +Patch143: force-zyppnotify-to-prefer-packages.db-than-packages.patch # PATCH-FIX_OPENSUSE: https://github.com/openSUSE/salt/commit/4028fd6e84d882b6dcee695d409c7e1ed6c83bdc -Patch144: revert-add-patch-support-for-allow-vendor-change-opt.patch +Patch144: revert-add-patch-support-for-allow-vendor-change-opt.patch # PATCH-FIX_OPENSUSE: https://github.com/openSUSE/salt/commit/73e357d7eee19a73cade22becb30d9689cae27ba -Patch145: remove-deprecated-warning-that-breaks-miniion-execut.patch +Patch145: remove-deprecated-warning-that-breaks-miniion-execut.patch # PATCH-FIX_UPSTREAM: https://github.com/saltstack/salt/pull/59345 -Patch146: fix-onlyif-unless-when-multiple-conditions-bsc-11808.patch +Patch146: fix-onlyif-unless-when-multiple-conditions-bsc-11808.patch # PATCH-FIX_UPSTREAM: https://github.com/saltstack/salt/pull/59354 -Patch147: do-not-crash-when-unexpected-cmd-output-at-listing-p.patch +Patch147: do-not-crash-when-unexpected-cmd-output-at-listing-p.patch # PATCH-FIX_UPSTREAM: https://github.com/saltstack/salt/pull/59189 -Patch148: virt-uefi-fix-backport-312.patch +Patch148: virt-uefi-fix-backport-312.patch # PATCH-FIX_UPSTREAM: https://github.com/saltstack/salt/pull/59355 # https://github.com/saltstack/salt/pull/59417 -Patch149: 3002.2-xen-spicevmc-dns-srv-records-backports-314.patch +Patch149: 3002.2-xen-spicevmc-dns-srv-records-backports-314.patch # PATCH-FIX_UPSTREAM: https://github.com/saltstack/salt/pull/59485 -Patch150: open-suse-3002.2-xen-grub-316.patch +Patch150: open-suse-3002.2-xen-grub-316.patch # PATCH-FIX_OPENSUSE: https://github.com/openSUSE/salt/pull/320 -Patch151: async-batch-implementation-fix-320.patch +Patch151: async-batch-implementation-fix-320.patch # PATCH-FIX_UPSTREAM: https://github.com/saltstack/salt/pull/56173 -Patch152: fixes-56144-to-enable-hotadd-profile-support.patch +Patch152: fixes-56144-to-enable-hotadd-profile-support.patch # PATCH-FIX_OPENSUSE: https://github.com/openSUSE/salt/pull/307 -Patch153: add-sleep-on-exception-handling-on-minion-connection.patch +Patch153: add-sleep-on-exception-handling-on-minion-connection.patch # PATCH-FIX_OPENSUSE: https://github.com/openSUSE/salt/pull/323 -Patch154: implementation-of-suse_ip-execution-module-bsc-10999.patch +Patch154: implementation-of-suse_ip-execution-module-bsc-10999.patch # PATCH-FIX_UPSTREAM: no PR to link to yet -Patch155: fix-for-some-cves-bsc1181550.patch +Patch155: fix-for-some-cves-bsc1181550.patch # PATCH-FIX_UPSTREAM: no PR to link to yet -Patch156: allow-extra_filerefs-as-sanitized-kwargs-for-ssh-cli.patch +Patch156: allow-extra_filerefs-as-sanitized-kwargs-for-ssh-cli.patch # PATCH-FIX_UPSTREAM: no PR to link to yet -Patch157: fix-regression-on-cmd.run-when-passing-tuples-as-cmd.patch +Patch157: fix-regression-on-cmd.run-when-passing-tuples-as-cmd.patch # PATCH-FIX_UPSTREAM: https://github.com/saltstack/salt/pull/59524 -Patch158: prevent-race-condition-on-sigterm-for-the-minion-bsc.patch +Patch158: prevent-race-condition-on-sigterm-for-the-minion-bsc.patch # PATCH-FIX_UPSTREAM: https://github.com/saltstack/salt/pull/59746 -Patch159: do-not-monkey-patch-yaml-bsc-1177474.patch +Patch159: do-not-monkey-patch-yaml-bsc-1177474.patch # PATCH-FIX_UPSTREAM: https://github.com/saltstack/salt/pull/59536 -Patch160: 3002-set-distro-requirement-to-oldest-supported-vers.patch +Patch160: 3002-set-distro-requirement-to-oldest-supported-vers.patch # PATCH-FIX_UPSTREAM: https://github.com/saltstack/salt/pull/59693 -Patch161: 
virt.network_update-handle-missing-ipv4-netmask-attr.patch +Patch161: virt.network_update-handle-missing-ipv4-netmask-attr.patch # PATCH-FIX_OPENSUSE: https://github.com/openSUSE/salt/pull/313 -Patch162: allow-vendor-change-option-with-zypper-313.patch +Patch162: allow-vendor-change-option-with-zypper-313.patch # PATCH-FIX_UPSTREAM: https://github.com/saltstack/salt/pull/59404 # PATCH-FIX_OPENSUSE: https://github.com/openSUSE/salt/pull/341 -Patch163: add-almalinux-and-alibaba-cloud-linux-to-the-os-fami.patch +Patch163: add-almalinux-and-alibaba-cloud-linux-to-the-os-fami.patch # PATCH-FIX_OPENSUSE: https://github.com/openSUSE/salt/pull/347 -Patch164: notify-beacon-for-debian-ubuntu-systems-347.patch +Patch164: notify-beacon-for-debian-ubuntu-systems-347.patch # PATCH-FIX_OPENSUSE: https://github.com/openSUSE/salt/pull/336 -Patch165: update-target-fix-for-salt-ssh-to-process-targets-li.patch +Patch165: update-target-fix-for-salt-ssh-to-process-targets-li.patch # PATCH-FIX_UPSTREAM: https://github.com/saltstack/salt/pull/59687 -Patch166: add-alibaba-cloud-linux-2-by-backporting-upstream-s-.patch +Patch166: add-alibaba-cloud-linux-2-by-backporting-upstream-s-.patch # PATCH-FIX_OPENSUSE: https://github.com/openSUSE/salt/pull/353 -Patch167: regression-fix-of-salt-ssh-on-processing-targets-353.patch +Patch167: regression-fix-of-salt-ssh-on-processing-targets-353.patch # PATCH-FIX_UPSTREAM: https://github.com/saltstack/salt/pull/60056 -Patch168: improvements-on-ansiblegate-module-354.patch +Patch168: improvements-on-ansiblegate-module-354.patch +# PATCH-FIX_UPSTREAM: https://github.com/saltstack/salt/pull/58520 +Patch169: transactional_update-detect-recursion-in-the-executo.patch +# PATCH-FIX_UPSTREAM: https://github.com/saltstack/salt/pull/59648 +Patch170: prevent-command-injection-in-the-snapper-module-bsc-.patch +# PATCH-FIX_UPSTREAM: https://github.com/saltstack/salt/pull/60056 +Patch171: fix-issue-parsing-errors-in-ansiblegate-state-module.patch +# PATCH-FIX_OPENSUSE: https://github.com/openSUSE/salt/pull/376 +Patch172: check-if-dpkgnotify-is-executable-bsc-1186674-376.patch +# PATCH-FIX_UPSTREAM: https://github.com/saltstack/salt/pull/57881 +Patch173: parsing-epoch-out-of-version-provided-during-pkg-rem.patch +# PATCH-FIX_UPSTREAM: https://github.com/saltstack/salt/pull/58503 +Patch174: fix-missing-minion-returns-in-batch-mode-360.patch +# PATCH-FIX_UPSTREAM: https://github.com/saltstack/salt/pull/58520 +Patch175: grains.extra-support-old-non-intel-kernels-bsc-11806.patch +# PATCH-FIX_UPSTREAM: https://github.com/saltstack/salt/pull/60133 +Patch176: handle-volumes-on-stopped-pools-in-virt.vm_info-373.patch +# PATCH-FIX_UPSTREAM: https://github.com/saltstack/salt/pull/60229 +Patch177: figure-out-python-interpreter-to-use-inside-containe.patch +# PATCH-FIX_UPSTREAM: https://github.com/saltstack/salt/pull/60402 +Patch178: enhance-logging-when-inotify-beacon-is-missing-pyino.patch +# PATCH-FIX_UPSTREAM: https://github.com/saltstack/salt/pull/60421 +Patch179: move-vendor-change-logic-to-zypper-class-355.patch +# PATCH-FIX_UPSTREAM: https://github.com/saltstack/salt/pull/60420 +Patch180: virt-use-dev-kvm-to-detect-kvm-383.patch +# PATCH-FIX_UPSTREAM: https://github.com/saltstack/salt/pull/60358 +Patch181: fix-save-for-iptables-state-module-bsc-1185131-372.patch +# PATCH-FIX_UPSTREAM: https://github.com/saltstack/salt/pull/60356 +Patch182: fix-exception-in-yumpkg.remove-for-not-installed-pac.patch +# PATCH-FIX_UPSTREAM: https://github.com/saltstack/salt/pull/59535 +Patch183: 
backport-thread.is_alive-fix-390.patch +# PATCH-FIX_UPSTREAM: https://github.com/saltstack/salt/pull/56935 +# https://github.com/saltstack/salt/pull/60432 +Patch184: implementation-of-held-unheld-functions-for-state-pk.patch +# PATCH-FIX_UPSTREAM: https://github.com/saltstack/salt/pull/59682 +Patch185: adding-preliminary-support-for-rocky.-59682-391.patch +# PATCH-FIX_UPSTREAM: https://github.com/saltstack/salt/pull/60492 +Patch186: virt-pass-emulator-when-getting-domain-capabilities-.patch +# PATCH-FIX_UPSTREAM: https://github.com/saltstack/salt/pull/59756 +Patch187: enhance-openscap-module-add-xccdf_eval-call-386.patch +# PATCH-FIX_UPSTREAM: https://github.com/saltstack/salt/pull/58520 +Patch188: handle-master-tops-data-when-states-are-applied-by-t.patch +# PATCH-FIX_UPSTREAM: https://github.com/saltstack/salt/pull/58520 +Patch189: do-noop-for-services-states-when-running-systemd-in-.patch +# PATCH-FIX_UPSTREAM: https://github.com/saltstack/salt/pull/60662 +# https://github.com/saltstack/salt/pull/60688 +Patch190: better-handling-of-bad-public-keys-from-minions-bsc-.patch +# PATCH-FIX_UPSTREAM: https://github.com/saltstack/salt/pull/59756 +Patch191: fix-error-handling-in-openscap-module-bsc-1188647-40.patch +# PATCH-FIX_UPSTREAM: https://github.com/saltstack/salt/pull/58520 +Patch192: fix-failing-unit-tests-for-systemd.patch +# PATCH-FIX_UPSTREAM: https://github.com/saltstack/salt/pull/59492 +Patch193: backport-of-upstream-pr59492-to-3002.2-404.patch +# PATCH-FIX_UPSTREAM: https://github.com/saltstack/salt/pull/60650 +Patch194: add-missing-aarch64-to-rpm-package-architectures-405.patch +# PATCH-FIX_OPENSUSE: https://github.com/openSUSE/salt/pull/413 +Patch195: don-t-use-shell-sbin-nologin-in-requisites.patch +# PATCH-FIX_UPSTREAM: https://github.com/saltstack/salt/pull/60811 +Patch196: templates-move-the-globals-up-to-the-environment-jin.patch +# PATCH-FIX_UPSTREAM: https://github.com/saltstack/salt/commit/0b75ba190fda9c04cc026ad1aa4a6d572f40349b +Patch197: exclude-the-full-path-of-a-download-url-to-prevent-i.patch BuildRoot: %{_tmppath}/%{name}-%{version}-build BuildRequires: logrotate @@ -404,6 +465,7 @@ BuildRequires: fdupes %endif Requires: python3-%{name} = %{version}-%{release} +Obsoletes: python2-%{name} Requires(pre): %{_sbindir}/groupadd Requires(pre): %{_sbindir}/useradd @@ -570,8 +632,11 @@ Suggests: python3-gnupg Requires: python3-xml Suggests: python3-Mako Recommends: python3-netaddr +Recommends: python3-pyinotify %endif +Provides: bundled(python3-tornado) = 4.5.3 + %description -n python3-salt Python3 specific files for salt @@ -648,6 +713,9 @@ than serially. Summary: The client component for Saltstack Group: System/Management Requires: %{name} = %{version}-%{release} +%if 0%{?suse_version} > 1500 || 0%{?sle_version} > 150000 +Requires: (%{name}-transactional-update = %{version}-%{release} if read-only-root-fs) +%endif %if %{with systemd} %{?systemd_requires} @@ -778,19 +846,30 @@ Zsh command line completion support for %{name}. 
%package standalone-formulas-configuration Summary: Standalone Salt configuration to make the packaged formulas available for the Salt master Group: System/Management -Requires: %{name} = %{version}-%{release} -Requires: %{name}-master = %{version}-%{release} +Requires: %{name} Provides: salt-formulas-configuration Conflicts: otherproviders(salt-formulas-configuration) %description standalone-formulas-configuration This package adds the standalone configuration for the Salt master in order to make the packaged Salt formulas available on the Salt master +%package transactional-update +Summary: Transactional update executor configuration +Group: System/Management +Requires: %{name} = %{version}-%{release} +Requires: %{name}-minion = %{version}-%{release} + +%description transactional-update +For transactional systems, like MicroOS, Salt can operate +transparently if the executor "transactional-update" is registered in +the list of active executors. This package adds the configuration file. + %prep %setup -q -n salt-%{version}-suse cp %{S:1} . cp %{S:5} ./.travis.yml +cp %{S:6} . %patch1 -p1 %patch2 -p1 %patch3 -p1 @@ -959,6 +1038,35 @@ cp %{S:5} ./.travis.yml %patch166 -p1 %patch167 -p1 %patch168 -p1 +%patch169 -p1 +%patch170 -p1 +%patch171 -p1 +%patch172 -p1 +%patch173 -p1 +%patch174 -p1 +%patch175 -p1 +%patch176 -p1 +%patch177 -p1 +%patch178 -p1 +%patch179 -p1 +%patch180 -p1 +%patch181 -p1 +%patch182 -p1 +%patch183 -p1 +%patch184 -p1 +%patch185 -p1 +%patch186 -p1 +%patch187 -p1 +%patch188 -p1 +%patch189 -p1 +%patch190 -p1 +%patch191 -p1 +%patch192 -p1 +%patch193 -p1 +%patch194 -p1 +%patch195 -p1 +%patch196 -p1 +%patch197 -p1 %build # Putting /usr/bin at the front of $PATH is needed for RHEL/RES 7. Without this @@ -997,16 +1105,7 @@ for script in $DEF_PYPATH/*; do done ## create missing directories -install -Dd -m 0750 %{buildroot}%{_sysconfdir}/salt/master.d -install -Dd -m 0750 %{buildroot}%{_sysconfdir}/salt/minion.d -install -Dd -m 0750 %{buildroot}%{_sysconfdir}/salt/cloud.maps.d -install -Dd -m 0750 %{buildroot}%{_sysconfdir}/salt/cloud.profiles.d -install -Dd -m 0750 %{buildroot}%{_sysconfdir}/salt/cloud.providers.d -install -Dd -m 0750 %{buildroot}%{_localstatedir}/log/salt -install -Dd -m 0755 %{buildroot}%{_sysconfdir}/logrotate.d/ -install -Dd -m 0755 %{buildroot}%{_sbindir} -install -Dd -m 0750 %{buildroot}%{_localstatedir}/log/salt -install -Dd -m 0750 %{buildroot}%{_localstatedir}/cache/salt/minion/extmod +install -Dd -m 0750 %{buildroot}%{_localstatedir}/cache/salt/cloud install -Dd -m 0750 %{buildroot}%{_localstatedir}/cache/salt/master install -Dd -m 0750 %{buildroot}%{_localstatedir}/cache/salt/master/jobs install -Dd -m 0750 %{buildroot}%{_localstatedir}/cache/salt/master/proc @@ -1014,12 +1113,8 @@ install -Dd -m 0750 %{buildroot}%{_localstatedir}/cache/salt/master/queues install -Dd -m 0750 %{buildroot}%{_localstatedir}/cache/salt/master/roots install -Dd -m 0750 %{buildroot}%{_localstatedir}/cache/salt/master/syndics install -Dd -m 0750 %{buildroot}%{_localstatedir}/cache/salt/master/tokens -install -Dd -m 0750 %{buildroot}%{_localstatedir}/cache/salt/cloud -install -Dd -m 0750 %{buildroot}/var/lib/salt -install -Dd -m 0750 %{buildroot}/srv/salt -install -Dd -m 0750 %{buildroot}/srv/pillar -install -Dd -m 0750 %{buildroot}/srv/spm -install -Dd -m 0755 %{buildroot}%{_docdir}/salt +install -Dd -m 0750 %{buildroot}%{_localstatedir}/cache/salt/minion/extmod +install -Dd -m 0750 %{buildroot}%{_localstatedir}/log/salt install -Dd -m 0750 %{buildroot}%{_sysconfdir}/salt/ install 
-Dd -m 0750 %{buildroot}%{_sysconfdir}/salt/cloud.maps.d install -Dd -m 0750 %{buildroot}%{_sysconfdir}/salt/cloud.profiles.d @@ -1034,6 +1129,13 @@ install -Dd -m 0750 %{buildroot}%{_sysconfdir}/salt/pki/master/minions_denied install -Dd -m 0750 %{buildroot}%{_sysconfdir}/salt/pki/master/minions_pre install -Dd -m 0750 %{buildroot}%{_sysconfdir}/salt/pki/master/minions_rejected install -Dd -m 0750 %{buildroot}%{_sysconfdir}/salt/pki/minion +install -Dd -m 0750 %{buildroot}/srv/pillar +install -Dd -m 0750 %{buildroot}/srv/salt +install -Dd -m 0750 %{buildroot}/srv/spm +install -Dd -m 0750 %{buildroot}/var/lib/salt +install -Dd -m 0755 %{buildroot}%{_docdir}/salt +install -Dd -m 0755 %{buildroot}%{_sbindir} +install -Dd -m 0755 %{buildroot}%{_sysconfdir}/logrotate.d/ # Install salt-support profiles install -Dpm 0644 salt/cli/support/profiles/* %{buildroot}%{python3_sitelib}/salt/cli/support/profiles @@ -1097,6 +1199,7 @@ install -Dpm 0640 conf/roster %{buildroot}%{_sysconfdir}/salt/roster install -Dpm 0640 conf/cloud %{buildroot}%{_sysconfdir}/salt/cloud install -Dpm 0640 conf/cloud.profiles %{buildroot}%{_sysconfdir}/salt/cloud.profiles install -Dpm 0640 conf/cloud.providers %{buildroot}%{_sysconfdir}/salt/cloud.providers +install -Dpm 0640 transactional_update.conf %{buildroot}%{_sysconfdir}/salt/minion.d/transactional_update.conf # ## install logrotate file (for RHEL6 we use without sudo) %if 0%{?rhel} > 6 || 0%{?suse_version} @@ -1128,6 +1231,7 @@ install -Dd -m 0750 %{buildroot}%{_prefix}/share/salt-formulas install -Dd -m 0750 %{buildroot}%{_prefix}/share/salt-formulas/states install -Dd -m 0750 %{buildroot}%{_prefix}/share/salt-formulas/metadata install -Dpm 0640 conf/suse/standalone-formulas-configuration.conf %{buildroot}%{_sysconfdir}/salt/master.d +install -Dpm 0640 conf/suse/standalone-formulas-configuration.conf %{buildroot}%{_sysconfdir}/salt/minion.d %if 0%{?suse_version} > 1020 %fdupes %{buildroot}%{_docdir} @@ -1580,12 +1684,9 @@ rm -f %{_localstatedir}/cache/salt/minion/thin/version %{_mandir}/man1/salt-call.1.gz %{_mandir}/man1/spm.1.gz %config(noreplace) %{_sysconfdir}/logrotate.d/salt -%if 0%{?suse_version} < 1500 -%doc LICENSE AUTHORS README.rst HACKING.rst README.SUSE -%else +%{!?_licensedir:%global license %doc} %license LICENSE %doc AUTHORS README.rst HACKING.rst README.SUSE -%endif # %dir %attr(0750, root, salt) %{_sysconfdir}/salt %dir %attr(0750, root, salt) %{_sysconfdir}/salt/pki @@ -1632,11 +1733,19 @@ rm -f %{_localstatedir}/cache/salt/minion/thin/version %files standalone-formulas-configuration %defattr(-,root,root) +%dir %attr(0755, root, salt) %{_sysconfdir}/salt/master.d/ %config(noreplace) %attr(0640, root, salt) %{_sysconfdir}/salt/master.d/standalone-formulas-configuration.conf +%dir %attr(0750, root, root) %{_sysconfdir}/salt/minion.d/ +%config(noreplace) %attr(0640, root, root) %{_sysconfdir}/salt/minion.d/standalone-formulas-configuration.conf %dir %attr(0755, root, salt) %{_prefix}/share/salt-formulas/ %dir %attr(0755, root, salt) %{_prefix}/share/salt-formulas/states/ %dir %attr(0755, root, salt) %{_prefix}/share/salt-formulas/metadata/ +%files transactional-update +%defattr(-,root,root) +%config(noreplace) %attr(0640, root, root) %{_sysconfdir}/salt/minion.d/transactional_update.conf + + %changelog diff --git a/templates-move-the-globals-up-to-the-environment-jin.patch b/templates-move-the-globals-up-to-the-environment-jin.patch new file mode 100644 index 0000000..ac482b9 --- /dev/null +++ 
b/templates-move-the-globals-up-to-the-environment-jin.patch @@ -0,0 +1,112 @@ +From 1e8f506827bcf32bfe7e87763fa854a13729f2c8 Mon Sep 17 00:00:00 2001 +From: Alberto Planas +Date: Tue, 31 Aug 2021 11:20:49 +0200 +Subject: [PATCH] templates: move the globals up to the Environment + (Jinja2 3.0.0) (#418) + +* jinja: fix TemplateNotFound missing name + +The TemplateNotFound exception requires a parameter, name, that is +missing in one of the calls. + +File "/usr/lib/python3.8/site-packages/salt/utils/jinja.py", line 158, in get_source + raise TemplateNotFound +TypeError: __init__() missing 1 required positional argument: 'name' + +This patch add the missing parameter in the raise call. + +Signed-off-by: Alberto Planas + +* templates: move the globals up to the Environment + +When creating a Jinja2 environment, we populate the globals in the +Template object that we generate from the environment. This cause a +problem when there is a {% include "./file.sls" %} in the template, as +cannot find in the environment globals information like the "tpldir", +for example, making the relative path to be unresolved. + +Seems that in Jinja2 2.X this behaviour is not present, so attaching the +globals to the Template will make the include to work, but since Jinja2 +3.0.0 this is not the case. Maybe related with the re-architecture from +https://github.com/pallets/jinja/issues/295 + +This patch populate the globals in the Environment level, making this +and other variables reachable by the Jinja templates. + +Fix #55159 + +Signed-off-by: Alberto Planas +--- + changelog/55159.fixed | 1 + + salt/utils/jinja.py | 2 +- + salt/utils/templates.py | 2 +- + tests/unit/utils/test_jinja.py | 16 ++++++++++++++++ + 4 files changed, 19 insertions(+), 2 deletions(-) + create mode 100644 changelog/55159.fixed + +diff --git a/changelog/55159.fixed b/changelog/55159.fixed +new file mode 100644 +index 0000000000..6ee1a78366 +--- /dev/null ++++ b/changelog/55159.fixed +@@ -0,0 +1 @@ ++Jinja renderer resolves wrong relative paths when importing subdirectories +diff --git a/salt/utils/jinja.py b/salt/utils/jinja.py +index 997d4b1697..e1ac4657f9 100644 +--- a/salt/utils/jinja.py ++++ b/salt/utils/jinja.py +@@ -155,7 +155,7 @@ class SaltCacheLoader(BaseLoader): + 'Relative path "%s" cannot be resolved without an environment', + template, + ) +- raise TemplateNotFound ++ raise TemplateNotFound(template) + base_path = environment.globals["tpldir"] + _template = os.path.normpath("/".join((base_path, _template))) + if _template.split("/", 1)[0] == "..": +diff --git a/salt/utils/templates.py b/salt/utils/templates.py +index 1fda960b2e..f369da5c9e 100644 +--- a/salt/utils/templates.py ++++ b/salt/utils/templates.py +@@ -492,9 +492,9 @@ def render_jinja_tmpl(tmplstr, context, tmplpath=None): + ) + decoded_context[key] = salt.utils.data.decode(value) + ++ jinja_env.globals.update(decoded_context) + try: + template = jinja_env.from_string(tmplstr) +- template.globals.update(decoded_context) + output = template.render(**decoded_context) + except jinja2.exceptions.UndefinedError as exc: + trace = traceback.extract_tb(sys.exc_info()[2]) +diff --git a/tests/unit/utils/test_jinja.py b/tests/unit/utils/test_jinja.py +index 807e901afa..0219512097 100644 +--- a/tests/unit/utils/test_jinja.py ++++ b/tests/unit/utils/test_jinja.py +@@ -612,6 +612,22 @@ class TestGetTemplate(TestCase): + dict(opts=self.local_opts, saltenv="test", salt=self.local_salt), + ) + ++ def test_relative_include(self): ++ template = "{% include './hello_import' %}" ++ 
expected = "Hey world !a b !" ++ filename = os.path.join(self.template_dir, "hello_import") ++ with salt.utils.files.fopen(filename) as fp_: ++ out = render_jinja_tmpl( ++ template, ++ dict( ++ opts=self.local_opts, ++ saltenv="test", ++ salt=self.local_salt, ++ tpldir=self.template_dir, ++ ), ++ ) ++ self.assertEqual(out, expected) ++ + + class TestJinjaDefaultOptions(TestCase): + @classmethod +-- +2.33.0 + + diff --git a/transactional_update-detect-recursion-in-the-executo.patch b/transactional_update-detect-recursion-in-the-executo.patch new file mode 100644 index 0000000..cf857d3 --- /dev/null +++ b/transactional_update-detect-recursion-in-the-executo.patch @@ -0,0 +1,52 @@ +From 1ea573fe35245ab08eb26a757d373ca16c841a1c Mon Sep 17 00:00:00 2001 +From: Alberto Planas +Date: Tue, 27 Apr 2021 14:01:43 +0200 +Subject: [PATCH] transactional_update: detect recursion in the + executor (#359) + +--- + salt/executors/transactional_update.py | 10 ++++++++-- + 1 file changed, 8 insertions(+), 2 deletions(-) + +diff --git a/salt/executors/transactional_update.py b/salt/executors/transactional_update.py +index ef7d92bc05..0fa83d730b 100644 +--- a/salt/executors/transactional_update.py ++++ b/salt/executors/transactional_update.py +@@ -5,6 +5,8 @@ Transactional executor module + + """ + ++import os ++ + import salt.utils.path + + # Functions that are mapped into an equivalent one in +@@ -98,6 +100,8 @@ def execute(opts, data, func, args, kwargs): + add_delegated_functions: [file.copy] + + """ ++ inside_transaction = os.environ.get("TRANSACTIONAL_UPDATE") ++ + fun = data["fun"] + module, _ = fun.split(".") + +@@ -114,11 +118,13 @@ def execute(opts, data, func, args, kwargs): + delegated_modules |= set(opts.get("add_delegated_modules", [])) + delegated_functions |= set(opts.get("add_delegated_functions", [])) + +- if fun in DELEGATION_MAP: ++ if fun in DELEGATION_MAP and not inside_transaction: + result = __executors__["direct_call.execute"]( + opts, data, __salt__[DELEGATION_MAP[fun]], args, kwargs + ) +- elif module in delegated_modules or fun in delegated_functions: ++ elif ( ++ module in delegated_modules or fun in delegated_functions ++ ) and not inside_transaction: + result = __salt__["transactional_update.call"](fun, *args, **kwargs) + else: + result = __executors__["direct_call.execute"](opts, data, func, args, kwargs) +-- +2.31.1 + + diff --git a/transactional_update.conf b/transactional_update.conf new file mode 100644 index 0000000..46a66b6 --- /dev/null +++ b/transactional_update.conf @@ -0,0 +1,4 @@ +# Enable the transactional_update executor +module_executors: + - transactional_update + - direct_call diff --git a/virt-pass-emulator-when-getting-domain-capabilities-.patch b/virt-pass-emulator-when-getting-domain-capabilities-.patch new file mode 100644 index 0000000..b95257f --- /dev/null +++ b/virt-pass-emulator-when-getting-domain-capabilities-.patch @@ -0,0 +1,74 @@ +From ddcf5ae80be638ade7634990194c48c5c703d538 Mon Sep 17 00:00:00 2001 +From: Cedric Bosdonnat +Date: Tue, 6 Jul 2021 08:47:25 +0200 +Subject: [PATCH] virt: pass emulator when getting domain capabilities + from libvirt (#394) + +On aarch64, for some emulated architectures like armv6l libvirt needs to +have the emulator path to properly return the domain capabilities. + +Passing it will avoid virt.all_capabilities to fail on such +architectures. 
+--- + changelog/60491.fixed | 1 + + salt/modules/virt.py | 10 +++++++--- + tests/unit/modules/test_virt.py | 4 ++++ + 3 files changed, 12 insertions(+), 3 deletions(-) + create mode 100644 changelog/60491.fixed + +diff --git a/changelog/60491.fixed b/changelog/60491.fixed +new file mode 100644 +index 0000000000..256d29b5fb +--- /dev/null ++++ b/changelog/60491.fixed +@@ -0,0 +1 @@ ++Pass emulator path to get guest capabilities from libvirt +diff --git a/salt/modules/virt.py b/salt/modules/virt.py +index 2f2aa63957..12b39d76db 100644 +--- a/salt/modules/virt.py ++++ b/salt/modules/virt.py +@@ -6770,7 +6770,11 @@ def all_capabilities(**kwargs): + host_caps = ElementTree.fromstring(conn.getCapabilities()) + domains = [ + [ +- (guest.get("arch", {}).get("name", None), key) ++ ( ++ guest.get("arch", {}).get("name", None), ++ key, ++ guest.get("arch", {}).get("emulator", None), ++ ) + for key in guest.get("arch", {}).get("domains", {}).keys() + ] + for guest in [ +@@ -6788,10 +6792,10 @@ def all_capabilities(**kwargs): + "domains": [ + _parse_domain_caps( + ElementTree.fromstring( +- conn.getDomainCapabilities(None, arch, None, domain) ++ conn.getDomainCapabilities(emulator, arch, None, domain) + ) + ) +- for (arch, domain) in flattened ++ for (arch, domain, emulator) in flattened + ], + } + finally: +diff --git a/tests/unit/modules/test_virt.py b/tests/unit/modules/test_virt.py +index 5c7e1e1cc4..c6a76af10f 100644 +--- a/tests/unit/modules/test_virt.py ++++ b/tests/unit/modules/test_virt.py +@@ -5057,6 +5057,10 @@ class VirtTestCase(TestCase, LoaderModuleMockMixin): + {"qemu", "kvm"}, {domainCaps["domain"] for domainCaps in caps["domains"]}, + ) + ++ self.mock_conn.getDomainCapabilities.assert_called_with( ++ "/usr/bin/qemu-system-x86_64", "x86_64", None, "kvm" ++ ) ++ + def test_network_tag(self): + """ + Test virt._get_net_xml() with VLAN tag +-- +2.32.0 + + diff --git a/virt-use-dev-kvm-to-detect-kvm-383.patch b/virt-use-dev-kvm-to-detect-kvm-383.patch new file mode 100644 index 0000000..fca4aa2 --- /dev/null +++ b/virt-use-dev-kvm-to-detect-kvm-383.patch @@ -0,0 +1,77 @@ +From 73f474fcc7700abff110e3eac653fea5e320ee4f Mon Sep 17 00:00:00 2001 +From: Cedric Bosdonnat +Date: Thu, 24 Jun 2021 11:37:41 +0200 +Subject: [PATCH] virt: use /dev/kvm to detect KVM (#383) + +checking for kvm_* modules to be loaded is not robust enough since the +kernel could be compiled with builtin modules. /dev/kvm is much more +reliable. +--- + changelog/60419.fixed | 1 + + salt/modules/virt.py | 7 +------ + tests/pytests/unit/modules/virt/test_host.py | 19 +++++++++++++++++++ + 3 files changed, 21 insertions(+), 6 deletions(-) + create mode 100644 changelog/60419.fixed + +diff --git a/changelog/60419.fixed b/changelog/60419.fixed +new file mode 100644 +index 0000000000..44c782da48 +--- /dev/null ++++ b/changelog/60419.fixed +@@ -0,0 +1 @@ ++Check for /dev/kvm to detect KVM hypervisor. +diff --git a/salt/modules/virt.py b/salt/modules/virt.py +index d8a8c51ce5..2f2aa63957 100644 +--- a/salt/modules/virt.py ++++ b/salt/modules/virt.py +@@ -5745,12 +5745,7 @@ def _is_kvm_hyper(): + """ + Returns a bool whether or not this node is a KVM hypervisor + """ +- try: +- with salt.utils.files.fopen("/proc/modules") as fp_: +- if "kvm_" not in salt.utils.stringutils.to_unicode(fp_.read()): +- return False +- except OSError: +- # No /proc/modules? Are we on Windows? Or Solaris? 
++ if not os.path.exists("/dev/kvm"): + return False + return "libvirtd" in __salt__["cmd.run"](__grains__["ps"]) + +diff --git a/tests/pytests/unit/modules/virt/test_host.py b/tests/pytests/unit/modules/virt/test_host.py +index 6c9ac79337..c5cadb8aa0 100644 +--- a/tests/pytests/unit/modules/virt/test_host.py ++++ b/tests/pytests/unit/modules/virt/test_host.py +@@ -1,5 +1,8 @@ ++import os.path ++ + import pytest + import salt.modules.virt as virt ++from tests.support.mock import MagicMock, patch + + from .conftest import loader_modules_config + +@@ -217,3 +220,19 @@ def test_node_devices(make_mock_device): + "device name": "pci_0000_02_10_7", + }, + ] ++ ++ ++@pytest.mark.parametrize( ++ "dev_kvm, libvirtd", [(True, True), (False, False), (True, False)] ++) ++def test_is_kvm(dev_kvm, libvirtd): ++ """ ++ Test the virt._is_kvm_hyper() function ++ """ ++ with patch.dict(os.path.__dict__, {"exists": MagicMock(return_value=dev_kvm)}): ++ processes = ["libvirtd"] if libvirtd else [] ++ with patch.dict(virt.__grains__, {"ps": MagicMock(return_value="foo")}): ++ with patch.dict( ++ virt.__salt__, {"cmd.run": MagicMock(return_value=processes)} ++ ): ++ assert virt._is_kvm_hyper() == (dev_kvm and libvirtd) +-- +2.31.1 + +