From f067db8b671b05c94472e983a171e25e075f063629fb5a62a46fc01127a5c11d Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Pablo=20Su=C3=A1rez=20Hern=C3=A1ndez?= Date: Tue, 16 Nov 2021 11:00:40 +0000 Subject: [PATCH] osc copypac from project:systemsmanagement:saltstack:testing package:salt revision:427 OBS-URL: https://build.opensuse.org/package/show/systemsmanagement:saltstack/salt?expand=0&rev=193 --- _lastrevision | 2 +- ...ython-library-for-version-comparison.patch | 1115 +++++++++++++++++ ...set-plugin-implementation-3002.2-450.patch | 130 ++ ...e-grain-to-not-leak-secondary-ipv4-a.patch | 32 + ...-regression-for-yumnotify-plugin-456.patch | 23 + ...int_exc-calls-for-test_pip_state-432.patch | 36 + ...rs-in-utils-minions.py-unit-test-443.patch | 56 + ...ins-errors-on-missing-cookie-path-bs.patch | 147 +++ ...rovements-for-transactional-updates-.patch | 1064 ++++++++++++++++ salt.changes | 37 + salt.spec | 55 +- 11 files changed, 2692 insertions(+), 5 deletions(-) create mode 100644 add-rpm_vercmp-python-library-for-version-comparison.patch create mode 100644 dnfnotify-pkgset-plugin-implementation-3002.2-450.patch create mode 100644 fix-ip6_interface-grain-to-not-leak-secondary-ipv4-a.patch create mode 100644 fix-the-regression-for-yumnotify-plugin-456.patch create mode 100644 fix-traceback.print_exc-calls-for-test_pip_state-432.patch create mode 100644 mock-ip_addrs-in-utils-minions.py-unit-test-443.patch create mode 100644 prevent-pkg-plugins-errors-on-missing-cookie-path-bs.patch create mode 100644 refactor-and-improvements-for-transactional-updates-.patch diff --git a/_lastrevision b/_lastrevision index 19dd0ae..ee9a323 100644 --- a/_lastrevision +++ b/_lastrevision @@ -1 +1 @@ -43d1aa8a46def69d5b6097d235e7c7a97d4635cf \ No newline at end of file +21e5e5ac757d79b2899ba18b18ae369d713013dd \ No newline at end of file diff --git a/add-rpm_vercmp-python-library-for-version-comparison.patch b/add-rpm_vercmp-python-library-for-version-comparison.patch new file mode 100644 
index 0000000..d3e8724 --- /dev/null +++ b/add-rpm_vercmp-python-library-for-version-comparison.patch @@ -0,0 +1,1115 @@ +From 0ccc9aa260032ba86481f121132a10f439a20700 Mon Sep 17 00:00:00 2001 +From: Victor Zhestkov +Date: Mon, 15 Nov 2021 17:22:35 +0300 +Subject: [PATCH] Add rpm_vercmp python library for version comparison - + 3003.3 (#448) + +* Add rpm_vercmp python library for version comparison + +* Add rpm-vercmp to tiamat builds + +* Put GPG tests back to test_rpm_lowpkg + +Co-authored-by: Megan Wilhite +--- + changelog/60814.added | 1 + + salt/modules/rpm_lowpkg.py | 21 +- + tests/pytests/unit/modules/test_rpm_lowpkg.py | 545 ++++++++++++++++++ + tests/unit/modules/test_rpm_lowpkg.py | 478 --------------- + 4 files changed, 566 insertions(+), 479 deletions(-) + create mode 100644 changelog/60814.added + create mode 100644 tests/pytests/unit/modules/test_rpm_lowpkg.py + delete mode 100644 tests/unit/modules/test_rpm_lowpkg.py + +diff --git a/changelog/60814.added b/changelog/60814.added +new file mode 100644 +index 0000000000..7a9ffe1b25 +--- /dev/null ++++ b/changelog/60814.added +@@ -0,0 +1 @@ ++Add the python rpm-vercmp library in the rpm_lowpkg.py module. +diff --git a/salt/modules/rpm_lowpkg.py b/salt/modules/rpm_lowpkg.py +index 370bd5b728..aba5b939b6 100644 +--- a/salt/modules/rpm_lowpkg.py ++++ b/salt/modules/rpm_lowpkg.py +@@ -29,6 +29,13 @@ try: + except ImportError: + HAS_RPMUTILS = False + ++try: ++ import rpm_vercmp ++ ++ HAS_PY_RPM = True ++except ImportError: ++ HAS_PY_RPM = False ++ + + log = logging.getLogger(__name__) + +@@ -710,6 +717,8 @@ def version_cmp(ver1, ver2, ignore_epoch=False): + "labelCompare function. Not using rpm.labelCompare for " + "version comparison." 
+ ) ++ elif HAS_PY_RPM: ++ cmp_func = rpm_vercmp.vercmp + else: + log.warning( + "Please install a package that provides rpm.labelCompare for " +@@ -778,7 +787,17 @@ def version_cmp(ver1, ver2, ignore_epoch=False): + if not ver1_r or not ver2_r: + ver1_r = ver2_r = "" + +- cmp_result = cmp_func((ver1_e, ver1_v, ver1_r), (ver2_e, ver2_v, ver2_r)) ++ if HAS_PY_RPM: ++ # handle epoch version comparison first ++ # rpm_vercmp.vercmp does not handle epoch version comparison ++ ret = salt.utils.versions.version_cmp(ver1_e, ver2_e) ++ if ret in (1, -1): ++ return ret ++ cmp_result = cmp_func(ver1, ver2) ++ else: ++ cmp_result = cmp_func( ++ (ver1_e, ver1_v, ver1_r), (ver2_e, ver2_v, ver2_r) ++ ) + if cmp_result not in (-1, 0, 1): + raise CommandExecutionError( + "Comparison result '{}' is invalid".format(cmp_result) +diff --git a/tests/pytests/unit/modules/test_rpm_lowpkg.py b/tests/pytests/unit/modules/test_rpm_lowpkg.py +new file mode 100644 +index 0000000000..c9d1ac2b1c +--- /dev/null ++++ b/tests/pytests/unit/modules/test_rpm_lowpkg.py +@@ -0,0 +1,545 @@ ++""" ++ :codeauthor: Jayesh Kariya ++""" ++ ++ ++import datetime ++import pytest ++import salt.modules.cmdmod ++import salt.modules.rpm_lowpkg as rpm ++import salt.utils.path ++from tests.support.mock import MagicMock, patch ++ ++# pylint: disable=unused-import ++try: ++ import rpm as rpm_lib ++ ++ HAS_RPM = True ++except ImportError: ++ HAS_RPM = False ++ ++try: ++ import rpm_vercmp ++ ++ HAS_PY_RPM = True ++except ImportError: ++ HAS_PY_RPM = False ++# pylint: enable=unused-import ++ ++ ++def _called_with_root(mock): ++ cmd = " ".join(mock.call_args[0][0]) ++ return cmd.startswith("rpm --root /") ++ ++ ++@pytest.fixture ++def configure_loader_modules(): ++ return {rpm: {"rpm": MagicMock(return_value=MagicMock)}} ++ ++ ++# 'list_pkgs' function tests: 2 ++ ++ ++def test_list_pkgs(): ++ """ ++ Test if it list the packages currently installed in a dict ++ """ ++ mock = MagicMock(return_value="") ++ with 
patch.dict(rpm.__salt__, {"cmd.run": mock}): ++ assert rpm.list_pkgs() == {} ++ assert not _called_with_root(mock) ++ ++ ++def test_list_pkgs_root(): ++ """ ++ Test if it list the packages currently installed in a dict, ++ called with root parameter ++ """ ++ mock = MagicMock(return_value="") ++ with patch.dict(rpm.__salt__, {"cmd.run": mock}): ++ rpm.list_pkgs(root="/") ++ assert _called_with_root(mock) ++ ++ ++# 'verify' function tests: 2 ++ ++ ++def test_verify(): ++ """ ++ Test if it runs an rpm -Va on a system, and returns the ++ results in a dict ++ """ ++ mock = MagicMock( ++ return_value={"stdout": "", "stderr": "", "retcode": 0, "pid": 12345} ++ ) ++ with patch.dict(rpm.__salt__, {"cmd.run_all": mock}): ++ assert rpm.verify("httpd") == {} ++ assert not _called_with_root(mock) ++ ++ ++def test_verify_root(): ++ """ ++ Test if it runs an rpm -Va on a system, and returns the ++ results in a dict, called with root parameter ++ """ ++ mock = MagicMock( ++ return_value={"stdout": "", "stderr": "", "retcode": 0, "pid": 12345} ++ ) ++ with patch.dict(rpm.__salt__, {"cmd.run_all": mock}): ++ rpm.verify("httpd", root="/") ++ assert _called_with_root(mock) ++ ++ ++# 'file_list' function tests: 2 ++ ++ ++def test_file_list(): ++ """ ++ Test if it list the files that belong to a package. ++ """ ++ mock = MagicMock(return_value="") ++ with patch.dict(rpm.__salt__, {"cmd.run": mock}): ++ assert rpm.file_list("httpd") == {"errors": [], "files": []} ++ assert not _called_with_root(mock) ++ ++ ++def test_file_list_root(): ++ """ ++ Test if it list the files that belong to a package, using the ++ root parameter. 
++ """ ++ ++ mock = MagicMock(return_value="") ++ with patch.dict(rpm.__salt__, {"cmd.run": mock}): ++ rpm.file_list("httpd", root="/") ++ assert _called_with_root(mock) ++ ++ ++# 'file_dict' function tests: 2 ++ ++ ++def test_file_dict(): ++ """ ++ Test if it list the files that belong to a package ++ """ ++ mock = MagicMock(return_value="") ++ with patch.dict(rpm.__salt__, {"cmd.run": mock}): ++ assert rpm.file_dict("httpd") == {"errors": [], "packages": {}} ++ assert not _called_with_root(mock) ++ ++ ++def test_file_dict_root(): ++ """ ++ Test if it list the files that belong to a package ++ """ ++ mock = MagicMock(return_value="") ++ with patch.dict(rpm.__salt__, {"cmd.run": mock}): ++ rpm.file_dict("httpd", root="/") ++ assert _called_with_root(mock) ++ ++ ++# 'owner' function tests: 1 ++ ++ ++def test_owner(): ++ """ ++ Test if it return the name of the package that owns the file. ++ """ ++ assert rpm.owner() == "" ++ ++ ret = "file /usr/bin/salt-jenkins-build is not owned by any package" ++ mock = MagicMock(return_value=ret) ++ with patch.dict(rpm.__salt__, {"cmd.run_stdout": mock}): ++ assert rpm.owner("/usr/bin/salt-jenkins-build") == "" ++ assert not _called_with_root(mock) ++ ++ ret = { ++ "/usr/bin/vim": "vim-enhanced-7.4.160-1.e17.x86_64", ++ "/usr/bin/python": "python-2.7.5-16.e17.x86_64", ++ } ++ mock = MagicMock( ++ side_effect=[ ++ "python-2.7.5-16.e17.x86_64", ++ "vim-enhanced-7.4.160-1.e17.x86_64", ++ ] ++ ) ++ with patch.dict(rpm.__salt__, {"cmd.run_stdout": mock}): ++ assert rpm.owner("/usr/bin/python", "/usr/bin/vim") == ret ++ assert not _called_with_root(mock) ++ ++ ++def test_owner_root(): ++ """ ++ Test if it return the name of the package that owns the file, ++ using the parameter root. 
++ """ ++ assert rpm.owner() == "" ++ ++ ret = "file /usr/bin/salt-jenkins-build is not owned by any package" ++ mock = MagicMock(return_value=ret) ++ with patch.dict(rpm.__salt__, {"cmd.run_stdout": mock}): ++ rpm.owner("/usr/bin/salt-jenkins-build", root="/") ++ assert _called_with_root(mock) ++ ++ ++# 'checksum' function tests: 2 ++ ++ ++def test_checksum(): ++ """ ++ Test if checksum validate as expected ++ """ ++ ret = { ++ "file1.rpm": True, ++ "file2.rpm": False, ++ "file3.rpm": False, ++ } ++ ++ mock = MagicMock(side_effect=[True, 0, True, 1, False, 0]) ++ with patch.dict(rpm.__salt__, {"file.file_exists": mock, "cmd.retcode": mock}): ++ assert rpm.checksum("file1.rpm", "file2.rpm", "file3.rpm") == ret ++ assert not _called_with_root(mock) ++ ++ ++def test_checksum_root(): ++ """ ++ Test if checksum validate as expected, using the parameter ++ root ++ """ ++ mock = MagicMock(side_effect=[True, 0]) ++ with patch.dict(rpm.__salt__, {"file.file_exists": mock, "cmd.retcode": mock}): ++ rpm.checksum("file1.rpm", root="/") ++ assert _called_with_root(mock) ++ ++ ++@pytest.mark.parametrize("rpm_lib", ["HAS_RPM", "HAS_PY_RPM", "rpmdev-vercmp"]) ++def test_version_cmp_rpm_all_libraries(rpm_lib): ++ """ ++ Test package version when each library is installed ++ """ ++ rpmdev = salt.utils.path.which("rpmdev-vercmp") ++ patch_cmd = patch.dict(rpm.__salt__, {"cmd.run_all": salt.modules.cmdmod.run_all}) ++ if rpm_lib == "rpmdev-vercmp": ++ if rpmdev: ++ patch_rpm = patch("salt.modules.rpm_lowpkg.HAS_RPM", False) ++ patch_py_rpm = patch("salt.modules.rpm_lowpkg.HAS_PY_RPM", False) ++ else: ++ pytest.skip("The rpmdev-vercmp binary is not installed") ++ elif rpm_lib == "HAS_RPM": ++ if HAS_RPM: ++ patch_rpm = patch("salt.modules.rpm_lowpkg.HAS_RPM", True) ++ patch_py_rpm = patch("salt.modules.rpm_lowpkg.HAS_PY_RPM", False) ++ else: ++ pytest.skip("The RPM lib is not installed, skipping") ++ elif rpm_lib == "HAS_PY_RPM": ++ if HAS_PY_RPM: ++ patch_rpm = 
patch("salt.modules.rpm_lowpkg.HAS_RPM", False) ++ patch_py_rpm = patch("salt.modules.rpm_lowpkg.HAS_PY_RPM", True) ++ else: ++ pytest.skip("The Python RPM lib is not installed, skipping") ++ ++ with patch_rpm, patch_py_rpm, patch_cmd: ++ assert -1 == rpm.version_cmp("1", "2") ++ assert -1 == rpm.version_cmp("2.9.1-6.el7_2.3", "2.9.1-6.el7.4") ++ assert 1 == rpm.version_cmp("3.2", "3.0") ++ assert 0 == rpm.version_cmp("3.0", "3.0") ++ assert 1 == rpm.version_cmp("1:2.9.1-6.el7_2.3", "2.9.1-6.el7.4") ++ assert -1 == rpm.version_cmp("1:2.9.1-6.el7_2.3", "1:2.9.1-6.el7.4") ++ assert 1 == rpm.version_cmp("2:2.9.1-6.el7_2.3", "1:2.9.1-6.el7.4") ++ assert 0 == rpm.version_cmp("3:2.9.1-6.el7.4", "3:2.9.1-6.el7.4") ++ assert -1 == rpm.version_cmp("3:2.9.1-6.el7.4", "3:2.9.1-7.el7.4") ++ assert 1 == rpm.version_cmp("3:2.9.1-8.el7.4", "3:2.9.1-7.el7.4") ++ ++ ++@patch("salt.modules.rpm_lowpkg.HAS_RPM", True) ++@patch("salt.modules.rpm_lowpkg.rpm.labelCompare", return_value=-1) ++@patch("salt.modules.rpm_lowpkg.log") ++def test_version_cmp_rpm(mock_log, mock_labelCompare): ++ """ ++ Test package version if RPM-Python is installed ++ ++ :return: ++ """ ++ assert -1 == rpm.version_cmp("1", "2") ++ assert not mock_log.warning.called ++ assert mock_labelCompare.called ++ ++ ++@patch("salt.modules.rpm_lowpkg.HAS_RPM", False) ++@patch("salt.modules.rpm_lowpkg.HAS_RPMUTILS", True) ++@patch("salt.modules.rpm_lowpkg.HAS_PY_RPM", False) ++@patch("salt.modules.rpm_lowpkg.rpmUtils", create=True) ++@patch("salt.modules.rpm_lowpkg.log") ++def test_version_cmp_rpmutils(mock_log, mock_rpmUtils): ++ """ ++ Test package version if rpmUtils.miscutils called ++ ++ :return: ++ """ ++ mock_rpmUtils.miscutils = MagicMock() ++ mock_rpmUtils.miscutils.compareEVR = MagicMock(return_value=-1) ++ assert -1 == rpm.version_cmp("1", "2") ++ assert mock_log.warning.called ++ assert mock_rpmUtils.miscutils.compareEVR.called ++ assert ( ++ mock_log.warning.mock_calls[0][1][0] ++ == "Please install a package 
that provides rpm.labelCompare for more accurate version comparisons." ++ ) ++ ++ ++@patch("salt.modules.rpm_lowpkg.HAS_RPM", False) ++@patch("salt.modules.rpm_lowpkg.HAS_RPMUTILS", False) ++@patch("salt.modules.rpm_lowpkg.HAS_PY_RPM", False) ++@patch("salt.utils.path.which", return_value=True) ++@patch("salt.modules.rpm_lowpkg.log") ++def test_version_cmp_rpmdev_vercmp(mock_log, mock_which): ++ """ ++ Test package version if rpmdev-vercmp is installed ++ ++ :return: ++ """ ++ mock__salt__ = MagicMock(return_value={"retcode": 12}) ++ with patch.dict(rpm.__salt__, {"cmd.run_all": mock__salt__}): ++ assert -1 == rpm.version_cmp("1", "2") ++ assert mock__salt__.called ++ assert mock_log.warning.called ++ assert ( ++ mock_log.warning.mock_calls[0][1][0] ++ == "Please install a package that provides rpm.labelCompare for more accurate version comparisons." ++ ) ++ assert ( ++ mock_log.warning.mock_calls[1][1][0] ++ == "Installing the rpmdevtools package may surface dev tools in production." ++ ) ++ ++ ++@patch("salt.modules.rpm_lowpkg.HAS_RPM", False) ++@patch("salt.modules.rpm_lowpkg.HAS_RPMUTILS", False) ++@patch("salt.modules.rpm_lowpkg.HAS_PY_RPM", False) ++@patch("salt.utils.versions.version_cmp", return_value=-1) ++@patch("salt.utils.path.which", return_value=False) ++@patch("salt.modules.rpm_lowpkg.log") ++def test_version_cmp_python(mock_log, mock_which, mock_version_cmp): ++ """ ++ Test package version if falling back to python ++ ++ :return: ++ """ ++ assert -1 == rpm.version_cmp("1", "2") ++ assert mock_version_cmp.called ++ assert mock_log.warning.called ++ assert ( ++ mock_log.warning.mock_calls[0][1][0] ++ == "Please install a package that provides rpm.labelCompare for more accurate version comparisons." 
++ ) ++ assert ( ++ mock_log.warning.mock_calls[1][1][0] ++ == "Falling back on salt.utils.versions.version_cmp() for version comparisons" ++ ) ++ ++ ++def test_list_gpg_keys_no_info(): ++ """ ++ Test list_gpg_keys with no extra information ++ """ ++ mock = MagicMock(return_value="\n".join(["gpg-pubkey-1", "gpg-pubkey-2"])) ++ with patch.dict(rpm.__salt__, {"cmd.run_stdout": mock}): ++ assert rpm.list_gpg_keys() == ["gpg-pubkey-1", "gpg-pubkey-2"] ++ assert not _called_with_root(mock) ++ ++ ++def test_list_gpg_keys_no_info_root(): ++ """ ++ Test list_gpg_keys with no extra information and root ++ """ ++ mock = MagicMock(return_value="\n".join(["gpg-pubkey-1", "gpg-pubkey-2"])) ++ with patch.dict(rpm.__salt__, {"cmd.run_stdout": mock}): ++ assert rpm.list_gpg_keys(root="/mnt") == ["gpg-pubkey-1", "gpg-pubkey-2"] ++ assert _called_with_root(mock) ++ ++ ++@patch("salt.modules.rpm_lowpkg.info_gpg_key") ++def test_list_gpg_keys_info(info_gpg_key): ++ """ ++ Test list_gpg_keys with extra information ++ """ ++ info_gpg_key.side_effect = lambda x, root: {"Description": "key for {}".format(x)} ++ mock = MagicMock(return_value="\n".join(["gpg-pubkey-1", "gpg-pubkey-2"])) ++ with patch.dict(rpm.__salt__, {"cmd.run_stdout": mock}): ++ assert rpm.list_gpg_keys(info=True) == { ++ "gpg-pubkey-1": {"Description": "key for gpg-pubkey-1"}, ++ "gpg-pubkey-2": {"Description": "key for gpg-pubkey-2"}, ++ } ++ assert not _called_with_root(mock) ++ ++ ++def test_info_gpg_key(): ++ """ ++ Test info_gpg_keys from a normal output ++ """ ++ info = """Name : gpg-pubkey ++Version : 3dbdc284 ++Release : 53674dd4 ++Architecture: (none) ++Install Date: Fri 08 Mar 2019 11:57:44 AM UTC ++Group : Public Keys ++Size : 0 ++License : pubkey ++Signature : (none) ++Source RPM : (none) ++Build Date : Mon 05 May 2014 10:37:40 AM UTC ++Build Host : localhost ++Packager : openSUSE Project Signing Key ++Summary : gpg(openSUSE Project Signing Key ) ++Description : ++-----BEGIN PGP PUBLIC KEY BLOCK----- 
++Version: rpm-4.14.2.1 (NSS-3) ++ ++mQENBEkUTD8BCADWLy5d5IpJedHQQSXkC1VK/oAZlJEeBVpSZjMCn8LiHaI9Wq3G ++3Vp6wvsP1b3kssJGzVFNctdXt5tjvOLxvrEfRJuGfqHTKILByqLzkeyWawbFNfSQ ++93/8OunfSTXC1Sx3hgsNXQuOrNVKrDAQUqT620/jj94xNIg09bLSxsjN6EeTvyiO ++mtE9H1J03o9tY6meNL/gcQhxBvwuo205np0JojYBP0pOfN8l9hnIOLkA0yu4ZXig ++oKOVmf4iTjX4NImIWldT+UaWTO18NWcCrujtgHueytwYLBNV5N0oJIP2VYuLZfSD ++VYuPllv7c6O2UEOXJsdbQaVuzU1HLocDyipnABEBAAG0NG9wZW5TVVNFIFByb2pl ++Y3QgU2lnbmluZyBLZXkgPG9wZW5zdXNlQG9wZW5zdXNlLm9yZz6JATwEEwECACYC ++GwMGCwkIBwMCBBUCCAMEFgIDAQIeAQIXgAUCU2dN1AUJHR8ElQAKCRC4iy/UPb3C ++hGQrB/9teCZ3Nt8vHE0SC5NmYMAE1Spcjkzx6M4r4C70AVTMEQh/8BvgmwkKP/qI ++CWo2vC1hMXRgLg/TnTtFDq7kW+mHsCXmf5OLh2qOWCKi55Vitlf6bmH7n+h34Sha ++Ei8gAObSpZSF8BzPGl6v0QmEaGKM3O1oUbbB3Z8i6w21CTg7dbU5vGR8Yhi9rNtr ++hqrPS+q2yftjNbsODagaOUb85ESfQGx/LqoMePD+7MqGpAXjKMZqsEDP0TbxTwSk ++4UKnF4zFCYHPLK3y/hSH5SEJwwPY11l6JGdC1Ue8Zzaj7f//axUs/hTC0UZaEE+a ++5v4gbqOcigKaFs9Lc3Bj8b/lE10Y ++=i2TA ++-----END PGP PUBLIC KEY BLOCK----- ++ ++""" ++ mock = MagicMock(return_value=info) ++ with patch.dict(rpm.__salt__, {"cmd.run_stdout": mock}): ++ assert rpm.info_gpg_key("key") == { ++ "Name": "gpg-pubkey", ++ "Version": "3dbdc284", ++ "Release": "53674dd4", ++ "Architecture": None, ++ "Install Date": datetime.datetime(2019, 3, 8, 11, 57, 44), ++ "Group": "Public Keys", ++ "Size": 0, ++ "License": "pubkey", ++ "Signature": None, ++ "Source RPM": None, ++ "Build Date": datetime.datetime(2014, 5, 5, 10, 37, 40), ++ "Build Host": "localhost", ++ "Packager": "openSUSE Project Signing Key ", ++ "Summary": "gpg(openSUSE Project Signing Key )", ++ "Description": """-----BEGIN PGP PUBLIC KEY BLOCK----- ++Version: rpm-4.14.2.1 (NSS-3) ++ ++mQENBEkUTD8BCADWLy5d5IpJedHQQSXkC1VK/oAZlJEeBVpSZjMCn8LiHaI9Wq3G ++3Vp6wvsP1b3kssJGzVFNctdXt5tjvOLxvrEfRJuGfqHTKILByqLzkeyWawbFNfSQ ++93/8OunfSTXC1Sx3hgsNXQuOrNVKrDAQUqT620/jj94xNIg09bLSxsjN6EeTvyiO ++mtE9H1J03o9tY6meNL/gcQhxBvwuo205np0JojYBP0pOfN8l9hnIOLkA0yu4ZXig 
++oKOVmf4iTjX4NImIWldT+UaWTO18NWcCrujtgHueytwYLBNV5N0oJIP2VYuLZfSD ++VYuPllv7c6O2UEOXJsdbQaVuzU1HLocDyipnABEBAAG0NG9wZW5TVVNFIFByb2pl ++Y3QgU2lnbmluZyBLZXkgPG9wZW5zdXNlQG9wZW5zdXNlLm9yZz6JATwEEwECACYC ++GwMGCwkIBwMCBBUCCAMEFgIDAQIeAQIXgAUCU2dN1AUJHR8ElQAKCRC4iy/UPb3C ++hGQrB/9teCZ3Nt8vHE0SC5NmYMAE1Spcjkzx6M4r4C70AVTMEQh/8BvgmwkKP/qI ++CWo2vC1hMXRgLg/TnTtFDq7kW+mHsCXmf5OLh2qOWCKi55Vitlf6bmH7n+h34Sha ++Ei8gAObSpZSF8BzPGl6v0QmEaGKM3O1oUbbB3Z8i6w21CTg7dbU5vGR8Yhi9rNtr ++hqrPS+q2yftjNbsODagaOUb85ESfQGx/LqoMePD+7MqGpAXjKMZqsEDP0TbxTwSk ++4UKnF4zFCYHPLK3y/hSH5SEJwwPY11l6JGdC1Ue8Zzaj7f//axUs/hTC0UZaEE+a ++5v4gbqOcigKaFs9Lc3Bj8b/lE10Y ++=i2TA ++-----END PGP PUBLIC KEY BLOCK-----""", ++ } ++ assert not _called_with_root(mock) ++ ++ ++def test_info_gpg_key_extended(): ++ """ ++ Test info_gpg_keys from an extended output ++ """ ++ info = """Name : gpg-pubkey ++Version : 3dbdc284 ++Release : 53674dd4 ++Architecture: (none) ++Install Date: Fri 08 Mar 2019 11:57:44 AM UTC ++Group : Public Keys ++Size : 0 ++License : pubkey ++Signature : (none) ++Source RPM : (none) ++Build Date : Mon 05 May 2014 10:37:40 AM UTC ++Build Host : localhost ++Packager : openSUSE Project Signing Key ++Summary : gpg(openSUSE Project Signing Key ) ++Description : ++-----BEGIN PGP PUBLIC KEY BLOCK----- ++Version: rpm-4.14.2.1 (NSS-3) ++ ++mQENBEkUTD8BCADWLy5d5IpJedHQQSXkC1VK/oAZlJEeBVpSZjMCn8LiHaI9Wq3G ++3Vp6wvsP1b3kssJGzVFNctdXt5tjvOLxvrEfRJuGfqHTKILByqLzkeyWawbFNfSQ ++93/8OunfSTXC1Sx3hgsNXQuOrNVKrDAQUqT620/jj94xNIg09bLSxsjN6EeTvyiO ++mtE9H1J03o9tY6meNL/gcQhxBvwuo205np0JojYBP0pOfN8l9hnIOLkA0yu4ZXig ++oKOVmf4iTjX4NImIWldT+UaWTO18NWcCrujtgHueytwYLBNV5N0oJIP2VYuLZfSD ++VYuPllv7c6O2UEOXJsdbQaVuzU1HLocDyipnABEBAAG0NG9wZW5TVVNFIFByb2pl ++Y3QgU2lnbmluZyBLZXkgPG9wZW5zdXNlQG9wZW5zdXNlLm9yZz6JATwEEwECACYC ++GwMGCwkIBwMCBBUCCAMEFgIDAQIeAQIXgAUCU2dN1AUJHR8ElQAKCRC4iy/UPb3C ++hGQrB/9teCZ3Nt8vHE0SC5NmYMAE1Spcjkzx6M4r4C70AVTMEQh/8BvgmwkKP/qI ++CWo2vC1hMXRgLg/TnTtFDq7kW+mHsCXmf5OLh2qOWCKi55Vitlf6bmH7n+h34Sha 
++Ei8gAObSpZSF8BzPGl6v0QmEaGKM3O1oUbbB3Z8i6w21CTg7dbU5vGR8Yhi9rNtr ++hqrPS+q2yftjNbsODagaOUb85ESfQGx/LqoMePD+7MqGpAXjKMZqsEDP0TbxTwSk ++4UKnF4zFCYHPLK3y/hSH5SEJwwPY11l6JGdC1Ue8Zzaj7f//axUs/hTC0UZaEE+a ++5v4gbqOcigKaFs9Lc3Bj8b/lE10Y ++=i2TA ++-----END PGP PUBLIC KEY BLOCK----- ++ ++Distribution: (none) ++""" ++ mock = MagicMock(return_value=info) ++ with patch.dict(rpm.__salt__, {"cmd.run_stdout": mock}): ++ assert rpm.info_gpg_key("key") == { ++ "Name": "gpg-pubkey", ++ "Version": "3dbdc284", ++ "Release": "53674dd4", ++ "Architecture": None, ++ "Install Date": datetime.datetime(2019, 3, 8, 11, 57, 44), ++ "Group": "Public Keys", ++ "Size": 0, ++ "License": "pubkey", ++ "Signature": None, ++ "Source RPM": None, ++ "Build Date": datetime.datetime(2014, 5, 5, 10, 37, 40), ++ "Build Host": "localhost", ++ "Packager": "openSUSE Project Signing Key ", ++ "Summary": "gpg(openSUSE Project Signing Key )", ++ "Description": """-----BEGIN PGP PUBLIC KEY BLOCK----- ++Version: rpm-4.14.2.1 (NSS-3) ++ ++mQENBEkUTD8BCADWLy5d5IpJedHQQSXkC1VK/oAZlJEeBVpSZjMCn8LiHaI9Wq3G ++3Vp6wvsP1b3kssJGzVFNctdXt5tjvOLxvrEfRJuGfqHTKILByqLzkeyWawbFNfSQ ++93/8OunfSTXC1Sx3hgsNXQuOrNVKrDAQUqT620/jj94xNIg09bLSxsjN6EeTvyiO ++mtE9H1J03o9tY6meNL/gcQhxBvwuo205np0JojYBP0pOfN8l9hnIOLkA0yu4ZXig ++oKOVmf4iTjX4NImIWldT+UaWTO18NWcCrujtgHueytwYLBNV5N0oJIP2VYuLZfSD ++VYuPllv7c6O2UEOXJsdbQaVuzU1HLocDyipnABEBAAG0NG9wZW5TVVNFIFByb2pl ++Y3QgU2lnbmluZyBLZXkgPG9wZW5zdXNlQG9wZW5zdXNlLm9yZz6JATwEEwECACYC ++GwMGCwkIBwMCBBUCCAMEFgIDAQIeAQIXgAUCU2dN1AUJHR8ElQAKCRC4iy/UPb3C ++hGQrB/9teCZ3Nt8vHE0SC5NmYMAE1Spcjkzx6M4r4C70AVTMEQh/8BvgmwkKP/qI ++CWo2vC1hMXRgLg/TnTtFDq7kW+mHsCXmf5OLh2qOWCKi55Vitlf6bmH7n+h34Sha ++Ei8gAObSpZSF8BzPGl6v0QmEaGKM3O1oUbbB3Z8i6w21CTg7dbU5vGR8Yhi9rNtr ++hqrPS+q2yftjNbsODagaOUb85ESfQGx/LqoMePD+7MqGpAXjKMZqsEDP0TbxTwSk ++4UKnF4zFCYHPLK3y/hSH5SEJwwPY11l6JGdC1Ue8Zzaj7f//axUs/hTC0UZaEE+a ++5v4gbqOcigKaFs9Lc3Bj8b/lE10Y ++=i2TA ++-----END PGP PUBLIC KEY BLOCK-----""", ++ "Distribution": None, ++ } ++ assert not 
_called_with_root(mock) ++ ++ ++def test_remove_gpg_key(): ++ """ ++ Test remove_gpg_key ++ """ ++ mock = MagicMock(return_value=0) ++ with patch.dict(rpm.__salt__, {"cmd.retcode": mock}): ++ assert rpm.remove_gpg_key("gpg-pubkey-1") ++ assert not _called_with_root(mock) +diff --git a/tests/unit/modules/test_rpm_lowpkg.py b/tests/unit/modules/test_rpm_lowpkg.py +deleted file mode 100644 +index 280a19b911..0000000000 +--- a/tests/unit/modules/test_rpm_lowpkg.py ++++ /dev/null +@@ -1,478 +0,0 @@ +-# -*- coding: utf-8 -*- +-""" +- :codeauthor: Jayesh Kariya +-""" +- +-# Import Python Libs +-from __future__ import absolute_import +-import datetime +- +-# Import Salt Libs +-import salt.modules.rpm_lowpkg as rpm +- +-# Import Salt Testing Libs +-from tests.support.mixins import LoaderModuleMockMixin +-from tests.support.mock import MagicMock, patch +-from tests.support.unit import TestCase +- +- +-def _called_with_root(mock): +- cmd = " ".join(mock.call_args[0][0]) +- return cmd.startswith("rpm --root /") +- +- +-class RpmTestCase(TestCase, LoaderModuleMockMixin): +- """ +- Test cases for salt.modules.rpm +- """ +- +- def setup_loader_modules(self): +- return {rpm: {"rpm": MagicMock(return_value=MagicMock)}} +- +- # 'list_pkgs' function tests: 2 +- +- def test_list_pkgs(self): +- """ +- Test if it list the packages currently installed in a dict +- """ +- mock = MagicMock(return_value="") +- with patch.dict(rpm.__salt__, {"cmd.run": mock}): +- self.assertDictEqual(rpm.list_pkgs(), {}) +- self.assertFalse(_called_with_root(mock)) +- +- def test_list_pkgs_root(self): +- """ +- Test if it list the packages currently installed in a dict, +- called with root parameter +- """ +- mock = MagicMock(return_value="") +- with patch.dict(rpm.__salt__, {"cmd.run": mock}): +- rpm.list_pkgs(root="/") +- self.assertTrue(_called_with_root(mock)) +- +- # 'verify' function tests: 2 +- +- def test_verify(self): +- """ +- Test if it runs an rpm -Va on a system, and returns the +- results in a 
dict +- """ +- mock = MagicMock( +- return_value={"stdout": "", "stderr": "", "retcode": 0, "pid": 12345} +- ) +- with patch.dict(rpm.__salt__, {"cmd.run_all": mock}): +- self.assertDictEqual(rpm.verify("httpd"), {}) +- self.assertFalse(_called_with_root(mock)) +- +- def test_verify_root(self): +- """ +- Test if it runs an rpm -Va on a system, and returns the +- results in a dict, called with root parameter +- """ +- mock = MagicMock( +- return_value={"stdout": "", "stderr": "", "retcode": 0, "pid": 12345} +- ) +- with patch.dict(rpm.__salt__, {"cmd.run_all": mock}): +- rpm.verify("httpd", root="/") +- self.assertTrue(_called_with_root(mock)) +- +- # 'file_list' function tests: 2 +- +- def test_file_list(self): +- """ +- Test if it list the files that belong to a package. +- """ +- mock = MagicMock(return_value="") +- with patch.dict(rpm.__salt__, {"cmd.run": mock}): +- self.assertDictEqual(rpm.file_list("httpd"), {"errors": [], "files": []}) +- self.assertFalse(_called_with_root(mock)) +- +- def test_file_list_root(self): +- """ +- Test if it list the files that belong to a package, using the +- root parameter. 
+- """ +- +- mock = MagicMock(return_value="") +- with patch.dict(rpm.__salt__, {"cmd.run": mock}): +- rpm.file_list("httpd", root="/") +- self.assertTrue(_called_with_root(mock)) +- +- # 'file_dict' function tests: 2 +- +- def test_file_dict(self): +- """ +- Test if it list the files that belong to a package +- """ +- mock = MagicMock(return_value="") +- with patch.dict(rpm.__salt__, {"cmd.run": mock}): +- self.assertDictEqual(rpm.file_dict("httpd"), {"errors": [], "packages": {}}) +- self.assertFalse(_called_with_root(mock)) +- +- def test_file_dict_root(self): +- """ +- Test if it list the files that belong to a package +- """ +- mock = MagicMock(return_value="") +- with patch.dict(rpm.__salt__, {"cmd.run": mock}): +- rpm.file_dict("httpd", root="/") +- self.assertTrue(_called_with_root(mock)) +- +- # 'owner' function tests: 1 +- +- def test_owner(self): +- """ +- Test if it return the name of the package that owns the file. +- """ +- self.assertEqual(rpm.owner(), "") +- +- ret = "file /usr/bin/salt-jenkins-build is not owned by any package" +- mock = MagicMock(return_value=ret) +- with patch.dict(rpm.__salt__, {"cmd.run_stdout": mock}): +- self.assertEqual(rpm.owner("/usr/bin/salt-jenkins-build"), "") +- self.assertFalse(_called_with_root(mock)) +- +- ret = { +- "/usr/bin/vim": "vim-enhanced-7.4.160-1.e17.x86_64", +- "/usr/bin/python": "python-2.7.5-16.e17.x86_64", +- } +- mock = MagicMock( +- side_effect=[ +- "python-2.7.5-16.e17.x86_64", +- "vim-enhanced-7.4.160-1.e17.x86_64", +- ] +- ) +- with patch.dict(rpm.__salt__, {"cmd.run_stdout": mock}): +- self.assertDictEqual(rpm.owner("/usr/bin/python", "/usr/bin/vim"), ret) +- self.assertFalse(_called_with_root(mock)) +- +- def test_owner_root(self): +- """ +- Test if it return the name of the package that owns the file, +- using the parameter root. 
+- """ +- self.assertEqual(rpm.owner(), "") +- +- ret = "file /usr/bin/salt-jenkins-build is not owned by any package" +- mock = MagicMock(return_value=ret) +- with patch.dict(rpm.__salt__, {"cmd.run_stdout": mock}): +- rpm.owner("/usr/bin/salt-jenkins-build", root="/") +- self.assertTrue(_called_with_root(mock)) +- +- # 'checksum' function tests: 2 +- +- def test_checksum(self): +- """ +- Test if checksum validate as expected +- """ +- ret = { +- "file1.rpm": True, +- "file2.rpm": False, +- "file3.rpm": False, +- } +- +- mock = MagicMock(side_effect=[True, 0, True, 1, False, 0]) +- with patch.dict(rpm.__salt__, {"file.file_exists": mock, "cmd.retcode": mock}): +- self.assertDictEqual( +- rpm.checksum("file1.rpm", "file2.rpm", "file3.rpm"), ret +- ) +- self.assertFalse(_called_with_root(mock)) +- +- def test_checksum_root(self): +- """ +- Test if checksum validate as expected, using the parameter +- root +- """ +- mock = MagicMock(side_effect=[True, 0]) +- with patch.dict(rpm.__salt__, {"file.file_exists": mock, "cmd.retcode": mock}): +- rpm.checksum("file1.rpm", root="/") +- self.assertTrue(_called_with_root(mock)) +- +- @patch("salt.modules.rpm_lowpkg.HAS_RPM", True) +- @patch("salt.modules.rpm_lowpkg.rpm.labelCompare", return_value=-1) +- @patch("salt.modules.rpm_lowpkg.log") +- def test_version_cmp_rpm(self, mock_log, mock_labelCompare): +- """ +- Test package version if RPM-Python is installed +- +- :return: +- """ +- self.assertEqual(-1, rpm.version_cmp("1", "2")) +- self.assertEqual(mock_log.warning.called, False) +- self.assertEqual(mock_labelCompare.called, True) +- +- @patch("salt.modules.rpm_lowpkg.HAS_RPM", False) +- @patch("salt.modules.rpm_lowpkg.HAS_RPMUTILS", True) +- @patch("salt.modules.rpm_lowpkg.rpmUtils", create=True) +- @patch("salt.modules.rpm_lowpkg.log") +- def test_version_cmp_rpmutils(self, mock_log, mock_rpmUtils): +- """ +- Test package version if rpmUtils.miscutils called +- +- :return: +- """ +- mock_rpmUtils.miscutils = MagicMock() 
+- mock_rpmUtils.miscutils.compareEVR = MagicMock(return_value=-1) +- self.assertEqual(-1, rpm.version_cmp("1", "2")) +- self.assertEqual(mock_log.warning.called, True) +- self.assertEqual(mock_rpmUtils.miscutils.compareEVR.called, True) +- self.assertEqual( +- mock_log.warning.mock_calls[0][1][0], +- "Please install a package that provides rpm.labelCompare for more accurate version comparisons.", +- ) +- +- @patch("salt.modules.rpm_lowpkg.HAS_RPM", False) +- @patch("salt.modules.rpm_lowpkg.HAS_RPMUTILS", False) +- @patch("salt.utils.path.which", return_value=True) +- @patch("salt.modules.rpm_lowpkg.log") +- def test_version_cmp_rpmdev_vercmp(self, mock_log, mock_which): +- """ +- Test package version if rpmdev-vercmp is installed +- +- :return: +- """ +- mock__salt__ = MagicMock(return_value={"retcode": 12}) +- with patch.dict(rpm.__salt__, {"cmd.run_all": mock__salt__}): +- self.assertEqual(-1, rpm.version_cmp("1", "2")) +- self.assertEqual(mock__salt__.called, True) +- self.assertEqual(mock_log.warning.called, True) +- self.assertEqual( +- mock_log.warning.mock_calls[0][1][0], +- "Please install a package that provides rpm.labelCompare for more accurate version comparisons.", +- ) +- self.assertEqual( +- mock_log.warning.mock_calls[1][1][0], +- "Installing the rpmdevtools package may surface dev tools in production.", +- ) +- +- @patch("salt.modules.rpm_lowpkg.HAS_RPM", False) +- @patch("salt.modules.rpm_lowpkg.HAS_RPMUTILS", False) +- @patch("salt.utils.versions.version_cmp", return_value=-1) +- @patch("salt.utils.path.which", return_value=False) +- @patch("salt.modules.rpm_lowpkg.log") +- def test_version_cmp_python(self, mock_log, mock_which, mock_version_cmp): +- """ +- Test package version if falling back to python +- +- :return: +- """ +- with patch( +- "salt.modules.rpm_lowpkg.rpm.labelCompare", MagicMock(return_value=0) +- ), patch("salt.modules.rpm_lowpkg.HAS_RPM", False): +- self.assertEqual( +- -1, rpm.version_cmp("1", "2") +- ) # mock returns -1, a 
python implementation was called +- +- def test_list_gpg_keys_no_info(self): +- """ +- Test list_gpg_keys with no extra information +- """ +- mock = MagicMock(return_value="\n".join(["gpg-pubkey-1", "gpg-pubkey-2"])) +- with patch.dict(rpm.__salt__, {"cmd.run_stdout": mock}): +- self.assertEqual(rpm.list_gpg_keys(), ["gpg-pubkey-1", "gpg-pubkey-2"]) +- self.assertFalse(_called_with_root(mock)) +- +- def test_list_gpg_keys_no_info_root(self): +- """ +- Test list_gpg_keys with no extra information and root +- """ +- mock = MagicMock(return_value="\n".join(["gpg-pubkey-1", "gpg-pubkey-2"])) +- with patch.dict(rpm.__salt__, {"cmd.run_stdout": mock}): +- self.assertEqual( +- rpm.list_gpg_keys(root="/mnt"), ["gpg-pubkey-1", "gpg-pubkey-2"] +- ) +- self.assertTrue(_called_with_root(mock)) +- +- @patch("salt.modules.rpm_lowpkg.info_gpg_key") +- def test_list_gpg_keys_info(self, info_gpg_key): +- """ +- Test list_gpg_keys with extra information +- """ +- info_gpg_key.side_effect = lambda x, root: { +- "Description": "key for {}".format(x) +- } +- mock = MagicMock(return_value="\n".join(["gpg-pubkey-1", "gpg-pubkey-2"])) +- with patch.dict(rpm.__salt__, {"cmd.run_stdout": mock}): +- self.assertEqual( +- rpm.list_gpg_keys(info=True), +- { +- "gpg-pubkey-1": {"Description": "key for gpg-pubkey-1"}, +- "gpg-pubkey-2": {"Description": "key for gpg-pubkey-2"}, +- }, +- ) +- self.assertFalse(_called_with_root(mock)) +- +- def test_info_gpg_key(self): +- """ +- Test info_gpg_keys from a normal output +- """ +- info = """Name : gpg-pubkey +-Version : 3dbdc284 +-Release : 53674dd4 +-Architecture: (none) +-Install Date: Fri 08 Mar 2019 11:57:44 AM UTC +-Group : Public Keys +-Size : 0 +-License : pubkey +-Signature : (none) +-Source RPM : (none) +-Build Date : Mon 05 May 2014 10:37:40 AM UTC +-Build Host : localhost +-Packager : openSUSE Project Signing Key +-Summary : gpg(openSUSE Project Signing Key ) +-Description : +------BEGIN PGP PUBLIC KEY BLOCK----- +-Version: rpm-4.14.2.1 
(NSS-3) +- +-mQENBEkUTD8BCADWLy5d5IpJedHQQSXkC1VK/oAZlJEeBVpSZjMCn8LiHaI9Wq3G +-3Vp6wvsP1b3kssJGzVFNctdXt5tjvOLxvrEfRJuGfqHTKILByqLzkeyWawbFNfSQ +-93/8OunfSTXC1Sx3hgsNXQuOrNVKrDAQUqT620/jj94xNIg09bLSxsjN6EeTvyiO +-mtE9H1J03o9tY6meNL/gcQhxBvwuo205np0JojYBP0pOfN8l9hnIOLkA0yu4ZXig +-oKOVmf4iTjX4NImIWldT+UaWTO18NWcCrujtgHueytwYLBNV5N0oJIP2VYuLZfSD +-VYuPllv7c6O2UEOXJsdbQaVuzU1HLocDyipnABEBAAG0NG9wZW5TVVNFIFByb2pl +-Y3QgU2lnbmluZyBLZXkgPG9wZW5zdXNlQG9wZW5zdXNlLm9yZz6JATwEEwECACYC +-GwMGCwkIBwMCBBUCCAMEFgIDAQIeAQIXgAUCU2dN1AUJHR8ElQAKCRC4iy/UPb3C +-hGQrB/9teCZ3Nt8vHE0SC5NmYMAE1Spcjkzx6M4r4C70AVTMEQh/8BvgmwkKP/qI +-CWo2vC1hMXRgLg/TnTtFDq7kW+mHsCXmf5OLh2qOWCKi55Vitlf6bmH7n+h34Sha +-Ei8gAObSpZSF8BzPGl6v0QmEaGKM3O1oUbbB3Z8i6w21CTg7dbU5vGR8Yhi9rNtr +-hqrPS+q2yftjNbsODagaOUb85ESfQGx/LqoMePD+7MqGpAXjKMZqsEDP0TbxTwSk +-4UKnF4zFCYHPLK3y/hSH5SEJwwPY11l6JGdC1Ue8Zzaj7f//axUs/hTC0UZaEE+a +-5v4gbqOcigKaFs9Lc3Bj8b/lE10Y +-=i2TA +------END PGP PUBLIC KEY BLOCK----- +- +-""" +- mock = MagicMock(return_value=info) +- with patch.dict(rpm.__salt__, {"cmd.run_stdout": mock}): +- self.assertEqual( +- rpm.info_gpg_key("key"), +- { +- "Name": "gpg-pubkey", +- "Version": "3dbdc284", +- "Release": "53674dd4", +- "Architecture": None, +- "Install Date": datetime.datetime(2019, 3, 8, 11, 57, 44), +- "Group": "Public Keys", +- "Size": 0, +- "License": "pubkey", +- "Signature": None, +- "Source RPM": None, +- "Build Date": datetime.datetime(2014, 5, 5, 10, 37, 40), +- "Build Host": "localhost", +- "Packager": "openSUSE Project Signing Key ", +- "Summary": "gpg(openSUSE Project Signing Key )", +- "Description": """-----BEGIN PGP PUBLIC KEY BLOCK----- +-Version: rpm-4.14.2.1 (NSS-3) +- +-mQENBEkUTD8BCADWLy5d5IpJedHQQSXkC1VK/oAZlJEeBVpSZjMCn8LiHaI9Wq3G +-3Vp6wvsP1b3kssJGzVFNctdXt5tjvOLxvrEfRJuGfqHTKILByqLzkeyWawbFNfSQ +-93/8OunfSTXC1Sx3hgsNXQuOrNVKrDAQUqT620/jj94xNIg09bLSxsjN6EeTvyiO +-mtE9H1J03o9tY6meNL/gcQhxBvwuo205np0JojYBP0pOfN8l9hnIOLkA0yu4ZXig 
+-oKOVmf4iTjX4NImIWldT+UaWTO18NWcCrujtgHueytwYLBNV5N0oJIP2VYuLZfSD +-VYuPllv7c6O2UEOXJsdbQaVuzU1HLocDyipnABEBAAG0NG9wZW5TVVNFIFByb2pl +-Y3QgU2lnbmluZyBLZXkgPG9wZW5zdXNlQG9wZW5zdXNlLm9yZz6JATwEEwECACYC +-GwMGCwkIBwMCBBUCCAMEFgIDAQIeAQIXgAUCU2dN1AUJHR8ElQAKCRC4iy/UPb3C +-hGQrB/9teCZ3Nt8vHE0SC5NmYMAE1Spcjkzx6M4r4C70AVTMEQh/8BvgmwkKP/qI +-CWo2vC1hMXRgLg/TnTtFDq7kW+mHsCXmf5OLh2qOWCKi55Vitlf6bmH7n+h34Sha +-Ei8gAObSpZSF8BzPGl6v0QmEaGKM3O1oUbbB3Z8i6w21CTg7dbU5vGR8Yhi9rNtr +-hqrPS+q2yftjNbsODagaOUb85ESfQGx/LqoMePD+7MqGpAXjKMZqsEDP0TbxTwSk +-4UKnF4zFCYHPLK3y/hSH5SEJwwPY11l6JGdC1Ue8Zzaj7f//axUs/hTC0UZaEE+a +-5v4gbqOcigKaFs9Lc3Bj8b/lE10Y +-=i2TA +------END PGP PUBLIC KEY BLOCK-----""", +- }, +- ) +- self.assertFalse(_called_with_root(mock)) +- +- def test_info_gpg_key_extended(self): +- """ +- Test info_gpg_keys from an extended output +- """ +- info = """Name : gpg-pubkey +-Version : 3dbdc284 +-Release : 53674dd4 +-Architecture: (none) +-Install Date: Fri 08 Mar 2019 11:57:44 AM UTC +-Group : Public Keys +-Size : 0 +-License : pubkey +-Signature : (none) +-Source RPM : (none) +-Build Date : Mon 05 May 2014 10:37:40 AM UTC +-Build Host : localhost +-Packager : openSUSE Project Signing Key +-Summary : gpg(openSUSE Project Signing Key ) +-Description : +------BEGIN PGP PUBLIC KEY BLOCK----- +-Version: rpm-4.14.2.1 (NSS-3) +- +-mQENBEkUTD8BCADWLy5d5IpJedHQQSXkC1VK/oAZlJEeBVpSZjMCn8LiHaI9Wq3G +-3Vp6wvsP1b3kssJGzVFNctdXt5tjvOLxvrEfRJuGfqHTKILByqLzkeyWawbFNfSQ +-93/8OunfSTXC1Sx3hgsNXQuOrNVKrDAQUqT620/jj94xNIg09bLSxsjN6EeTvyiO +-mtE9H1J03o9tY6meNL/gcQhxBvwuo205np0JojYBP0pOfN8l9hnIOLkA0yu4ZXig +-oKOVmf4iTjX4NImIWldT+UaWTO18NWcCrujtgHueytwYLBNV5N0oJIP2VYuLZfSD +-VYuPllv7c6O2UEOXJsdbQaVuzU1HLocDyipnABEBAAG0NG9wZW5TVVNFIFByb2pl +-Y3QgU2lnbmluZyBLZXkgPG9wZW5zdXNlQG9wZW5zdXNlLm9yZz6JATwEEwECACYC +-GwMGCwkIBwMCBBUCCAMEFgIDAQIeAQIXgAUCU2dN1AUJHR8ElQAKCRC4iy/UPb3C +-hGQrB/9teCZ3Nt8vHE0SC5NmYMAE1Spcjkzx6M4r4C70AVTMEQh/8BvgmwkKP/qI 
+-CWo2vC1hMXRgLg/TnTtFDq7kW+mHsCXmf5OLh2qOWCKi55Vitlf6bmH7n+h34Sha +-Ei8gAObSpZSF8BzPGl6v0QmEaGKM3O1oUbbB3Z8i6w21CTg7dbU5vGR8Yhi9rNtr +-hqrPS+q2yftjNbsODagaOUb85ESfQGx/LqoMePD+7MqGpAXjKMZqsEDP0TbxTwSk +-4UKnF4zFCYHPLK3y/hSH5SEJwwPY11l6JGdC1Ue8Zzaj7f//axUs/hTC0UZaEE+a +-5v4gbqOcigKaFs9Lc3Bj8b/lE10Y +-=i2TA +------END PGP PUBLIC KEY BLOCK----- +- +-Distribution: (none) +-""" +- mock = MagicMock(return_value=info) +- with patch.dict(rpm.__salt__, {"cmd.run_stdout": mock}): +- self.assertEqual( +- rpm.info_gpg_key("key"), +- { +- "Name": "gpg-pubkey", +- "Version": "3dbdc284", +- "Release": "53674dd4", +- "Architecture": None, +- "Install Date": datetime.datetime(2019, 3, 8, 11, 57, 44), +- "Group": "Public Keys", +- "Size": 0, +- "License": "pubkey", +- "Signature": None, +- "Source RPM": None, +- "Build Date": datetime.datetime(2014, 5, 5, 10, 37, 40), +- "Build Host": "localhost", +- "Packager": "openSUSE Project Signing Key ", +- "Summary": "gpg(openSUSE Project Signing Key )", +- "Description": """-----BEGIN PGP PUBLIC KEY BLOCK----- +-Version: rpm-4.14.2.1 (NSS-3) +- +-mQENBEkUTD8BCADWLy5d5IpJedHQQSXkC1VK/oAZlJEeBVpSZjMCn8LiHaI9Wq3G +-3Vp6wvsP1b3kssJGzVFNctdXt5tjvOLxvrEfRJuGfqHTKILByqLzkeyWawbFNfSQ +-93/8OunfSTXC1Sx3hgsNXQuOrNVKrDAQUqT620/jj94xNIg09bLSxsjN6EeTvyiO +-mtE9H1J03o9tY6meNL/gcQhxBvwuo205np0JojYBP0pOfN8l9hnIOLkA0yu4ZXig +-oKOVmf4iTjX4NImIWldT+UaWTO18NWcCrujtgHueytwYLBNV5N0oJIP2VYuLZfSD +-VYuPllv7c6O2UEOXJsdbQaVuzU1HLocDyipnABEBAAG0NG9wZW5TVVNFIFByb2pl +-Y3QgU2lnbmluZyBLZXkgPG9wZW5zdXNlQG9wZW5zdXNlLm9yZz6JATwEEwECACYC +-GwMGCwkIBwMCBBUCCAMEFgIDAQIeAQIXgAUCU2dN1AUJHR8ElQAKCRC4iy/UPb3C +-hGQrB/9teCZ3Nt8vHE0SC5NmYMAE1Spcjkzx6M4r4C70AVTMEQh/8BvgmwkKP/qI +-CWo2vC1hMXRgLg/TnTtFDq7kW+mHsCXmf5OLh2qOWCKi55Vitlf6bmH7n+h34Sha +-Ei8gAObSpZSF8BzPGl6v0QmEaGKM3O1oUbbB3Z8i6w21CTg7dbU5vGR8Yhi9rNtr +-hqrPS+q2yftjNbsODagaOUb85ESfQGx/LqoMePD+7MqGpAXjKMZqsEDP0TbxTwSk +-4UKnF4zFCYHPLK3y/hSH5SEJwwPY11l6JGdC1Ue8Zzaj7f//axUs/hTC0UZaEE+a +-5v4gbqOcigKaFs9Lc3Bj8b/lE10Y +-=i2TA 
+------END PGP PUBLIC KEY BLOCK-----""", +- "Distribution": None, +- }, +- ) +- self.assertFalse(_called_with_root(mock)) +- +- def test_remove_gpg_key(self): +- """ +- Test remove_gpg_key +- """ +- mock = MagicMock(return_value=0) +- with patch.dict(rpm.__salt__, {"cmd.retcode": mock}): +- self.assertTrue(rpm.remove_gpg_key("gpg-pubkey-1")) +- self.assertFalse(_called_with_root(mock)) +-- +2.33.1 + + diff --git a/dnfnotify-pkgset-plugin-implementation-3002.2-450.patch b/dnfnotify-pkgset-plugin-implementation-3002.2-450.patch new file mode 100644 index 0000000..a94cafb --- /dev/null +++ b/dnfnotify-pkgset-plugin-implementation-3002.2-450.patch @@ -0,0 +1,130 @@ +From b1c213f171538890b3b61def25e4777bccfa64fe Mon Sep 17 00:00:00 2001 +From: Victor Zhestkov <35733135+vzhestkov@users.noreply.github.com> +Date: Mon, 8 Nov 2021 18:09:53 +0300 +Subject: [PATCH] dnfnotify pkgset plugin implementation - 3002.2 (#450) + +* dnfnotify pkgset plugin implementation + +* Fix failing check + +* Add error reporting if not possible to save cookie + +* Try to create dir if not exists + +* Show the exception message instead of file name + +* Fix isort +--- + scripts/suse/dnf/plugins/README.md | 21 +++++++++ + scripts/suse/dnf/plugins/dnfnotify.conf | 2 + + scripts/suse/dnf/plugins/dnfnotify.py | 60 +++++++++++++++++++++++++ + 3 files changed, 83 insertions(+) + create mode 100644 scripts/suse/dnf/plugins/README.md + create mode 100644 scripts/suse/dnf/plugins/dnfnotify.conf + create mode 100644 scripts/suse/dnf/plugins/dnfnotify.py + +diff --git a/scripts/suse/dnf/plugins/README.md b/scripts/suse/dnf/plugins/README.md +new file mode 100644 +index 0000000000..b19428608e +--- /dev/null ++++ b/scripts/suse/dnf/plugins/README.md +@@ -0,0 +1,21 @@ ++## What it is ++ ++Plugin which provides a notification mechanism to Salt, if DNF is ++used outside of it. 
++ ++## Installation ++ ++Configuration files are going to: ++ ++ `/etc/dnf/plugins/[name].conf` ++ ++Plugin itself goes to: ++ ++ `%{python_sitelib}/dnf-plugins/[name].py` ++ The path to dnf-plugins directory is Python version dependant. ++ ++## Permissions ++ ++User: root ++Group: root ++Mode: 644 +diff --git a/scripts/suse/dnf/plugins/dnfnotify.conf b/scripts/suse/dnf/plugins/dnfnotify.conf +new file mode 100644 +index 0000000000..e7002aa3e9 +--- /dev/null ++++ b/scripts/suse/dnf/plugins/dnfnotify.conf +@@ -0,0 +1,2 @@ ++[main] ++enabled = 1 +diff --git a/scripts/suse/dnf/plugins/dnfnotify.py b/scripts/suse/dnf/plugins/dnfnotify.py +new file mode 100644 +index 0000000000..6e9df85f71 +--- /dev/null ++++ b/scripts/suse/dnf/plugins/dnfnotify.py +@@ -0,0 +1,60 @@ ++import hashlib ++import os ++ ++import dnf ++from dnfpluginscore import _, logger ++ ++ ++class DnfNotifyPlugin(dnf.Plugin): ++ def __init__(self, base, cli): ++ super().__init__(base, cli) ++ self.base = base ++ self.cookie_file = "/var/cache/salt/minion/rpmdb.cookie" ++ if os.path.exists("/var/lib/rpm/rpmdb.sqlite"): ++ self.rpmdb_file = "/var/lib/rpm/rpmdb.sqlite" ++ else: ++ self.rpmdb_file = "/var/lib/rpm/Packages" ++ ++ def transaction(self): ++ if "SALT_RUNNING" not in os.environ: ++ try: ++ ck_dir = os.path.dirname(self.cookie_file) ++ if not os.path.exists(ck_dir): ++ os.makedirs(ck_dir) ++ with open(self.cookie_file, "w") as ck_fh: ++ ck_fh.write( ++ "{chksum} {mtime}\n".format( ++ chksum=self._get_checksum(), mtime=self._get_mtime() ++ ) ++ ) ++ except OSError as e: ++ logger.error(_("Unable to save cookie file: %s"), e) ++ ++ def _get_mtime(self): ++ """ ++ Get the modified time of the RPM Database. ++ ++ Returns: ++ Unix ticks ++ """ ++ return ( ++ os.path.exists(self.rpmdb_file) ++ and int(os.path.getmtime(self.rpmdb_file)) ++ or 0 ++ ) ++ ++ def _get_checksum(self): ++ """ ++ Get the checksum of the RPM Database. 
++ ++ Returns: ++ hexdigest ++ """ ++ digest = hashlib.sha256() ++ with open(self.rpmdb_file, "rb") as rpm_db_fh: ++ while True: ++ buff = rpm_db_fh.read(0x1000) ++ if not buff: ++ break ++ digest.update(buff) ++ return digest.hexdigest() +-- +2.33.1 + + diff --git a/fix-ip6_interface-grain-to-not-leak-secondary-ipv4-a.patch b/fix-ip6_interface-grain-to-not-leak-secondary-ipv4-a.patch new file mode 100644 index 0000000..5e6bdb7 --- /dev/null +++ b/fix-ip6_interface-grain-to-not-leak-secondary-ipv4-a.patch @@ -0,0 +1,32 @@ +From 0571b8a6d0f4728e604bab9a8ef6f2123546671b Mon Sep 17 00:00:00 2001 +From: =?UTF-8?q?Pablo=20Su=C3=A1rez=20Hern=C3=A1ndez?= + +Date: Fri, 15 Oct 2021 13:08:53 +0100 +Subject: [PATCH] Fix ip6_interface grain to not leak secondary IPv4 + addrs + +--- + salt/grains/core.py | 6 +++++- + 1 file changed, 5 insertions(+), 1 deletion(-) + +diff --git a/salt/grains/core.py b/salt/grains/core.py +index f79110124f..88f1d2c053 100644 +--- a/salt/grains/core.py ++++ b/salt/grains/core.py +@@ -2537,7 +2537,11 @@ def ip6_interfaces(): + iface_ips.append(inet["address"]) + for secondary in ifaces[face].get("secondary", []): + if "address" in secondary: +- iface_ips.append(secondary["address"]) ++ try: ++ socket.inet_pton(socket.AF_INET6, secondary["address"]) ++ iface_ips.append(secondary["address"]) ++ except OSError: ++ pass + ret[face] = iface_ips + return {"ip6_interfaces": ret} + +-- +2.33.0 + + diff --git a/fix-the-regression-for-yumnotify-plugin-456.patch b/fix-the-regression-for-yumnotify-plugin-456.patch new file mode 100644 index 0000000..3e071f6 --- /dev/null +++ b/fix-the-regression-for-yumnotify-plugin-456.patch @@ -0,0 +1,23 @@ +From a33a7b2e8e477912548cfd24c0dff2c38c44eae8 Mon Sep 17 00:00:00 2001 +From: Victor Zhestkov <35733135+vzhestkov@users.noreply.github.com> +Date: Tue, 9 Nov 2021 16:19:56 +0300 +Subject: [PATCH] Fix the regression for yumnotify plugin (#456) + +--- + scripts/suse/yum/plugins/yumnotify.py | 2 +- + 1 file changed, 1 
insertion(+), 1 deletion(-) + +diff --git a/scripts/suse/yum/plugins/yumnotify.py b/scripts/suse/yum/plugins/yumnotify.py +index 0d117e8946..cec5256d20 100644 +--- a/scripts/suse/yum/plugins/yumnotify.py ++++ b/scripts/suse/yum/plugins/yumnotify.py +@@ -63,4 +63,4 @@ def posttrans_hook(conduit): + ) + ) + except OSError as e: +- print("Unable to save the cookie file: %s" % (e), file=sys.stderr) ++ sys.stderr.write("Unable to save the cookie file: %s\n" % (e)) +-- +2.33.1 + + diff --git a/fix-traceback.print_exc-calls-for-test_pip_state-432.patch b/fix-traceback.print_exc-calls-for-test_pip_state-432.patch new file mode 100644 index 0000000..46a4e35 --- /dev/null +++ b/fix-traceback.print_exc-calls-for-test_pip_state-432.patch @@ -0,0 +1,36 @@ +From fba844fbaeb6203350944241a4ad0d7127a79bd5 Mon Sep 17 00:00:00 2001 +From: Victor Zhestkov <35733135+vzhestkov@users.noreply.github.com> +Date: Mon, 8 Nov 2021 17:43:02 +0300 +Subject: [PATCH] Fix traceback.print_exc calls for test_pip_state (#432) + +--- + tests/unit/states/test_pip_state.py | 6 +++--- + 1 file changed, 3 insertions(+), 3 deletions(-) + +diff --git a/tests/unit/states/test_pip_state.py b/tests/unit/states/test_pip_state.py +index 914f62ff23..9e827dbf8a 100644 +--- a/tests/unit/states/test_pip_state.py ++++ b/tests/unit/states/test_pip_state.py +@@ -439,15 +439,15 @@ class PipStateInstallationErrorTest(TestCase): + import salt.states.pip_state + salt.states.pip_state.InstallationError + except ImportError as exc: +- traceback.print_exc(exc, file=sys.stdout) ++ traceback.print_exc(file=sys.stdout) + sys.stdout.flush() + sys.exit(1) + except AttributeError as exc: +- traceback.print_exc(exc, file=sys.stdout) ++ traceback.print_exc(file=sys.stdout) + sys.stdout.flush() + sys.exit(2) + except Exception as exc: +- traceback.print_exc(exc, file=sys.stdout) ++ traceback.print_exc(file=sys.stdout) + sys.stdout.flush() + sys.exit(3) + sys.exit(0) +-- +2.33.1 + + diff --git 
a/mock-ip_addrs-in-utils-minions.py-unit-test-443.patch b/mock-ip_addrs-in-utils-minions.py-unit-test-443.patch new file mode 100644 index 0000000..a5067c0 --- /dev/null +++ b/mock-ip_addrs-in-utils-minions.py-unit-test-443.patch @@ -0,0 +1,56 @@ +From 2ea56dd17378fe2f41de04a9c1786d27fec9a266 Mon Sep 17 00:00:00 2001 +From: Alexander Graul +Date: Mon, 25 Oct 2021 10:31:10 +0200 +Subject: [PATCH] Mock ip_addrs() in utils/minions.py unit test (#443) +MIME-Version: 1.0 +Content-Type: text/plain; charset=UTF-8 +Content-Transfer-Encoding: 8bit + +Previously the test used `salt.utils.network.ip_addrs()' in the same way +that the tested code did. This worked well as long as at least one IP +address was returned by `salt.utils.network.ip_addrs()'. + +Since this is a unit test, it should not depend on the environment, +it should just work™, even if there are no real IP addresses assigned to +the system (or container) that runs the test. + +Co-authored-by: Pablo Suárez Hernández + +Co-authored-by: Pablo Suárez Hernández +--- + tests/pytests/unit/utils/test_minions.py | 17 +++++++++-------- + 1 file changed, 9 insertions(+), 8 deletions(-) + +diff --git a/tests/pytests/unit/utils/test_minions.py b/tests/pytests/unit/utils/test_minions.py +index 0b7a7d3928..5b0cd77216 100644 +--- a/tests/pytests/unit/utils/test_minions.py ++++ b/tests/pytests/unit/utils/test_minions.py +@@ -8,15 +8,16 @@ def test_connected_ids(): + test ckminion connected_ids when + local_port_tcp returns 127.0.0.1 + """ +- opts = {"publish_port": 4505} ++ opts = {"publish_port": 4505, "minion_data_cache": True} + minion = "minion" +- ip = salt.utils.network.ip_addrs() +- mdata = {"grains": {"ipv4": ip, "ipv6": []}} +- ckminions = salt.utils.minions.CkMinions({"minion_data_cache": True}) ++ ips = {"203.0.113.1", "203.0.113.2"} ++ mdata = {"grains": {"ipv4": ips, "ipv6": []}} ++ patch_ip_addrs = patch("salt.utils.network.local_port_tcp", return_value=ips) + patch_net = patch("salt.utils.network.local_port_tcp", 
return_value={"127.0.0.1"}) + patch_list = patch("salt.cache.Cache.list", return_value=[minion]) + patch_fetch = patch("salt.cache.Cache.fetch", return_value=mdata) +- with patch.dict(ckminions.opts, opts): +- with patch_net, patch_list, patch_fetch: +- ret = ckminions.connected_ids() +- assert ret == {minion} ++ ++ ckminions = salt.utils.minions.CkMinions(opts) ++ with patch_net, patch_ip_addrs, patch_list, patch_fetch: ++ ret = ckminions.connected_ids() ++ assert ret == {minion} +-- +2.33.1 + + diff --git a/prevent-pkg-plugins-errors-on-missing-cookie-path-bs.patch b/prevent-pkg-plugins-errors-on-missing-cookie-path-bs.patch new file mode 100644 index 0000000..dcfbfe3 --- /dev/null +++ b/prevent-pkg-plugins-errors-on-missing-cookie-path-bs.patch @@ -0,0 +1,147 @@ +From ad5baab333cb80ce47e65605c47c8ca6fc6d4514 Mon Sep 17 00:00:00 2001 +From: Victor Zhestkov <35733135+vzhestkov@users.noreply.github.com> +Date: Mon, 8 Nov 2021 17:42:36 +0300 +Subject: [PATCH] Prevent pkg plugins errors on missing cookie path + (bsc#1186738) - 3002.2 (#415) + +* Prevent pkg plugins errors on missing cookie path (bsc#1186738) + +* Narrowing down exception handling + +* Modify for Python 3 only + +* Fix yumnotify +--- + scripts/suse/dpkg/dpkgnotify | 18 ++++++++++++++--- + scripts/suse/yum/plugins/README.md | 2 +- + scripts/suse/yum/plugins/yumnotify.py | 17 ++++++++++++---- + scripts/suse/zypper/plugins/commit/zyppnotify | 20 ++++++++++++------- + 4 files changed, 42 insertions(+), 15 deletions(-) + +diff --git a/scripts/suse/dpkg/dpkgnotify b/scripts/suse/dpkg/dpkgnotify +index d3ad3d2ba9..3d6d038a98 100644 +--- a/scripts/suse/dpkg/dpkgnotify ++++ b/scripts/suse/dpkg/dpkgnotify +@@ -2,10 +2,12 @@ + + import os + import hashlib ++import sys + + CK_PATH = "/var/cache/salt/minion/dpkg.cookie" + DPKG_PATH = "/var/lib/dpkg/status" + ++ + def _get_mtime(): + """ + Get the modified time of the Package Database. 
+@@ -35,9 +37,19 @@ def dpkg_post_invoke(): + """ + Hook after the package installation transaction. + """ +- if 'SALT_RUNNING' not in os.environ: +- with open(CK_PATH, 'w') as ck_fh: +- ck_fh.write('{chksum} {mtime}\n'.format(chksum=_get_checksum(), mtime=_get_mtime())) ++ if "SALT_RUNNING" not in os.environ: ++ try: ++ ck_dir = os.path.dirname(CK_PATH) ++ if not os.path.exists(ck_dir): ++ os.makedirs(ck_dir) ++ with open(CK_PATH, "w") as ck_fh: ++ ck_fh.write( ++ "{chksum} {mtime}\n".format( ++ chksum=_get_checksum(), mtime=_get_mtime() ++ ) ++ ) ++ except OSError as e: ++ print("Unable to save the cookie file: %s" % (e), file=sys.stderr) + + + if __name__ == "__main__": +diff --git a/scripts/suse/yum/plugins/README.md b/scripts/suse/yum/plugins/README.md +index cb3abd2260..3515845b31 100644 +--- a/scripts/suse/yum/plugins/README.md ++++ b/scripts/suse/yum/plugins/README.md +@@ -11,7 +11,7 @@ Configuration files are going to: + + Plugin itself goes to: + +- `/usr/share/yum-plugins/[name].conf` ++ `/usr/share/yum-plugins/[name].py` + + ## Permissions + +diff --git a/scripts/suse/yum/plugins/yumnotify.py b/scripts/suse/yum/plugins/yumnotify.py +index 4e137191a0..0d117e8946 100644 +--- a/scripts/suse/yum/plugins/yumnotify.py ++++ b/scripts/suse/yum/plugins/yumnotify.py +@@ -5,6 +5,7 @@ + + import hashlib + import os ++import sys + + from yum.plugins import TYPE_CORE + +@@ -51,7 +52,15 @@ def posttrans_hook(conduit): + """ + # Integrate Yum with Salt + if "SALT_RUNNING" not in os.environ: +- with open(CK_PATH, "w") as ck_fh: +- ck_fh.write( +- "{chksum} {mtime}\n".format(chksum=_get_checksum(), mtime=_get_mtime()) +- ) ++ try: ++ ck_dir = os.path.dirname(CK_PATH) ++ if not os.path.exists(ck_dir): ++ os.makedirs(ck_dir) ++ with open(CK_PATH, "w") as ck_fh: ++ ck_fh.write( ++ "{chksum} {mtime}\n".format( ++ chksum=_get_checksum(), mtime=_get_mtime() ++ ) ++ ) ++ except OSError as e: ++ print("Unable to save the cookie file: %s" % (e), file=sys.stderr) +diff --git 
a/scripts/suse/zypper/plugins/commit/zyppnotify b/scripts/suse/zypper/plugins/commit/zyppnotify +index d6a1bef42b..e3528e87a9 100755 +--- a/scripts/suse/zypper/plugins/commit/zyppnotify ++++ b/scripts/suse/zypper/plugins/commit/zyppnotify +@@ -1,4 +1,4 @@ +-#!/usr/bin/python ++#!/usr/bin/python3 + # + # Copyright (c) 2016 SUSE Linux LLC + # All Rights Reserved. +@@ -52,15 +52,21 @@ class DriftDetector(Plugin): + + def PLUGINEND(self, headers, body): + """ +- Hook when plugin closes Zypper's transaction. ++ Hook when plugin closes Zypper's transaction. + """ + if "SALT_RUNNING" not in os.environ: +- with open(self.ck_path, "w") as ck_fh: +- ck_fh.write( +- "{chksum} {mtime}\n".format( +- chksum=self._get_checksum(), mtime=self._get_mtime() ++ try: ++ ck_dir = os.path.dirname(self.ck_path) ++ if not os.path.exists(ck_dir): ++ os.makedirs(ck_dir) ++ with open(self.ck_path, "w") as ck_fh: ++ ck_fh.write( ++ "{chksum} {mtime}\n".format( ++ chksum=self._get_checksum(), mtime=self._get_mtime() ++ ) + ) +- ) ++ except OSError as e: ++ print("Unable to save the cookie file: %s" % (e), file=sys.stderr) + + self.ack() + +-- +2.33.1 + + diff --git a/refactor-and-improvements-for-transactional-updates-.patch b/refactor-and-improvements-for-transactional-updates-.patch new file mode 100644 index 0000000..f4f0022 --- /dev/null +++ b/refactor-and-improvements-for-transactional-updates-.patch @@ -0,0 +1,1064 @@ +From b458f53eaa4930abab230ca734f5930b95b8def0 Mon Sep 17 00:00:00 2001 +From: =?UTF-8?q?Pablo=20Su=C3=A1rez=20Hern=C3=A1ndez?= + +Date: Thu, 4 Nov 2021 16:07:01 +0000 +Subject: [PATCH] Refactor and improvements for "transactional-updates" + module + +Add --no-return-event option to salt-call + +Act on concurrent flag when running highstate + +Simplify transactional_update module to not use SSH wrapper + +Fix tests for transactional update + +Add changelog + +Fix pylint issues + +Fix failing unit test for state.highstate after refactor + +Remove hack about tukit issue that 
has been already fixed +--- + changelog/61188.fixed | 3 + + salt/cli/caller.py | 2 +- + salt/modules/state.py | 12 +- + salt/modules/transactional_update.py | 235 ++--------- + salt/utils/parsers.py | 6 + + .../unit/modules/test_transactional_update.py | 389 ++---------------- + tests/unit/modules/test_state.py | 2 +- + 7 files changed, 81 insertions(+), 568 deletions(-) + create mode 100644 changelog/61188.fixed + +diff --git a/changelog/61188.fixed b/changelog/61188.fixed +new file mode 100644 +index 0000000000..102a8982a6 +--- /dev/null ++++ b/changelog/61188.fixed +@@ -0,0 +1,3 @@ ++Add "--no-return-event" option to salt-call to prevent sending return event back to master. ++Make "state.highstate" to acts on concurrent flag. ++Simplify "transactional_update" module to not use SSH wrapper and allow more flexible execution +diff --git a/salt/cli/caller.py b/salt/cli/caller.py +index 0e1fa9f90b..af8cc84a29 100644 +--- a/salt/cli/caller.py ++++ b/salt/cli/caller.py +@@ -294,7 +294,7 @@ class BaseCaller: + pass + + # return the job infos back up to the respective minion's master +- if not is_local: ++ if not is_local and not self.opts.get("no_return_event", False): + try: + mret = ret.copy() + mret["jid"] = "req" +diff --git a/salt/modules/state.py b/salt/modules/state.py +index ff6998a0e3..0636b7d894 100644 +--- a/salt/modules/state.py ++++ b/salt/modules/state.py +@@ -1052,9 +1052,15 @@ def highstate(test=None, queue=False, **kwargs): + } + return ret + +- conflict = _check_queue(queue, kwargs) +- if conflict is not None: +- return conflict ++ concurrent = kwargs.get("concurrent", False) ++ ++ if queue: ++ _wait(kwargs.get("__pub_jid")) ++ else: ++ conflict = running(concurrent) ++ if conflict: ++ __context__["retcode"] = salt.defaults.exitcodes.EX_STATE_COMPILER_ERROR ++ return conflict + + orig_test = __opts__.get("test", None) + opts = salt.utils.state.get_sls_opts(__opts__, **kwargs) +diff --git a/salt/modules/transactional_update.py 
b/salt/modules/transactional_update.py +index 799fe08e4d..28b02f8fec 100644 +--- a/salt/modules/transactional_update.py ++++ b/salt/modules/transactional_update.py +@@ -275,11 +275,7 @@ transaction. + + """ + +-import copy + import logging +-import os +-import sys +-import tempfile + + # required by _check_queue invocation later + import time # pylint: disable=unused-import +@@ -312,11 +308,6 @@ def __virtual__(): + return (False, "Module transactional_update requires a transactional system") + + +-class TransactionalUpdateHighstate(salt.client.ssh.state.SSHHighState): +- def _master_tops(self): +- return self.client.master_tops() +- +- + def _global_params(self_update, snapshot=None, quiet=False): + """Utility function to prepare common global parameters.""" + params = ["--non-interactive", "--drop-if-no-change"] +@@ -950,65 +941,42 @@ def call(function, *args, **kwargs): + + activate_transaction = kwargs.pop("activate_transaction", False) + +- # Generate the salt-thin and create a temporary directory in a +- # place that the new transaction will have access to, and where we +- # can untar salt-thin +- thin_path = __utils__["thin.gen_thin"]( +- __opts__["cachedir"], +- extra_mods=__salt__["config.option"]("thin_extra_mods", ""), +- so_mods=__salt__["config.option"]("thin_so_mods", ""), +- ) +- thin_dest_path = tempfile.mkdtemp(dir=__opts__["cachedir"]) +- # Some bug in Salt is preventing us to use `archive.tar` here. A +- # AsyncZeroMQReqChannel is not closed at the end of the salt-call, +- # and makes the client never exit. 
+- # +- # stdout = __salt__['archive.tar']('xzf', thin_path, dest=thin_dest_path) +- # +- stdout = __salt__["cmd.run"](["tar", "xzf", thin_path, "-C", thin_dest_path]) +- if stdout: +- __utils__["files.rm_rf"](thin_dest_path) +- return {"result": False, "comment": stdout} +- + try: + safe_kwargs = salt.utils.args.clean_kwargs(**kwargs) + salt_argv = ( + [ +- "python{}".format(sys.version_info[0]), +- os.path.join(thin_dest_path, "salt-call"), +- "--metadata", +- "--local", +- "--log-file", +- os.path.join(thin_dest_path, "log"), +- "--cachedir", +- os.path.join(thin_dest_path, "cache"), ++ "salt-call", + "--out", + "json", + "-l", + "quiet", ++ "--no-return-event", + "--", + function, + ] + + list(args) + + ["{}={}".format(k, v) for (k, v) in safe_kwargs.items()] + ) ++ + try: + ret_stdout = run([str(x) for x in salt_argv], snapshot="continue") + except salt.exceptions.CommandExecutionError as e: ++ # This happens when there was an problem with salt-call execution + ret_stdout = e.message + + # Process "real" result in stdout + try: + data = __utils__["json.find_json"](ret_stdout) + local = data.get("local", data) +- if isinstance(local, dict) and "retcode" in local: +- __context__["retcode"] = local["retcode"] +- return local.get("return", data) ++ if isinstance(local, dict): ++ if "retcode" in local: ++ __context__["retcode"] = local["retcode"] ++ return local.get("return", local) ++ else: ++ return local + except ValueError: + return {"result": False, "retcode": 1, "comment": ret_stdout} + finally: +- __utils__["files.rm_rf"](thin_dest_path) +- + # Check if reboot is needed + if activate_transaction and pending_transaction(): + reboot() +@@ -1044,49 +1012,7 @@ def apply_(mods=None, **kwargs): + return highstate(**kwargs) + + +-def _create_and_execute_salt_state( +- chunks, file_refs, test, hash_type, activate_transaction +-): +- """Create the salt_state tarball, and execute it in a transaction""" +- +- # Create the tar containing the state pkg and relevant 
files. +- salt.client.ssh.wrapper.state._cleanup_slsmod_low_data(chunks) +- trans_tar = salt.client.ssh.state.prep_trans_tar( +- salt.fileclient.get_file_client(__opts__), chunks, file_refs, __pillar__.value() +- ) +- trans_tar_sum = salt.utils.hashutils.get_hash(trans_tar, hash_type) +- +- ret = None +- +- # Create a temporary directory accesible later by the transaction +- # where we can move the salt_state.tgz +- salt_state_path = tempfile.mkdtemp(dir=__opts__["cachedir"]) +- salt_state_path = os.path.join(salt_state_path, "salt_state.tgz") +- try: +- salt.utils.files.copyfile(trans_tar, salt_state_path) +- ret = call( +- "state.pkg", +- salt_state_path, +- test=test, +- pkg_sum=trans_tar_sum, +- hash_type=hash_type, +- activate_transaction=activate_transaction, +- ) +- finally: +- __utils__["files.rm_rf"](salt_state_path) +- +- return ret +- +- +-def sls( +- mods, +- saltenv="base", +- test=None, +- exclude=None, +- activate_transaction=False, +- queue=False, +- **kwargs +-): ++def sls(mods, activate_transaction=False, queue=False, **kwargs): + """Execute the states in one or more SLS files inside a transaction. + + saltenv +@@ -1132,55 +1058,14 @@ def sls( + if conflict is not None: + return conflict + +- # Get a copy of the pillar data, to avoid overwriting the current +- # pillar, instead the one delegated +- pillar = copy.deepcopy(__pillar__.value()) +- pillar.update(kwargs.get("pillar", {})) +- +- # Clone the options data and apply some default values. 
May not be +- # needed, as this module just delegate +- opts = salt.utils.state.get_sls_opts(__opts__, **kwargs) +- st_ = TransactionalUpdateHighstate( +- opts, pillar, __salt__, salt.fileclient.get_file_client(__opts__) +- ) +- +- if isinstance(mods, str): +- mods = mods.split(",") +- +- high_data, errors = st_.render_highstate({saltenv: mods}) +- if exclude: +- if isinstance(exclude, str): +- exclude = exclude.split(",") +- if "__exclude__" in high_data: +- high_data["__exclude__"].extend(exclude) +- else: +- high_data["__exclude__"] = exclude +- +- high_data, ext_errors = st_.state.reconcile_extend(high_data) +- errors += ext_errors +- errors += st_.state.verify_high(high_data) +- if errors: +- return errors +- +- high_data, req_in_errors = st_.state.requisite_in(high_data) +- errors += req_in_errors +- if errors: +- return errors +- +- high_data = st_.state.apply_exclude(high_data) +- +- # Compile and verify the raw chunks +- chunks = st_.state.compile_high_data(high_data) +- file_refs = salt.client.ssh.state.lowstate_file_refs( +- chunks, +- salt.client.ssh.wrapper.state._merge_extra_filerefs( +- kwargs.get("extra_filerefs", ""), opts.get("extra_filerefs", "") +- ), +- ) ++ concurrent = kwargs.pop("concurrent", True) + +- hash_type = opts["hash_type"] +- return _create_and_execute_salt_state( +- chunks, file_refs, test, hash_type, activate_transaction ++ return call( ++ "state.sls", ++ mods, ++ activate_transaction=activate_transaction, ++ concurrent=concurrent, ++ **kwargs + ) + + +@@ -1216,40 +1101,15 @@ def highstate(activate_transaction=False, queue=False, **kwargs): + if conflict is not None: + return conflict + +- # Get a copy of the pillar data, to avoid overwriting the current +- # pillar, instead the one delegated +- pillar = copy.deepcopy(__pillar__.value()) +- pillar.update(kwargs.get("pillar", {})) +- +- # Clone the options data and apply some default values. 
May not be +- # needed, as this module just delegate +- opts = salt.utils.state.get_sls_opts(__opts__, **kwargs) +- st_ = TransactionalUpdateHighstate( +- opts, pillar, __salt__, salt.fileclient.get_file_client(__opts__) +- ) +- +- # Compile and verify the raw chunks +- chunks = st_.compile_low_chunks() +- file_refs = salt.client.ssh.state.lowstate_file_refs( +- chunks, +- salt.client.ssh.wrapper.state._merge_extra_filerefs( +- kwargs.get("extra_filerefs", ""), opts.get("extra_filerefs", "") +- ), +- ) +- # Check for errors +- for chunk in chunks: +- if not isinstance(chunk, dict): +- __context__["retcode"] = 1 +- return chunks +- +- test = kwargs.pop("test", False) +- hash_type = opts["hash_type"] +- return _create_and_execute_salt_state( +- chunks, file_refs, test, hash_type, activate_transaction ++ return call( ++ "state.highstate", ++ activate_transaction=activate_transaction, ++ concurrent=True, ++ **kwargs + ) + + +-def single(fun, name, test=None, activate_transaction=False, queue=False, **kwargs): ++def single(fun, name, activate_transaction=False, queue=False, **kwargs): + """Execute a single state function with the named kwargs, returns + False if insufficient data is sent to the command + +@@ -1282,44 +1142,11 @@ def single(fun, name, test=None, activate_transaction=False, queue=False, **kwar + if conflict is not None: + return conflict + +- # Get a copy of the pillar data, to avoid overwriting the current +- # pillar, instead the one delegated +- pillar = copy.deepcopy(__pillar__.value()) +- pillar.update(kwargs.get("pillar", {})) +- +- # Clone the options data and apply some default values. 
May not be +- # needed, as this module just delegate +- opts = salt.utils.state.get_sls_opts(__opts__, **kwargs) +- st_ = salt.client.ssh.state.SSHState(opts, pillar) +- +- # state.fun -> [state, fun] +- comps = fun.split(".") +- if len(comps) < 2: +- __context__["retcode"] = 1 +- return "Invalid function passed" +- +- # Create the low chunk, using kwargs as a base +- kwargs.update({"state": comps[0], "fun": comps[1], "__id__": name, "name": name}) +- +- # Verify the low chunk +- err = st_.verify_data(kwargs) +- if err: +- __context__["retcode"] = 1 +- return err +- +- # Must be a list of low-chunks +- chunks = [kwargs] +- +- # Retrieve file refs for the state run, so we can copy relevant +- # files down to the minion before executing the state +- file_refs = salt.client.ssh.state.lowstate_file_refs( +- chunks, +- salt.client.ssh.wrapper.state._merge_extra_filerefs( +- kwargs.get("extra_filerefs", ""), opts.get("extra_filerefs", "") +- ), +- ) +- +- hash_type = opts["hash_type"] +- return _create_and_execute_salt_state( +- chunks, file_refs, test, hash_type, activate_transaction ++ return call( ++ "state.single", ++ fun=fun, ++ name=name, ++ activate_transaction=activate_transaction, ++ concurrent=True, ++ **kwargs + ) +diff --git a/salt/utils/parsers.py b/salt/utils/parsers.py +index 31a2bf0b64..6cfb34a550 100644 +--- a/salt/utils/parsers.py ++++ b/salt/utils/parsers.py +@@ -3079,6 +3079,12 @@ class SaltCallOptionParser( + action="store_true", + help=("Force a refresh of the grains cache."), + ) ++ self.add_option( ++ "--no-return-event", ++ default=False, ++ action="store_true", ++ help=("Do not produce the return event back to master."), ++ ) + self.add_option( + "-t", + "--timeout", +diff --git a/tests/pytests/unit/modules/test_transactional_update.py b/tests/pytests/unit/modules/test_transactional_update.py +index e7293cf3e2..64c06c1693 100644 +--- a/tests/pytests/unit/modules/test_transactional_update.py ++++ 
b/tests/pytests/unit/modules/test_transactional_update.py +@@ -1,5 +1,3 @@ +-import sys +- + import pytest + import salt.loader_context + import salt.modules.state as statemod +@@ -353,114 +351,23 @@ def test_call_fails_input_validation(): + tu.call("") + + +-@patch("tempfile.mkdtemp", MagicMock(return_value="/var/cache/salt/minion/tmp01")) +-def test_call_fails_untar(): +- """Test transactional_update.call when tar fails""" +- utils_mock = { +- "thin.gen_thin": MagicMock(return_value="/salt-thin.tgz"), +- "files.rm_rf": MagicMock(), +- } +- opts_mock = {"cachedir": "/var/cache/salt/minion"} +- salt_mock = { +- "cmd.run": MagicMock(return_value="Error"), +- "config.option": MagicMock(), +- } +- with patch.dict(tu.__utils__, utils_mock), patch.dict( +- tu.__opts__, opts_mock +- ), patch.dict(tu.__salt__, salt_mock): +- assert tu.call("/chroot", "test.ping") == { +- "result": False, +- "comment": "Error", +- } +- +- utils_mock["thin.gen_thin"].assert_called_once() +- salt_mock["config.option"].assert_called() +- salt_mock["cmd.run"].assert_called_once() +- utils_mock["files.rm_rf"].assert_called_once() +- +- +-@patch("tempfile.mkdtemp", MagicMock(return_value="/var/cache/salt/minion/tmp01")) +-def test_call_fails_salt_thin(): +- """Test transactional_update.chroot when fails salt_thin""" +- utils_mock = { +- "thin.gen_thin": MagicMock(return_value="/salt-thin.tgz"), +- "files.rm_rf": MagicMock(), +- "json.find_json": MagicMock(side_effect=ValueError()), +- } +- opts_mock = {"cachedir": "/var/cache/salt/minion"} +- salt_mock = { +- "cmd.run": MagicMock(return_value=""), +- "config.option": MagicMock(), +- "cmd.run_all": MagicMock(return_value={"retcode": 1, "stderr": "Error"}), +- } +- with patch.dict(tu.__utils__, utils_mock), patch.dict( +- tu.__opts__, opts_mock +- ), patch.dict(tu.__salt__, salt_mock): +- assert tu.call("test.ping") == { +- "result": False, +- "retcode": 1, +- "comment": "Error", +- } +- +- utils_mock["thin.gen_thin"].assert_called_once() +- 
salt_mock["config.option"].assert_called() +- salt_mock["cmd.run"].assert_called_once() +- salt_mock["cmd.run_all"].assert_called_with( +- [ +- "transactional-update", +- "--non-interactive", +- "--drop-if-no-change", +- "--no-selfupdate", +- "--continue", +- "--quiet", +- "run", +- "python{}".format(sys.version_info[0]), +- "/var/cache/salt/minion/tmp01/salt-call", +- "--metadata", +- "--local", +- "--log-file", +- "/var/cache/salt/minion/tmp01/log", +- "--cachedir", +- "/var/cache/salt/minion/tmp01/cache", +- "--out", +- "json", +- "-l", +- "quiet", +- "--", +- "test.ping", +- ] +- ) +- utils_mock["files.rm_rf"].assert_called_once() +- +- +-@patch("tempfile.mkdtemp", MagicMock(return_value="/var/cache/salt/minion/tmp01")) + def test_call_fails_function(): + """Test transactional_update.chroot when fails the function""" + utils_mock = { +- "thin.gen_thin": MagicMock(return_value="/salt-thin.tgz"), +- "files.rm_rf": MagicMock(), + "json.find_json": MagicMock(side_effect=ValueError()), + } +- opts_mock = {"cachedir": "/var/cache/salt/minion"} + salt_mock = { +- "cmd.run": MagicMock(return_value=""), +- "config.option": MagicMock(), + "cmd.run_all": MagicMock( + return_value={"retcode": 0, "stdout": "Not found", "stderr": ""} + ), + } +- with patch.dict(tu.__utils__, utils_mock), patch.dict( +- tu.__opts__, opts_mock +- ), patch.dict(tu.__salt__, salt_mock): ++ with patch.dict(tu.__utils__, utils_mock), patch.dict(tu.__salt__, salt_mock): + assert tu.call("test.ping") == { + "result": False, + "retcode": 1, + "comment": "Not found", + } + +- utils_mock["thin.gen_thin"].assert_called_once() +- salt_mock["config.option"].assert_called() +- salt_mock["cmd.run"].assert_called_once() + salt_mock["cmd.run_all"].assert_called_with( + [ + "transactional-update", +@@ -470,47 +377,29 @@ def test_call_fails_function(): + "--continue", + "--quiet", + "run", +- "python{}".format(sys.version_info[0]), +- "/var/cache/salt/minion/tmp01/salt-call", +- "--metadata", +- "--local", +- 
"--log-file", +- "/var/cache/salt/minion/tmp01/log", +- "--cachedir", +- "/var/cache/salt/minion/tmp01/cache", ++ "salt-call", + "--out", + "json", + "-l", + "quiet", ++ "--no-return-event", + "--", + "test.ping", + ] + ) +- utils_mock["files.rm_rf"].assert_called_once() + + +-@patch("tempfile.mkdtemp", MagicMock(return_value="/var/cache/salt/minion/tmp01")) + def test_call_success_no_reboot(): + """Test transactional_update.chroot when succeed""" + utils_mock = { +- "thin.gen_thin": MagicMock(return_value="/salt-thin.tgz"), +- "files.rm_rf": MagicMock(), + "json.find_json": MagicMock(return_value={"return": "result"}), + } +- opts_mock = {"cachedir": "/var/cache/salt/minion"} + salt_mock = { +- "cmd.run": MagicMock(return_value=""), +- "config.option": MagicMock(), + "cmd.run_all": MagicMock(return_value={"retcode": 0, "stdout": ""}), + } +- with patch.dict(tu.__utils__, utils_mock), patch.dict( +- tu.__opts__, opts_mock +- ), patch.dict(tu.__salt__, salt_mock): ++ with patch.dict(tu.__utils__, utils_mock), patch.dict(tu.__salt__, salt_mock): + assert tu.call("test.ping") == "result" + +- utils_mock["thin.gen_thin"].assert_called_once() +- salt_mock["config.option"].assert_called() +- salt_mock["cmd.run"].assert_called_once() + salt_mock["cmd.run_all"].assert_called_with( + [ + "transactional-update", +@@ -520,43 +409,30 @@ def test_call_success_no_reboot(): + "--continue", + "--quiet", + "run", +- "python{}".format(sys.version_info[0]), +- "/var/cache/salt/minion/tmp01/salt-call", +- "--metadata", +- "--local", +- "--log-file", +- "/var/cache/salt/minion/tmp01/log", +- "--cachedir", +- "/var/cache/salt/minion/tmp01/cache", ++ "salt-call", + "--out", + "json", + "-l", + "quiet", ++ "--no-return-event", + "--", + "test.ping", + ] + ) +- utils_mock["files.rm_rf"].assert_called_once() + + +-@patch("tempfile.mkdtemp", MagicMock(return_value="/var/cache/salt/minion/tmp01")) + def test_call_success_reboot(): + """Test transactional_update.chroot when succeed and 
reboot""" + pending_transaction_mock = MagicMock(return_value=True) + reboot_mock = MagicMock() + utils_mock = { +- "thin.gen_thin": MagicMock(return_value="/salt-thin.tgz"), +- "files.rm_rf": MagicMock(), + "json.find_json": MagicMock(return_value={"return": "result"}), + } +- opts_mock = {"cachedir": "/var/cache/salt/minion"} + salt_mock = { +- "cmd.run": MagicMock(return_value=""), +- "config.option": MagicMock(), + "cmd.run_all": MagicMock(return_value={"retcode": 0, "stdout": ""}), + } + with patch.dict(tu.__utils__, utils_mock), patch.dict( +- tu.__opts__, opts_mock ++ tu.__salt__, salt_mock + ), patch.dict(tu.__salt__, salt_mock), patch( + "salt.modules.transactional_update.pending_transaction", + pending_transaction_mock, +@@ -567,9 +443,6 @@ def test_call_success_reboot(): + tu.call("transactional_update.dup", activate_transaction=True) == "result" + ) + +- utils_mock["thin.gen_thin"].assert_called_once() +- salt_mock["config.option"].assert_called() +- salt_mock["cmd.run"].assert_called_once() + salt_mock["cmd.run_all"].assert_called_with( + [ + "transactional-update", +@@ -579,49 +452,31 @@ def test_call_success_reboot(): + "--continue", + "--quiet", + "run", +- "python{}".format(sys.version_info[0]), +- "/var/cache/salt/minion/tmp01/salt-call", +- "--metadata", +- "--local", +- "--log-file", +- "/var/cache/salt/minion/tmp01/log", +- "--cachedir", +- "/var/cache/salt/minion/tmp01/cache", ++ "salt-call", + "--out", + "json", + "-l", + "quiet", ++ "--no-return-event", + "--", + "transactional_update.dup", + ] + ) +- utils_mock["files.rm_rf"].assert_called_once() + pending_transaction_mock.assert_called_once() + reboot_mock.assert_called_once() + + +-@patch("tempfile.mkdtemp", MagicMock(return_value="/var/cache/salt/minion/tmp01")) + def test_call_success_parameters(): + """Test transactional_update.chroot when succeed with parameters""" + utils_mock = { +- "thin.gen_thin": MagicMock(return_value="/salt-thin.tgz"), +- "files.rm_rf": MagicMock(), + 
"json.find_json": MagicMock(return_value={"return": "result"}), + } +- opts_mock = {"cachedir": "/var/cache/salt/minion"} + salt_mock = { +- "cmd.run": MagicMock(return_value=""), +- "config.option": MagicMock(), + "cmd.run_all": MagicMock(return_value={"retcode": 0, "stdout": ""}), + } +- with patch.dict(tu.__utils__, utils_mock), patch.dict( +- tu.__opts__, opts_mock +- ), patch.dict(tu.__salt__, salt_mock): ++ with patch.dict(tu.__utils__, utils_mock), patch.dict(tu.__salt__, salt_mock): + assert tu.call("module.function", key="value") == "result" + +- utils_mock["thin.gen_thin"].assert_called_once() +- salt_mock["config.option"].assert_called() +- salt_mock["cmd.run"].assert_called_once() + salt_mock["cmd.run_all"].assert_called_with( + [ + "transactional-update", +@@ -631,75 +486,32 @@ def test_call_success_parameters(): + "--continue", + "--quiet", + "run", +- "python{}".format(sys.version_info[0]), +- "/var/cache/salt/minion/tmp01/salt-call", +- "--metadata", +- "--local", +- "--log-file", +- "/var/cache/salt/minion/tmp01/log", +- "--cachedir", +- "/var/cache/salt/minion/tmp01/cache", ++ "salt-call", + "--out", + "json", + "-l", + "quiet", ++ "--no-return-event", + "--", + "module.function", + "key=value", + ] + ) +- utils_mock["files.rm_rf"].assert_called_once() + + + def test_sls(): + """Test transactional_update.sls""" +- transactional_update_highstate_mock = MagicMock() +- transactional_update_highstate_mock.return_value = ( +- transactional_update_highstate_mock +- ) +- transactional_update_highstate_mock.render_highstate.return_value = (None, []) +- transactional_update_highstate_mock.state.reconcile_extend.return_value = (None, []) +- transactional_update_highstate_mock.state.requisite_in.return_value = (None, []) +- transactional_update_highstate_mock.state.verify_high.return_value = [] +- +- _create_and_execute_salt_state_mock = MagicMock(return_value="result") +- opts_mock = { +- "hash_type": "md5", +- } + salt_mock = { + "saltutil.is_running": 
MagicMock(return_value=[]), + } +- get_sls_opts_mock = MagicMock(return_value=opts_mock) +- with patch.dict(tu.__opts__, opts_mock), patch.dict( +- statemod.__salt__, salt_mock +- ), patch("salt.utils.state.get_sls_opts", get_sls_opts_mock), patch( +- "salt.fileclient.get_file_client", MagicMock() +- ), patch( +- "salt.modules.transactional_update.TransactionalUpdateHighstate", +- transactional_update_highstate_mock, +- ), patch( +- "salt.modules.transactional_update._create_and_execute_salt_state", +- _create_and_execute_salt_state_mock, ++ with patch.dict(statemod.__salt__, salt_mock), patch( ++ "salt.modules.transactional_update.call", MagicMock(return_value="result") + ): + assert tu.sls("module") == "result" +- _create_and_execute_salt_state_mock.assert_called_once() + + + def test_sls_queue_true(): + """Test transactional_update.sls""" +- transactional_update_highstate_mock = MagicMock() +- transactional_update_highstate_mock.return_value = ( +- transactional_update_highstate_mock +- ) +- transactional_update_highstate_mock.render_highstate.return_value = (None, []) +- transactional_update_highstate_mock.state.reconcile_extend.return_value = (None, []) +- transactional_update_highstate_mock.state.requisite_in.return_value = (None, []) +- transactional_update_highstate_mock.state.verify_high.return_value = [] +- +- _create_and_execute_salt_state_mock = MagicMock(return_value="result") +- opts_mock = { +- "hash_type": "md5", +- } + salt_mock = { + "saltutil.is_running": MagicMock( + side_effect=[ +@@ -714,37 +526,14 @@ def test_sls_queue_true(): + ] + ), + } +- get_sls_opts_mock = MagicMock(return_value=opts_mock) +- with patch.dict(tu.__opts__, opts_mock), patch.dict( +- statemod.__salt__, salt_mock +- ), patch("salt.utils.state.get_sls_opts", get_sls_opts_mock), patch( +- "salt.fileclient.get_file_client", MagicMock() +- ), patch( +- "salt.modules.transactional_update.TransactionalUpdateHighstate", +- transactional_update_highstate_mock, +- ), patch( +- 
"salt.modules.transactional_update._create_and_execute_salt_state", +- _create_and_execute_salt_state_mock, ++ with patch.dict(statemod.__salt__, salt_mock), patch( ++ "salt.modules.transactional_update.call", MagicMock(return_value="result") + ): + assert tu.sls("module", queue=True) == "result" +- _create_and_execute_salt_state_mock.assert_called_once() + + + def test_sls_queue_false_failing(): + """Test transactional_update.sls""" +- transactional_update_highstate_mock = MagicMock() +- transactional_update_highstate_mock.return_value = ( +- transactional_update_highstate_mock +- ) +- transactional_update_highstate_mock.render_highstate.return_value = (None, []) +- transactional_update_highstate_mock.state.reconcile_extend.return_value = (None, []) +- transactional_update_highstate_mock.state.requisite_in.return_value = (None, []) +- transactional_update_highstate_mock.state.verify_high.return_value = [] +- +- _create_and_execute_salt_state_mock = MagicMock(return_value="result") +- opts_mock = { +- "hash_type": "md5", +- } + salt_mock = { + "saltutil.is_running": MagicMock( + side_effect=[ +@@ -759,65 +548,27 @@ def test_sls_queue_false_failing(): + ] + ), + } +- get_sls_opts_mock = MagicMock(return_value=opts_mock) +- with patch.dict(tu.__opts__, opts_mock), patch.dict( +- statemod.__salt__, salt_mock +- ), patch("salt.utils.state.get_sls_opts", get_sls_opts_mock), patch( +- "salt.fileclient.get_file_client", MagicMock() +- ), patch( +- "salt.modules.transactional_update.TransactionalUpdateHighstate", +- transactional_update_highstate_mock, +- ), patch( +- "salt.modules.transactional_update._create_and_execute_salt_state", +- _create_and_execute_salt_state_mock, ++ with patch.dict(statemod.__salt__, salt_mock), patch( ++ "salt.modules.transactional_update.call", MagicMock(return_value="result") + ): + assert tu.sls("module", queue=False) == [ + 'The function "state.running" is running as PID 4126 and was started at 2015, Mar 25 12:34:07.204096 with jid 
20150325123407204096' + ] +- _create_and_execute_salt_state_mock.assert_not_called() + + + def test_highstate(): + """Test transactional_update.highstage""" +- transactional_update_highstate_mock = MagicMock() +- transactional_update_highstate_mock.return_value = ( +- transactional_update_highstate_mock +- ) +- +- _create_and_execute_salt_state_mock = MagicMock(return_value="result") +- opts_mock = { +- "hash_type": "md5", +- } + salt_mock = { + "saltutil.is_running": MagicMock(return_value=[]), + } +- get_sls_opts_mock = MagicMock(return_value=opts_mock) +- with patch.dict(tu.__opts__, opts_mock), patch.dict( +- statemod.__salt__, salt_mock +- ), patch("salt.utils.state.get_sls_opts", get_sls_opts_mock), patch( +- "salt.fileclient.get_file_client", MagicMock() +- ), patch( +- "salt.modules.transactional_update.TransactionalUpdateHighstate", +- transactional_update_highstate_mock, +- ), patch( +- "salt.modules.transactional_update._create_and_execute_salt_state", +- _create_and_execute_salt_state_mock, ++ with patch.dict(statemod.__salt__, salt_mock), patch( ++ "salt.modules.transactional_update.call", MagicMock(return_value="result") + ): + assert tu.highstate() == "result" +- _create_and_execute_salt_state_mock.assert_called_once() + + + def test_highstate_queue_true(): + """Test transactional_update.highstage""" +- transactional_update_highstate_mock = MagicMock() +- transactional_update_highstate_mock.return_value = ( +- transactional_update_highstate_mock +- ) +- +- _create_and_execute_salt_state_mock = MagicMock(return_value="result") +- opts_mock = { +- "hash_type": "md5", +- } + salt_mock = { + "saltutil.is_running": MagicMock( + side_effect=[ +@@ -832,33 +583,14 @@ def test_highstate_queue_true(): + ] + ), + } +- get_sls_opts_mock = MagicMock(return_value=opts_mock) +- with patch.dict(tu.__opts__, opts_mock), patch.dict( +- statemod.__salt__, salt_mock +- ), patch("salt.utils.state.get_sls_opts", get_sls_opts_mock), patch( +- 
"salt.fileclient.get_file_client", MagicMock() +- ), patch( +- "salt.modules.transactional_update.TransactionalUpdateHighstate", +- transactional_update_highstate_mock, +- ), patch( +- "salt.modules.transactional_update._create_and_execute_salt_state", +- _create_and_execute_salt_state_mock, ++ with patch.dict(statemod.__salt__, salt_mock), patch( ++ "salt.modules.transactional_update.call", MagicMock(return_value="result") + ): + assert tu.highstate(queue=True) == "result" +- _create_and_execute_salt_state_mock.assert_called_once() + + + def test_highstate_queue_false_failing(): + """Test transactional_update.highstage""" +- transactional_update_highstate_mock = MagicMock() +- transactional_update_highstate_mock.return_value = ( +- transactional_update_highstate_mock +- ) +- +- _create_and_execute_salt_state_mock = MagicMock(return_value="result") +- opts_mock = { +- "hash_type": "md5", +- } + salt_mock = { + "saltutil.is_running": MagicMock( + side_effect=[ +@@ -873,62 +605,27 @@ def test_highstate_queue_false_failing(): + ] + ), + } +- get_sls_opts_mock = MagicMock(return_value=opts_mock) +- with patch.dict(tu.__opts__, opts_mock), patch.dict( +- statemod.__salt__, salt_mock +- ), patch("salt.utils.state.get_sls_opts", get_sls_opts_mock), patch( +- "salt.fileclient.get_file_client", MagicMock() +- ), patch( +- "salt.modules.transactional_update.TransactionalUpdateHighstate", +- transactional_update_highstate_mock, +- ), patch( +- "salt.modules.transactional_update._create_and_execute_salt_state", +- _create_and_execute_salt_state_mock, ++ with patch.dict(statemod.__salt__, salt_mock), patch( ++ "salt.modules.transactional_update.call", MagicMock(return_value="result") + ): + assert tu.highstate(queue=False) == [ + 'The function "state.running" is running as PID 4126 and was started at 2015, Mar 25 12:34:07.204096 with jid 20150325123407204096' + ] +- _create_and_execute_salt_state_mock.assert_not_called() + + + def test_single(): + """Test 
transactional_update.single""" +- ssh_state_mock = MagicMock() +- ssh_state_mock.return_value = ssh_state_mock +- ssh_state_mock.verify_data.return_value = None +- +- _create_and_execute_salt_state_mock = MagicMock(return_value="result") +- opts_mock = { +- "hash_type": "md5", +- } + salt_mock = { + "saltutil.is_running": MagicMock(return_value=[]), + } +- get_sls_opts_mock = MagicMock(return_value=opts_mock) +- with patch.dict(tu.__opts__, opts_mock), patch.dict( +- statemod.__salt__, salt_mock +- ), patch("salt.utils.state.get_sls_opts", get_sls_opts_mock), patch( +- "salt.fileclient.get_file_client", MagicMock() +- ), patch( +- "salt.client.ssh.state.SSHState", ssh_state_mock +- ), patch( +- "salt.modules.transactional_update._create_and_execute_salt_state", +- _create_and_execute_salt_state_mock, ++ with patch.dict(statemod.__salt__, salt_mock), patch( ++ "salt.modules.transactional_update.call", MagicMock(return_value="result") + ): + assert tu.single("pkg.installed", name="emacs") == "result" +- _create_and_execute_salt_state_mock.assert_called_once() + + + def test_single_queue_false_failing(): + """Test transactional_update.single""" +- ssh_state_mock = MagicMock() +- ssh_state_mock.return_value = ssh_state_mock +- ssh_state_mock.verify_data.return_value = None +- +- _create_and_execute_salt_state_mock = MagicMock(return_value="result") +- opts_mock = { +- "hash_type": "md5", +- } + salt_mock = { + "saltutil.is_running": MagicMock( + side_effect=[ +@@ -943,33 +640,16 @@ def test_single_queue_false_failing(): + ] + ), + } +- get_sls_opts_mock = MagicMock(return_value=opts_mock) +- with patch.dict(tu.__opts__, opts_mock), patch.dict( +- statemod.__salt__, salt_mock +- ), patch("salt.utils.state.get_sls_opts", get_sls_opts_mock), patch( +- "salt.fileclient.get_file_client", MagicMock() +- ), patch( +- "salt.client.ssh.state.SSHState", ssh_state_mock +- ), patch( +- "salt.modules.transactional_update._create_and_execute_salt_state", +- 
_create_and_execute_salt_state_mock, ++ with patch.dict(statemod.__salt__, salt_mock), patch( ++ "salt.modules.transactional_update.call", MagicMock(return_value="result") + ): + assert tu.single("pkg.installed", name="emacs", queue=False) == [ + 'The function "state.running" is running as PID 4126 and was started at 2015, Mar 25 12:34:07.204096 with jid 20150325123407204096' + ] +- _create_and_execute_salt_state_mock.assert_not_called() + + + def test_single_queue_true(): + """Test transactional_update.single""" +- ssh_state_mock = MagicMock() +- ssh_state_mock.return_value = ssh_state_mock +- ssh_state_mock.verify_data.return_value = None +- +- _create_and_execute_salt_state_mock = MagicMock(return_value="result") +- opts_mock = { +- "hash_type": "md5", +- } + salt_mock = { + "saltutil.is_running": MagicMock( + side_effect=[ +@@ -984,16 +664,7 @@ def test_single_queue_true(): + ] + ), + } +- get_sls_opts_mock = MagicMock(return_value=opts_mock) +- with patch.dict(tu.__opts__, opts_mock), patch.dict( +- statemod.__salt__, salt_mock +- ), patch("salt.utils.state.get_sls_opts", get_sls_opts_mock), patch( +- "salt.fileclient.get_file_client", MagicMock() +- ), patch( +- "salt.client.ssh.state.SSHState", ssh_state_mock +- ), patch( +- "salt.modules.transactional_update._create_and_execute_salt_state", +- _create_and_execute_salt_state_mock, ++ with patch.dict(statemod.__salt__, salt_mock), patch( ++ "salt.modules.transactional_update.call", MagicMock(return_value="result") + ): + assert tu.single("pkg.installed", name="emacs", queue=True) == "result" +- _create_and_execute_salt_state_mock.assert_called_once() +diff --git a/tests/unit/modules/test_state.py b/tests/unit/modules/test_state.py +index ffa5428873..03f434dbcc 100644 +--- a/tests/unit/modules/test_state.py ++++ b/tests/unit/modules/test_state.py +@@ -801,7 +801,7 @@ class StateTestCase(TestCase, LoaderModuleMockMixin): + ) + + mock = MagicMock(side_effect=["A", None, None]) +- with patch.object(state, 
"_check_queue", mock): ++ with patch.object(state, "running", mock): + self.assertEqual(state.highstate("whitelist=sls1.sls"), "A") + + with patch.dict(state.__opts__, {"test": "A"}): +-- +2.33.1 + + diff --git a/salt.changes b/salt.changes index b3f68cc..40ce008 100644 --- a/salt.changes +++ b/salt.changes @@ -1,3 +1,40 @@ +------------------------------------------------------------------- +Mon Nov 15 15:14:54 UTC 2021 - Pablo Suárez Hernández + +- Simplify "transactional_update" module to not use SSH wrapper and allow more flexible execution +- Add "--no-return-event" option to salt-call to prevent sending return event back to master. +- Make "state.highstate" act on the concurrent flag. +- Fix print regression for yumnotify plugin + +- Added: + * refactor-and-improvements-for-transactional-updates-.patch + * fix-the-regression-for-yumnotify-plugin-456.patch + +------------------------------------------------------------------- +Tue Nov 9 08:07:14 UTC 2021 - Victor Zhestkov + +- Use dnfnotify instead of yumnotify for relevant distros +- dnfnotify pkgset plugin implementation +- Add rpm_vercmp python library support for version comparison +- Prevent pkg plugins errors on missing cookie path (bsc#1186738) + +- Added: + * add-rpm_vercmp-python-library-for-version-comparison.patch + * mock-ip_addrs-in-utils-minions.py-unit-test-443.patch + * dnfnotify-pkgset-plugin-implementation-3002.2-450.patch + * fix-traceback.print_exc-calls-for-test_pip_state-432.patch + * prevent-pkg-plugins-errors-on-missing-cookie-path-bs.patch + +------------------------------------------------------------------- +Fri Oct 15 15:06:25 UTC 2021 - Pablo Suárez Hernández + +- Fix ip6_interface grain to not leak secondary IPv4 aliases (bsc#1191412) +- Make "salt-api" package require python3-cherrypy on RHEL systems +- Make "tar" required for "salt-transactional-update" package + +- Added: + * fix-ip6_interface-grain-to-not-leak-secondary-ipv4-a.patch + 
------------------------------------------------------------------- Fri Oct 8 15:48:09 UTC 2021 - Pablo Suárez Hernández diff --git a/salt.spec b/salt.spec index 9a330bc..a1a2d30 100644 --- a/salt.spec +++ b/salt.spec @@ -293,6 +293,22 @@ Patch71: 3003.3-do-not-consider-skipped-targets-as-failed-for.patch Patch72: fix-crash-when-calling-manage.not_alive-runners.patch # PATCH-FIX_UPSTREAM https://github.com/saltstack/salt/pull/61014 Patch73: fix-issues-with-salt-ssh-s-extra-filerefs.patch +# PATCH-FIX_UPSTREAM: https://github.com/saltstack/salt/pull/61061 +Patch74: fix-ip6_interface-grain-to-not-leak-secondary-ipv4-a.patch +# PATCH-FIX_OPENSUSE https://github.com/openSUSE/salt/pull/432 (missing upstream PR) +Patch75: fix-traceback.print_exc-calls-for-test_pip_state-432.patch +# PATCH-FIX_OPENSUSE https://github.com/openSUSE/salt/pull/415 (missing upstream PR) +Patch76: prevent-pkg-plugins-errors-on-missing-cookie-path-bs.patch +# PATCH-FIX_UPSTREAM: https://github.com/saltstack/salt/pull/60815 +Patch77: add-rpm_vercmp-python-library-for-version-comparison.patch +# PATCH-FIX_UPSTREAM: https://github.com/saltstack/salt/pull/61180 +Patch78: dnfnotify-pkgset-plugin-implementation-3002.2-450.patch +# PATCH-FIX_UPSTREAM: https://github.com/saltstack/salt/pull/60324 +Patch79: mock-ip_addrs-in-utils-minions.py-unit-test-443.patch +# PATCH-FIX_OPENSUSE https://github.com/openSUSE/salt/pull/456 (missing upstream PR) +Patch80: fix-the-regression-for-yumnotify-plugin-456.patch +# PATCH-FIX_UPSTREAM: https://github.com/saltstack/salt/pull/61188 +Patch81: refactor-and-improvements-for-transactional-updates-.patch BuildRoot: %{_tmppath}/%{name}-%{version}-build @@ -483,7 +499,11 @@ Summary: The api for Salt a parallel remote execution system Group: System/Management Requires: %{name} = %{version}-%{release} Requires: %{name}-master = %{version}-%{release} +%if 0%{?suse_version} Requires: python3-CherryPy >= 3.2.2 +%else +Requires: python3-cherrypy >= 3.2.2 +%endif %description api 
salt-api is a modular interface on top of Salt that can provide a variety of entry points into a running Salt system. @@ -696,6 +716,7 @@ Summary: Transactional update executor configuration Group: System/Management Requires: %{name} = %{version}-%{release} Requires: %{name}-minion = %{version}-%{release} +Requires: tar %description transactional-update For transactional systems, like MicroOS, Salt can operate @@ -781,6 +802,14 @@ cp %{S:6} . %patch71 -p1 %patch72 -p1 %patch73 -p1 +%patch74 -p1 +%patch75 -p1 +%patch76 -p1 +%patch77 -p1 +%patch78 -p1 +%patch79 -p1 +%patch80 -p1 +%patch81 -p1 %build # Putting /usr/bin at the front of $PATH is needed for RHEL/RES 7. Without this @@ -864,10 +893,22 @@ sed -i '1s=^#!/usr/bin/\(python\|env python\)[0-9.]*=#!/usr/bin/python3=' %{buil # Install Yum plugins only on RH machines %if 0%{?fedora} || 0%{?rhel} +%if 0%{?fedora} >= 22 || 0%{?rhel} >= 8 +install -Dd %{buildroot}%{python3_sitelib}/dnf-plugins +install -Dd %{buildroot}%{python3_sitelib}/dnf-plugins/__pycache__ +install -Dd %{buildroot}%{_sysconfdir}/dnf/plugins +%{__install} scripts/suse/dnf/plugins/dnfnotify.py %{buildroot}%{python3_sitelib}/dnf-plugins +%{__install} scripts/suse/dnf/plugins/dnfnotify.conf %{buildroot}%{_sysconfdir}/dnf/plugins +%{__python3} -m compileall -d %{python3_sitelib}/dnf-plugins %{buildroot}%{python3_sitelib}/dnf-plugins/dnfnotify.py +%{__python3} -O -m compileall -d %{python3_sitelib}/dnf-plugins %{buildroot}%{python3_sitelib}/dnf-plugins/dnfnotify.py +%else install -Dd %{buildroot}%{_prefix}/share/yum-plugins -install -Dd %{buildroot}/etc/yum/pluginconf.d +install -Dd %{buildroot}%{_sysconfdir}/yum/pluginconf.d %{__install} scripts/suse/yum/plugins/yumnotify.py %{buildroot}%{_prefix}/share/yum-plugins -%{__install} scripts/suse/yum/plugins/yumnotify.conf %{buildroot}/etc/yum/pluginconf.d +%{__install} scripts/suse/yum/plugins/yumnotify.conf %{buildroot}%{_sysconfdir}/yum/pluginconf.d +%{__python} -m compileall -d 
%{_prefix}/share/yum-plugins %{buildroot}%{_prefix}/share/yum-plugins/yumnotify.py +%{__python} -O -m compileall -d %{_prefix}/share/yum-plugins %{buildroot}%{_prefix}/share/yum-plugins/yumnotify.py +%endif %endif ## install init and systemd scripts @@ -1323,8 +1364,14 @@ rm -f %{_localstatedir}/cache/salt/minion/thin/version # Install Yum plugins only on RH machines %if 0%{?fedora} || 0%{?rhel} -%{_prefix}/share/yum-plugins/ -/etc/yum/pluginconf.d/yumnotify.conf +%if 0%{?fedora} >= 22 || 0%{?rhel} >= 8 +%{python3_sitelib}/dnf-plugins/dnfnotify.py +%{python3_sitelib}/dnf-plugins/__pycache__/dnfnotify.* +%{_sysconfdir}/dnf/plugins/dnfnotify.conf +%else +%{_prefix}/share/yum-plugins/yumnotify.* +%{_sysconfdir}/yum/pluginconf.d/yumnotify.conf +%endif %endif %if %{with systemd}