Sync from SUSE:ALP:Source:Standard:1.0 salt revision 7acf5f38fdb4dfcaa487fd53bc3679fb

This commit is contained in:
Adrian Schröter 2023-09-22 09:44:49 +02:00
commit 101832fd70
92 changed files with 27244 additions and 0 deletions

23
.gitattributes vendored Normal file
View File

@ -0,0 +1,23 @@
## Default LFS
*.7z filter=lfs diff=lfs merge=lfs -text
*.bsp filter=lfs diff=lfs merge=lfs -text
*.bz2 filter=lfs diff=lfs merge=lfs -text
*.gem filter=lfs diff=lfs merge=lfs -text
*.gz filter=lfs diff=lfs merge=lfs -text
*.jar filter=lfs diff=lfs merge=lfs -text
*.lz filter=lfs diff=lfs merge=lfs -text
*.lzma filter=lfs diff=lfs merge=lfs -text
*.obscpio filter=lfs diff=lfs merge=lfs -text
*.oxt filter=lfs diff=lfs merge=lfs -text
*.pdf filter=lfs diff=lfs merge=lfs -text
*.png filter=lfs diff=lfs merge=lfs -text
*.rpm filter=lfs diff=lfs merge=lfs -text
*.tbz filter=lfs diff=lfs merge=lfs -text
*.tbz2 filter=lfs diff=lfs merge=lfs -text
*.tgz filter=lfs diff=lfs merge=lfs -text
*.ttf filter=lfs diff=lfs merge=lfs -text
*.txz filter=lfs diff=lfs merge=lfs -text
*.whl filter=lfs diff=lfs merge=lfs -text
*.xz filter=lfs diff=lfs merge=lfs -text
*.zip filter=lfs diff=lfs merge=lfs -text
*.zst filter=lfs diff=lfs merge=lfs -text

View File

@ -0,0 +1,505 @@
From 327a5e5b24c4fa047df44b245abd672e02999cca Mon Sep 17 00:00:00 2001
From: Michael Calmer <Michael.Calmer@suse.de>
Date: Mon, 23 Jan 2023 14:33:26 +0100
Subject: [PATCH] 3005.1 implement zypper removeptf (#573)
* handle ptf packages inside of normal pkg.remove function
* add testcase for remove and removeptf
* add changelog
* adapt old tests to changed function
* Update Docs
Co-authored-by: Megan Wilhite <mwilhite@vmware.com>
---
changelog/63442.added | 1 +
salt/modules/zypperpkg.py | 38 +-
tests/pytests/unit/modules/test_zypperpkg.py | 356 ++++++++++++++++++-
tests/unit/modules/test_zypperpkg.py | 1 +
4 files changed, 394 insertions(+), 2 deletions(-)
create mode 100644 changelog/63442.added
diff --git a/changelog/63442.added b/changelog/63442.added
new file mode 100644
index 0000000000..ad81b2f9d5
--- /dev/null
+++ b/changelog/63442.added
@@ -0,0 +1 @@
+implement removal of ptf packages in zypper pkg module
diff --git a/salt/modules/zypperpkg.py b/salt/modules/zypperpkg.py
index 051f8f72c7..44f2cdbd3a 100644
--- a/salt/modules/zypperpkg.py
+++ b/salt/modules/zypperpkg.py
@@ -2073,17 +2073,21 @@ def _uninstall(inclusion_detection, name=None, pkgs=None, root=None):
except MinionError as exc:
raise CommandExecutionError(exc)
+ ptfpackages = _find_ptf_packages(pkg_params.keys(), root=root)
includes = _detect_includes(pkg_params.keys(), inclusion_detection)
old = list_pkgs(root=root, includes=includes)
targets = []
for target in pkg_params:
+ if target in ptfpackages:
+ # ptfpackages needs special handling
+ continue
# Check if package version set to be removed is actually installed:
# old[target] contains a comma-separated list of installed versions
if target in old and pkg_params[target] in old[target].split(","):
targets.append(target + "-" + pkg_params[target])
elif target in old and not pkg_params[target]:
targets.append(target)
- if not targets:
+ if not targets and not ptfpackages:
return {}
systemd_scope = _systemd_scope()
@@ -2095,6 +2099,13 @@ def _uninstall(inclusion_detection, name=None, pkgs=None, root=None):
)
targets = targets[500:]
+ # handle ptf packages
+ while ptfpackages:
+ __zypper__(systemd_scope=systemd_scope, root=root).call(
+ "removeptf", "--allow-downgrade", *ptfpackages[:500]
+ )
+ ptfpackages = ptfpackages[500:]
+
_clean_cache()
new = list_pkgs(root=root, includes=includes)
ret = salt.utils.data.compare_dicts(old, new)
@@ -2183,6 +2194,11 @@ def remove(
salt '*' pkg.remove <package name>
salt '*' pkg.remove <package1>,<package2>,<package3>
salt '*' pkg.remove pkgs='["foo", "bar"]'
+
+ .. versionchanged:: 3007
+ .. versionchanged:: 3007
+ Can now also remove PTF packages, which require different handling in the backend.
"""
return _uninstall(inclusion_detection, name=name, pkgs=pkgs, root=root)
@@ -2658,6 +2674,26 @@ def _get_visible_patterns(root=None):
return patterns
+def _find_ptf_packages(pkgs, root=None):
+ """
+ Find ptf packages in "pkgs" and return them as list
+ """
+ ptfs = []
+ cmd = ["rpm"]
+ if root:
+ cmd.extend(["--root", root])
+ cmd.extend(["-q", "--qf", "%{NAME}: [%{PROVIDES} ]\n"])
+ cmd.extend(pkgs)
+ output = __salt__["cmd.run"](cmd)
+ for line in output.splitlines():
+ if not line.strip():
+ continue
+ pkg, provides = line.split(":", 1)
+ if "ptf()" in provides:
+ ptfs.append(pkg)
+ return ptfs
+
+
def _get_installed_patterns(root=None):
"""
List all installed patterns.
diff --git a/tests/pytests/unit/modules/test_zypperpkg.py b/tests/pytests/unit/modules/test_zypperpkg.py
index 91132b7277..c996662e1c 100644
--- a/tests/pytests/unit/modules/test_zypperpkg.py
+++ b/tests/pytests/unit/modules/test_zypperpkg.py
@@ -11,7 +11,7 @@ import pytest
import salt.modules.pkg_resource as pkg_resource
import salt.modules.zypperpkg as zypper
from salt.exceptions import CommandExecutionError, SaltInvocationError
-from tests.support.mock import MagicMock, mock_open, patch
+from tests.support.mock import MagicMock, mock_open, call, patch
@pytest.fixture
@@ -27,6 +27,11 @@ def configure_loader_modules():
}
+@pytest.fixture(autouse=True)
+def fresh_zypper_instance():
+ zypper.__zypper__ = zypper._Zypper()
+
+
def test_list_pkgs_no_context():
"""
Test packages listing.
@@ -395,3 +400,352 @@ def test_del_repo_key():
with patch.dict(zypper.__salt__, salt_mock):
assert zypper.del_repo_key(keyid="keyid", root="/mnt")
salt_mock["lowpkg.remove_gpg_key"].assert_called_once_with("keyid", "/mnt")
+
+@pytest.mark.parametrize(
+ "zypper_version,lowpkg_version_cmp,expected_inst_avc,expected_dup_avc",
+ [
+ ("0.5", [-1, -1], False, False),
+ ("1.11.34", [0, -1], False, True),
+ ("1.14.8", [0, 0], True, True),
+ ],
+)
+def test_refresh_zypper_flags(
+ zypper_version, lowpkg_version_cmp, expected_inst_avc, expected_dup_avc
+):
+ with patch(
+ "salt.modules.zypperpkg.version", MagicMock(return_value=zypper_version)
+ ), patch.dict(
+ zypper.__salt__,
+ {"lowpkg.version_cmp": MagicMock(side_effect=lowpkg_version_cmp)},
+ ):
+ _zypper = zypper._Zypper()
+ _zypper.refresh_zypper_flags()
+ assert _zypper.inst_avc == expected_inst_avc
+ assert _zypper.dup_avc == expected_dup_avc
+
+
+@pytest.mark.parametrize(
+ "inst_avc,dup_avc,avc,allowvendorchange_param,novendorchange_param,expected",
+ [
+ # inst_avc = True, dup_avc = True
+ (True, True, False, False, False, True),
+ (True, True, False, True, False, True),
+ (True, True, False, False, True, False),
+ (True, True, False, True, True, True),
+ # inst_avc = False, dup_avc = True
+ (False, True, False, False, False, True),
+ (False, True, False, True, False, True),
+ (False, True, False, False, True, False),
+ (False, True, False, True, True, True),
+ # inst_avc = False, dup_avc = False
+ (False, False, False, False, False, False),
+ (False, False, False, True, False, False),
+ (False, False, False, False, True, False),
+ (False, False, False, True, True, False),
+ ],
+)
+@patch("salt.modules.zypperpkg._Zypper.refresh_zypper_flags", MagicMock())
+def test_allow_vendor_change(
+ inst_avc,
+ dup_avc,
+ avc,
+ allowvendorchange_param,
+ novendorchange_param,
+ expected,
+):
+ _zypper = zypper._Zypper()
+ _zypper.inst_avc = inst_avc
+ _zypper.dup_avc = dup_avc
+ _zypper.avc = avc
+ _zypper.allow_vendor_change(allowvendorchange_param, novendorchange_param)
+ assert _zypper.avc == expected
+
+
+@pytest.mark.parametrize(
+ "package,pre_version,post_version,fromrepo_param,name_param,pkgs_param,diff_attr_param",
+ [
+ ("vim", "1.1", "1.2", [], "", [], "all"),
+ ("kernel-default", "1.1", "1.1,1.2", ["dummy", "dummy2"], "", [], None),
+ ("vim", "1.1", "1.2", [], "vim", [], None),
+ ],
+)
+@patch.object(zypper, "refresh_db", MagicMock(return_value=True))
+def test_upgrade(
+ package,
+ pre_version,
+ post_version,
+ fromrepo_param,
+ name_param,
+ pkgs_param,
+ diff_attr_param,
+):
+ with patch(
+ "salt.modules.zypperpkg.__zypper__.noraise.call"
+ ) as zypper_mock, patch.object(
+ zypper,
+ "list_pkgs",
+ MagicMock(side_effect=[{package: pre_version}, {package: post_version}]),
+ ) as list_pkgs_mock:
+ expected_call = ["update", "--auto-agree-with-licenses"]
+ for repo in fromrepo_param:
+ expected_call.extend(["--repo", repo])
+
+ if pkgs_param:
+ expected_call.extend(pkgs_param)
+ elif name_param:
+ expected_call.append(name_param)
+
+ result = zypper.upgrade(
+ name=name_param,
+ pkgs=pkgs_param,
+ fromrepo=fromrepo_param,
+ diff_attr=diff_attr_param,
+ )
+ zypper_mock.assert_any_call(*expected_call)
+ assert result == {package: {"old": pre_version, "new": post_version}}
+ list_pkgs_mock.assert_any_call(root=None, attr=diff_attr_param)
+
+
+@pytest.mark.parametrize(
+ "package,pre_version,post_version,fromrepo_param",
+ [
+ ("vim", "1.1", "1.2", []),
+ ("emacs", "1.1", "1.2", ["Dummy", "Dummy2"]),
+ ],
+)
+@patch.object(zypper, "refresh_db", MagicMock(return_value=True))
+def test_dist_upgrade(package, pre_version, post_version, fromrepo_param):
+ with patch(
+ "salt.modules.zypperpkg.__zypper__.noraise.call"
+ ) as zypper_mock, patch.object(
+ zypper,
+ "list_pkgs",
+ MagicMock(side_effect=[{package: pre_version}, {package: post_version}]),
+ ):
+ expected_call = ["dist-upgrade", "--auto-agree-with-licenses"]
+
+ for repo in fromrepo_param:
+ expected_call.extend(["--from", repo])
+
+ result = zypper.upgrade(dist_upgrade=True, fromrepo=fromrepo_param)
+ zypper_mock.assert_any_call(*expected_call)
+ assert result == {package: {"old": pre_version, "new": post_version}}
+
+
+@pytest.mark.parametrize(
+ "package,pre_version,post_version,dup_avc,novendorchange_param,allowvendorchange_param,vendor_change",
+ [
+ # dup_avc = True, both params = default -> no vendor change
+ ("vim", "1.1", "1.2", True, True, False, False),
+ # dup_avc = True, allowvendorchange = True -> vendor change
+ (
+ "emacs",
+ "1.1",
+ "1.2",
+ True,
+ True,
+ True,
+ True,
+ ),
+ # dup_avc = True, novendorchange = False -> vendor change
+ ("joe", "1.1", "1.2", True, False, False, True),
+ # dup_avc = True, both params = toggled -> vendor change
+ ("kate", "1.1", "1.2", True, False, True, True),
+ # dup_avc = False -> no vendor change
+ (
+ "gedit",
+ "1.1",
+ "1.2",
+ False,
+ False,
+ True,
+ False
+ ),
+ ],
+)
+@patch.object(zypper, "refresh_db", MagicMock(return_value=True))
+def test_dist_upgrade_vendorchange(
+ package,
+ pre_version,
+ post_version,
+ dup_avc,
+ novendorchange_param,
+ allowvendorchange_param,
+ vendor_change
+):
+ cmd_run_mock = MagicMock(return_value={"retcode": 0, "stdout": None})
+ with patch.object(
+ zypper,
+ "list_pkgs",
+ MagicMock(side_effect=[{package: pre_version}, {package: post_version}]),
+ ), patch("salt.modules.zypperpkg.__zypper__.refresh_zypper_flags",), patch.dict(
+ zypper.__salt__, {"cmd.run_all": cmd_run_mock}
+ ):
+ expected_cmd = ["zypper", "--non-interactive", "--no-refresh", "dist-upgrade"]
+ # --allow-vendor-change is injected right after "dist-upgrade"
+ if vendor_change:
+ expected_cmd.append("--allow-vendor-change")
+ expected_cmd.append("--auto-agree-with-licenses")
+
+ zypper.__zypper__.dup_avc = dup_avc
+ zypper.upgrade(
+ dist_upgrade=True,
+ allowvendorchange=allowvendorchange_param,
+ novendorchange=novendorchange_param,
+ )
+ cmd_run_mock.assert_any_call(
+ expected_cmd, output_loglevel="trace", python_shell=False, env={}
+ )
+
+
+@pytest.mark.parametrize(
+ "package,pre_version,post_version,fromrepo_param",
+ [
+ ("vim", "1.1", "1.1", []),
+ ("emacs", "1.1", "1.1", ["Dummy", "Dummy2"]),
+ ],
+)
+@patch.object(zypper, "refresh_db", MagicMock(return_value=True))
+def test_dist_upgrade_dry_run(package, pre_version, post_version, fromrepo_param):
+ with patch(
+ "salt.modules.zypperpkg.__zypper__.noraise.call"
+ ) as zypper_mock, patch.object(
+ zypper,
+ "list_pkgs",
+ MagicMock(side_effect=[{package: pre_version}, {package: post_version}]),
+ ):
+ expected_call = ["dist-upgrade", "--auto-agree-with-licenses", "--dry-run"]
+
+ for repo in fromrepo_param:
+ expected_call.extend(["--from", repo])
+
+ zypper.upgrade(dist_upgrade=True, dryrun=True, fromrepo=fromrepo_param)
+ zypper_mock.assert_any_call(*expected_call)
+ # dryrun=True causes two calls, one with a trailing --debug-solver flag
+ expected_call.append("--debug-solver")
+ zypper_mock.assert_any_call(*expected_call)
+
+
+@patch.object(zypper, "refresh_db", MagicMock(return_value=True))
+def test_dist_upgrade_failure():
+ zypper_output = textwrap.dedent(
+ """\
+ Loading repository data...
+ Reading installed packages...
+ Computing distribution upgrade...
+ Use 'zypper repos' to get the list of defined repositories.
+ Repository 'DUMMY' not found by its alias, number, or URI.
+ """
+ )
+ call_spy = MagicMock()
+ zypper_mock = MagicMock()
+ zypper_mock.stdout = zypper_output
+ zypper_mock.stderr = ""
+ zypper_mock.exit_code = 3
+ zypper_mock.noraise.call = call_spy
+ with patch("salt.modules.zypperpkg.__zypper__", zypper_mock), patch.object(
+ zypper, "list_pkgs", MagicMock(side_effect=[{"vim": 1.1}, {"vim": 1.1}])
+ ):
+ expected_call = [
+ "dist-upgrade",
+ "--auto-agree-with-licenses",
+ "--from",
+ "Dummy",
+ ]
+
+ with pytest.raises(CommandExecutionError) as exc:
+ zypper.upgrade(dist_upgrade=True, fromrepo=["Dummy"])
+ call_spy.assert_called_with(*expected_call)
+
+ assert exc.exception.info["changes"] == {}
+ assert exc.exception.info["result"]["stdout"] == zypper_output
+
+
+def test_remove_multiple_pkgs_with_ptf():
+ call_spy = MagicMock()
+ zypper_mock = MagicMock()
+ zypper_mock.stdout = ""
+ zypper_mock.stderr = ""
+ zypper_mock.exit_code = 0
+ zypper_mock.call = call_spy
+
+ rpm_output = textwrap.dedent(
+ """
+ vim: vi vim vim(x86-64) vim-base vim-enhanced vim-python vim_client
+ ptf-12345: ptf() ptf-12345
+ """
+ )
+ rpm_mock = MagicMock(side_effect=[rpm_output])
+
+ with patch(
+ "salt.modules.zypperpkg.__zypper__", MagicMock(return_value=zypper_mock)
+ ), patch.object(
+ zypper,
+ "list_pkgs",
+ MagicMock(side_effect=[{"vim": "0.18.0", "ptf-12345": "1"}, {}]),
+ ), patch.dict(
+ zypper.__salt__, {"cmd.run": rpm_mock}
+ ):
+ expected_calls = [
+ call(
+ "remove",
+ "vim",
+ ),
+ call(
+ "removeptf",
+ "--allow-downgrade",
+ "ptf-12345",
+ ),
+ ]
+
+ result = zypper.remove(name="vim,ptf-12345")
+ call_spy.assert_has_calls(expected_calls, any_order=False)
+ assert result["vim"]["new"] == "", result
+ assert result["vim"]["old"] == "0.18.0", result
+ assert result["ptf-12345"]["new"] == "", result
+ assert result["ptf-12345"]["old"] == "1", result
+
+
+def test_remove_ptf():
+ call_spy = MagicMock()
+ zypper_mock = MagicMock()
+ zypper_mock.stdout = ""
+ zypper_mock.stderr = ""
+ zypper_mock.exit_code = 0
+ zypper_mock.call = call_spy
+
+ rpm_mock = MagicMock(
+ side_effect=[
+ "vim: vi vim vim(x86-64) vim-base vim-enhanced vim-python vim_client",
+ "ptf-12345: ptf() ptf-12345",
+ ]
+ )
+
+ with patch(
+ "salt.modules.zypperpkg.__zypper__", MagicMock(return_value=zypper_mock)
+ ), patch.object(
+ zypper,
+ "list_pkgs",
+ MagicMock(side_effect=[{"vim": "0.18.0"}, {}, {"ptf-12345": "1"}, {}]),
+ ), patch.dict(
+ zypper.__salt__, {"cmd.run": rpm_mock}
+ ):
+ expected_call_vim = [
+ "remove",
+ "vim",
+ ]
+ expected_call_ptf = [
+ "removeptf",
+ "--allow-downgrade",
+ "ptf-12345",
+ ]
+
+ result = zypper.remove(name="vim")
+ call_spy.assert_called_with(*expected_call_vim)
+ assert result["vim"]["new"] == "", result
+ assert result["vim"]["old"] == "0.18.0", result
+
+ result = zypper.remove(name="ptf-12345")
+ call_spy.assert_called_with(*expected_call_ptf)
+ assert result["ptf-12345"]["new"] == "", result
+ assert result["ptf-12345"]["old"] == "1", result
diff --git a/tests/unit/modules/test_zypperpkg.py b/tests/unit/modules/test_zypperpkg.py
index f5b6d74b6f..6e5ca88895 100644
--- a/tests/unit/modules/test_zypperpkg.py
+++ b/tests/unit/modules/test_zypperpkg.py
@@ -1953,6 +1953,7 @@ Repository 'DUMMY' not found by its alias, number, or URI.
# If config.get starts being used elsewhere, we'll need to write a
# side_effect function.
patches = {
+ "cmd.run": MagicMock(return_value="vim: vi vim\npico: pico"),
"cmd.run_all": MagicMock(return_value=cmd_out),
"pkg_resource.parse_targets": MagicMock(return_value=parsed_targets),
"pkg_resource.stringify": MagicMock(),
--
2.39.2

View File

@ -0,0 +1,71 @@
From 40a57afc65e71835127a437248ed655404cff0e8 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Pablo=20Su=C3=A1rez=20Hern=C3=A1ndez?=
<psuarezhernandez@suse.com>
Date: Tue, 27 Jun 2023 11:24:39 +0100
Subject: [PATCH] 3006.0: Prevent _pygit2.GitError: error loading
known_hosts when $HOME is not set (bsc#1210994) (#588)
* Prevent _pygit2.GitError: error loading known_hosts when $HOME is not set
* Add unit test to cover case of unset home
---
salt/utils/gitfs.py | 5 +++++
tests/unit/utils/test_gitfs.py | 14 ++++++++++++++
2 files changed, 19 insertions(+)
diff --git a/salt/utils/gitfs.py b/salt/utils/gitfs.py
index cc9895d8ab..38e84f38aa 100644
--- a/salt/utils/gitfs.py
+++ b/salt/utils/gitfs.py
@@ -34,6 +34,7 @@ import salt.utils.stringutils
import salt.utils.url
import salt.utils.user
import salt.utils.versions
+import salt.syspaths
from salt.config import DEFAULT_MASTER_OPTS as _DEFAULT_MASTER_OPTS
from salt.exceptions import FileserverConfigError, GitLockError, get_error_message
from salt.utils.event import tagify
@@ -1867,6 +1868,10 @@ class Pygit2(GitProvider):
# pruning only available in pygit2 >= 0.26.2
pass
try:
+ # Make sure $HOME env variable is set to prevent
+ # _pygit2.GitError: error loading known_hosts in some libgit2 versions.
+ if "HOME" not in os.environ:
+ os.environ["HOME"] = salt.syspaths.HOME_DIR
fetch_results = origin.fetch(**fetch_kwargs)
except GitError as exc: # pylint: disable=broad-except
exc_str = get_error_message(exc).lower()
diff --git a/tests/unit/utils/test_gitfs.py b/tests/unit/utils/test_gitfs.py
index b99da3ef91..7c400b69af 100644
--- a/tests/unit/utils/test_gitfs.py
+++ b/tests/unit/utils/test_gitfs.py
@@ -14,6 +14,7 @@ import salt.utils.gitfs
import salt.utils.platform
import tests.support.paths
from salt.exceptions import FileserverConfigError
+from tests.support.helpers import patched_environ
from tests.support.mixins import AdaptedConfigurationTestCaseMixin
from tests.support.mock import MagicMock, patch
from tests.support.unit import TestCase
@@ -335,3 +336,16 @@ class TestPygit2(TestCase):
self.assertIn(provider.cachedir, provider.checkout())
provider.branch = "does_not_exist"
self.assertIsNone(provider.checkout())
+
+ def test_checkout_with_home_env_unset(self):
+ remote = os.path.join(tests.support.paths.TMP, "pygit2-repo")
+ cache = os.path.join(tests.support.paths.TMP, "pygit2-repo-cache")
+ self._prepare_remote_repository(remote)
+ provider = self._prepare_cache_repository(remote, cache)
+ provider.remotecallbacks = None
+ provider.credentials = None
+ with patched_environ(__cleanup__=["HOME"]):
+ self.assertTrue("HOME" not in os.environ)
+ provider.init_remote()
+ provider.fetch()
+ self.assertTrue("HOME" in os.environ)
--
2.41.0

31
README.SUSE Normal file
View File

@ -0,0 +1,31 @@
Salt-master as non-root user
============================
With this version of salt the salt-master will run as the salt user.
Why an extra user
=================
While the current setup runs the master as root user, this is considered a security issue
and not in line with the other configuration management tools (eg. puppet) which runs as a
dedicated user.
How can I undo the change
=========================
If you would like to undo the change, you can do the following steps manually:
1. change the user parameter in the master configuration
user: root
2. update the file permissions:
as root: chown -R root /etc/salt /var/cache/salt /var/log/salt /var/run/salt
3. restart the salt-master daemon:
as root: rcsalt-master restart or systemctl restart salt-master
NOTE
====
Running the salt-master daemon as the root user is considered by some a security risk, but
running as root enables the pam external auth system, as this system needs root access to check authentication.
For more information:
http://docs.saltstack.com/en/latest/ref/configuration/nonroot.html

1
_lastrevision Normal file
View File

@ -0,0 +1 @@
3becea2e5b00beff724c22a8ae320d4567031c7b

20
_service Normal file
View File

@ -0,0 +1,20 @@
<services>
<service name="tar_scm" mode="disabled">
<param name="url">https://github.com/openSUSE/salt-packaging.git</param>
<param name="subdir">salt</param>
<param name="filename">package</param>
<param name="revision">release/3006.0</param>
<param name="scm">git</param>
</service>
<service name="extract_file" mode="disabled">
<param name="archive">*package*.tar</param>
<param name="files">*/*</param>
</service>
<service name="download_url" mode="disabled">
<param name="host">codeload.github.com</param>
<param name="path">openSUSE/salt/tar.gz/v3006.0-suse</param>
<param name="filename">v3006.0.tar.gz</param>
</service>
<service name="update_changelog" mode="disabled"></service>
</services>

View File

@ -0,0 +1,28 @@
From f2938966bd1fcb46df0f202f5a86729ab190565a Mon Sep 17 00:00:00 2001
From: Bo Maryniuk <bo@suse.de>
Date: Tue, 17 Oct 2017 16:52:33 +0200
Subject: [PATCH] Activate all beacons sources: config/pillar/grains
---
salt/minion.py | 4 +---
1 file changed, 1 insertion(+), 3 deletions(-)
diff --git a/salt/minion.py b/salt/minion.py
index 6237fcc4b7..2f905e4a4f 100644
--- a/salt/minion.py
+++ b/salt/minion.py
@@ -503,9 +503,7 @@ class MinionBase:
the pillar or grains changed
"""
if "config.merge" in functions:
- b_conf = functions["config.merge"](
- "beacons", self.opts["beacons"], omit_opts=True
- )
+ b_conf = functions["config.merge"]("beacons", self.opts["beacons"])
if b_conf:
return self.beacons.process(
b_conf, self.opts["grains"]
--
2.39.2

View File

@ -0,0 +1,30 @@
From 311d4e320527158b6ff88604b45e15f0dc2bfa62 Mon Sep 17 00:00:00 2001
From: Alexander Graul <agraul@suse.com>
Date: Tue, 18 Jan 2022 12:59:43 +0100
Subject: [PATCH] Add custom SUSE capabilities as Grains
Add new custom SUSE capability for saltutil state module
---
salt/grains/extra.py | 8 ++++++++
1 file changed, 8 insertions(+)
diff --git a/salt/grains/extra.py b/salt/grains/extra.py
index 300052f1ee..f2504dbf19 100644
--- a/salt/grains/extra.py
+++ b/salt/grains/extra.py
@@ -96,3 +96,11 @@ def uefi():
def transactional():
"""Determine if the system is transactional."""
return {"transactional": bool(salt.utils.path.which("transactional-update"))}
+
+
+def suse_backported_capabilities():
+ return {
+ '__suse_reserved_pkg_all_versions_support': True,
+ '__suse_reserved_pkg_patches_support': True,
+ '__suse_reserved_saltutil_states_support': True
+ }
--
2.39.2

View File

@ -0,0 +1,83 @@
From d7682d1bc67ccdd63022c63b2d3229f8ab40d52b Mon Sep 17 00:00:00 2001
From: Alexander Graul <agraul@suse.com>
Date: Tue, 18 Jan 2022 12:57:21 +0100
Subject: [PATCH] Add environment variable to know if yum is invoked from
Salt(bsc#1057635)
---
salt/modules/yumpkg.py | 23 +++++++++++++++++------
1 file changed, 17 insertions(+), 6 deletions(-)
diff --git a/salt/modules/yumpkg.py b/salt/modules/yumpkg.py
index 4d0070f21a..b362d30bf4 100644
--- a/salt/modules/yumpkg.py
+++ b/salt/modules/yumpkg.py
@@ -964,7 +964,9 @@ def list_repo_pkgs(*args, **kwargs):
None
if _yum() != "yum"
else LooseVersion(
- __salt__["cmd.run"](["yum", "--version"], python_shell=False)
+ __salt__["cmd.run"](
+ ["yum", "--version"], python_shell=False, env={"SALT_RUNNING": "1"}
+ )
.splitlines()[0]
.strip()
)
@@ -2474,7 +2476,9 @@ def list_holds(pattern=__HOLD_PATTERN, full=True):
"""
_check_versionlock()
- out = __salt__["cmd.run"]([_yum(), "versionlock", "list"], python_shell=False)
+ out = __salt__["cmd.run"](
+ [_yum(), "versionlock", "list"], python_shell=False, env={"SALT_RUNNING": "1"}
+ )
ret = []
for line in salt.utils.itertools.split(out, "\n"):
match = _get_hold(line, pattern=pattern, full=full)
@@ -2542,7 +2546,10 @@ def group_list():
}
out = __salt__["cmd.run_stdout"](
- [_yum(), "grouplist", "hidden"], output_loglevel="trace", python_shell=False
+ [_yum(), "grouplist", "hidden"],
+ output_loglevel="trace",
+ python_shell=False,
+ env={"SALT_RUNNING": "1"},
)
key = None
for line in salt.utils.itertools.split(out, "\n"):
@@ -2613,7 +2620,9 @@ def group_info(name, expand=False, ignore_groups=None):
ret[pkgtype] = set()
cmd = [_yum(), "--quiet", "groupinfo", name]
- out = __salt__["cmd.run_stdout"](cmd, output_loglevel="trace", python_shell=False)
+ out = __salt__["cmd.run_stdout"](
+ cmd, output_loglevel="trace", python_shell=False, env={"SALT_RUNNING": "1"}
+ )
g_info = {}
for line in salt.utils.itertools.split(out, "\n"):
@@ -3342,7 +3351,9 @@ def download(*packages, **kwargs):
cmd = ["yumdownloader", "-q", "--destdir={}".format(CACHE_DIR)]
cmd.extend(packages)
- __salt__["cmd.run"](cmd, output_loglevel="trace", python_shell=False)
+ __salt__["cmd.run"](
+ cmd, output_loglevel="trace", python_shell=False, env={"SALT_RUNNING": "1"}
+ )
ret = {}
for dld_result in os.listdir(CACHE_DIR):
if not dld_result.endswith(".rpm"):
@@ -3418,7 +3429,7 @@ def _get_patches(installed_only=False):
patches = {}
cmd = [_yum(), "--quiet", "updateinfo", "list", "all"]
- ret = __salt__["cmd.run_stdout"](cmd, python_shell=False)
+ ret = __salt__["cmd.run_stdout"](cmd, python_shell=False, env={"SALT_RUNNING": "1"})
parsing_errors = False
for line in salt.utils.itertools.split(ret, os.linesep):
--
2.39.2

File diff suppressed because it is too large Load Diff

View File

@ -0,0 +1,26 @@
From 3ef2071daf7a415f2c43e1339affe2b7cad93b3e Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Pablo=20Su=C3=A1rez=20Hern=C3=A1ndez?=
<psuarezhernandez@suse.com>
Date: Thu, 28 May 2020 09:37:08 +0100
Subject: [PATCH] Add publish_batch to ClearFuncs exposed methods
---
salt/master.py | 1 +
1 file changed, 1 insertion(+)
diff --git a/salt/master.py b/salt/master.py
index 2a526b4f21..a0552fa232 100644
--- a/salt/master.py
+++ b/salt/master.py
@@ -1960,6 +1960,7 @@ class ClearFuncs(TransportMethods):
expose_methods = (
"ping",
"publish",
+ "publish_batch",
"get_token",
"mk_token",
"wheel",
--
2.39.2

View File

@ -0,0 +1,795 @@
From 3fd6c0c6793632c819fb5f8fb3b3538463eaaccc Mon Sep 17 00:00:00 2001
From: Victor Zhestkov <vzhestkov@suse.com>
Date: Thu, 24 Feb 2022 16:52:24 +0300
Subject: [PATCH] Add salt-ssh support with venv-salt-minion - 3004
(#493)
* Add salt-ssh support with venv-salt-minion
* Add some comments and drop the commented line
* Fix return in check_venv_hash_file
* Convert all script parameters to strings
* Reduce the size of minion response
Minion response contains SSH_PY_CODE wrapped to base64.
This fix reduces the size of the response in DEBUG logging
* Make VENV_HASH_FILE global
* Pass the context to roster modules
* Avoid race condition on loading roster modules
* Prevent simultaneous to salt-ssh minion
* Make ssh session grace time configurable
* Prevent possible segfault by GC
* Revert "Avoid race condition on loading roster modules"
This reverts commit 8ff822a162cc494d3528184aef983ad20e09f4e2.
* Prevent deadlocks with importlib on using LazyLoader
* Make logging on salt-ssh errors more informative
* Add comments about using salt.loader.LOAD_LOCK
* Fix test_loader test
* Prevent deadlocks on using logging
* Use collections.deque instead of list for salt-ssh
Suggested by @agraul
* Get proper exitstatus from salt.utils.vt.Terminal
to prevent empty event returns due to improperly detecting
the child process as failed
* Do not run pre flight script for raw_shell
---
salt/_logging/impl.py | 55 +++++++-----
salt/client/ssh/__init__.py | 157 ++++++++++++++++++++++++++++-----
salt/client/ssh/client.py | 7 +-
salt/client/ssh/shell.py | 8 ++
salt/client/ssh/ssh_py_shim.py | 108 +++++++++++++----------
salt/loader/__init__.py | 31 ++++++-
salt/netapi/__init__.py | 3 +-
salt/roster/__init__.py | 6 +-
tests/unit/test_loader.py | 2 +-
9 files changed, 278 insertions(+), 99 deletions(-)
diff --git a/salt/_logging/impl.py b/salt/_logging/impl.py
index cc18f49a9e..e050f43caf 100644
--- a/salt/_logging/impl.py
+++ b/salt/_logging/impl.py
@@ -14,6 +14,7 @@ import re
import socket
import sys
import traceback
+import threading
import types
import urllib.parse
@@ -104,6 +105,10 @@ DFLT_LOG_DATEFMT_LOGFILE = "%Y-%m-%d %H:%M:%S"
DFLT_LOG_FMT_CONSOLE = "[%(levelname)-8s] %(message)s"
DFLT_LOG_FMT_LOGFILE = "%(asctime)s,%(msecs)03d [%(name)-17s:%(lineno)-4d][%(levelname)-8s][%(process)d] %(message)s"
+# LOG_LOCK is used to prevent deadlocks on using logging
+# in combination with multiprocessing with salt-api
+LOG_LOCK = threading.Lock()
+
class SaltLogRecord(logging.LogRecord):
def __init__(self, *args, **kwargs):
@@ -270,27 +275,35 @@ class SaltLoggingClass(LOGGING_LOGGER_CLASS, metaclass=LoggingMixinMeta):
else:
extra["exc_info_on_loglevel"] = exc_info_on_loglevel
- if sys.version_info < (3, 8):
- LOGGING_LOGGER_CLASS._log(
- self,
- level,
- msg,
- args,
- exc_info=exc_info,
- extra=extra,
- stack_info=stack_info,
- )
- else:
- LOGGING_LOGGER_CLASS._log(
- self,
- level,
- msg,
- args,
- exc_info=exc_info,
- extra=extra,
- stack_info=stack_info,
- stacklevel=stacklevel,
- )
+ try:
+ LOG_LOCK.acquire()
+ if sys.version_info < (3,):
+ LOGGING_LOGGER_CLASS._log(
+ self, level, msg, args, exc_info=exc_info, extra=extra
+ )
+ elif sys.version_info < (3, 8):
+ LOGGING_LOGGER_CLASS._log(
+ self,
+ level,
+ msg,
+ args,
+ exc_info=exc_info,
+ extra=extra,
+ stack_info=stack_info,
+ )
+ else:
+ LOGGING_LOGGER_CLASS._log(
+ self,
+ level,
+ msg,
+ args,
+ exc_info=exc_info,
+ extra=extra,
+ stack_info=stack_info,
+ stacklevel=stacklevel,
+ )
+ finally:
+ LOG_LOCK.release()
def makeRecord(
self,
diff --git a/salt/client/ssh/__init__.py b/salt/client/ssh/__init__.py
index 19089ce8ad..e6837df4e5 100644
--- a/salt/client/ssh/__init__.py
+++ b/salt/client/ssh/__init__.py
@@ -6,11 +6,13 @@ import base64
import binascii
import copy
import datetime
+import gc
import getpass
import hashlib
import logging
import multiprocessing
import os
+import psutil
import queue
import re
import shlex
@@ -20,6 +22,7 @@ import tarfile
import tempfile
import time
import uuid
+from collections import deque
import salt.client.ssh.shell
import salt.client.ssh.wrapper
@@ -47,6 +50,7 @@ import salt.utils.url
import salt.utils.verify
from salt._logging import LOG_LEVELS
from salt._logging.mixins import MultiprocessingStateMixin
+from salt._logging.impl import LOG_LOCK
from salt.template import compile_template
from salt.utils.process import Process
from salt.utils.zeromq import zmq
@@ -146,15 +150,26 @@ if [ "$SUDO" ] && [ "$SUDO_USER" ]
then SUDO="$SUDO -u $SUDO_USER"
fi
EX_PYTHON_INVALID={EX_THIN_PYTHON_INVALID}
-PYTHON_CMDS="python3 /usr/libexec/platform-python python27 python2.7 python26 python2.6 python2 python"
+set +x
+SSH_PY_CODE='import base64;
+ exec(base64.b64decode("""{{SSH_PY_CODE}}""").decode("utf-8"))'
+if [ -n "$DEBUG" ]
+ then set -x
+fi
+PYTHON_CMDS="/var/tmp/venv-salt-minion/bin/python python3 /usr/libexec/platform-python python27 python2.7 python26 python2.6 python2 python"
for py_cmd in $PYTHON_CMDS
do
if command -v "$py_cmd" >/dev/null 2>&1 && "$py_cmd" -c "import sys; sys.exit(not (sys.version_info >= (2, 6)));"
then
py_cmd_path=`"$py_cmd" -c 'from __future__ import print_function;import sys; print(sys.executable);'`
cmdpath=`command -v $py_cmd 2>/dev/null || which $py_cmd 2>/dev/null`
+ cmdpath=`readlink -f $cmdpath`
if file $cmdpath | grep "shell script" > /dev/null
then
+ if echo $cmdpath | grep venv-salt-minion > /dev/null
+ then
+ exec $SUDO "$cmdpath" -c "$SSH_PY_CODE"
+ fi
ex_vars="'PATH', 'LD_LIBRARY_PATH', 'MANPATH', \
'XDG_DATA_DIRS', 'PKG_CONFIG_PATH'"
export `$py_cmd -c \
@@ -166,13 +181,9 @@ do
exec $SUDO PATH=$PATH LD_LIBRARY_PATH=$LD_LIBRARY_PATH \
MANPATH=$MANPATH XDG_DATA_DIRS=$XDG_DATA_DIRS \
PKG_CONFIG_PATH=$PKG_CONFIG_PATH \
- "$py_cmd_path" -c \
- 'import base64;
- exec(base64.b64decode("""{{SSH_PY_CODE}}""").decode("utf-8"))'
+ "$py_cmd_path" -c "$SSH_PY_CODE"
else
- exec $SUDO "$py_cmd_path" -c \
- 'import base64;
- exec(base64.b64decode("""{{SSH_PY_CODE}}""").decode("utf-8"))'
+ exec $SUDO "$py_cmd_path" -c "$SSH_PY_CODE"
fi
exit 0
else
@@ -189,6 +200,9 @@ EOF'''.format(
]
)
+# The file on a salt-ssh minion used to identify if Salt Bundle was deployed
+VENV_HASH_FILE = "/var/tmp/venv-salt-minion/venv-hash.txt"
+
if not salt.utils.platform.is_windows() and not salt.utils.platform.is_junos():
shim_file = os.path.join(os.path.dirname(__file__), "ssh_py_shim.py")
if not os.path.exists(shim_file):
@@ -209,7 +223,7 @@ class SSH(MultiprocessingStateMixin):
ROSTER_UPDATE_FLAG = "#__needs_update"
- def __init__(self, opts):
+ def __init__(self, opts, context=None):
self.__parsed_rosters = {SSH.ROSTER_UPDATE_FLAG: True}
pull_sock = os.path.join(opts["sock_dir"], "master_event_pull.ipc")
if os.path.exists(pull_sock) and zmq:
@@ -236,7 +250,9 @@ class SSH(MultiprocessingStateMixin):
else "glob"
)
self._expand_target()
- self.roster = salt.roster.Roster(self.opts, self.opts.get("roster", "flat"))
+ self.roster = salt.roster.Roster(
+ self.opts, self.opts.get("roster", "flat"), context=context
+ )
self.targets = self.roster.targets(self.opts["tgt"], self.tgt_type)
if not self.targets:
self._update_targets()
@@ -316,6 +332,13 @@ class SSH(MultiprocessingStateMixin):
extended_cfg=self.opts.get("ssh_ext_alternatives"),
)
self.mods = mod_data(self.fsclient)
+ self.cache = salt.cache.Cache(self.opts)
+ self.master_id = self.opts["id"]
+ self.max_pid_wait = int(self.opts.get("ssh_max_pid_wait", 600))
+ self.session_flock_file = os.path.join(
+ self.opts["cachedir"], "salt-ssh.session.lock"
+ )
+ self.ssh_session_grace_time = int(self.opts.get("ssh_session_grace_time", 3))
# __setstate__ and __getstate__ are only used on spawning platforms.
def __setstate__(self, state):
@@ -546,6 +569,8 @@ class SSH(MultiprocessingStateMixin):
"""
Run the routine in a "Thread", put a dict on the queue
"""
+ LOG_LOCK.release()
+ salt.loader.LOAD_LOCK.release()
opts = copy.deepcopy(opts)
single = Single(
opts,
@@ -585,7 +610,7 @@ class SSH(MultiprocessingStateMixin):
"""
que = multiprocessing.Queue()
running = {}
- target_iter = self.targets.__iter__()
+ targets_queue = deque(self.targets.keys())
returned = set()
rets = set()
init = False
@@ -594,11 +619,43 @@ class SSH(MultiprocessingStateMixin):
log.error("No matching targets found in roster.")
break
if len(running) < self.opts.get("ssh_max_procs", 25) and not init:
- try:
- host = next(target_iter)
- except StopIteration:
+ if targets_queue:
+ host = targets_queue.popleft()
+ else:
init = True
continue
+ with salt.utils.files.flopen(self.session_flock_file, "w"):
+ cached_session = self.cache.fetch("salt-ssh/session", host)
+ if cached_session is not None and "ts" in cached_session:
+ prev_session_running = time.time() - cached_session["ts"]
+ if (
+ "pid" in cached_session
+ and cached_session.get("master_id", self.master_id)
+ == self.master_id
+ ):
+ pid_running = (
+ False
+ if cached_session["pid"] == 0
+ else psutil.pid_exists(cached_session["pid"])
+ )
+ if (
+ pid_running and prev_session_running < self.max_pid_wait
+ ) or (
+ not pid_running
+ and prev_session_running < self.ssh_session_grace_time
+ ):
+ targets_queue.append(host)
+ time.sleep(0.3)
+ continue
+ self.cache.store(
+ "salt-ssh/session",
+ host,
+ {
+ "pid": 0,
+ "master_id": self.master_id,
+ "ts": time.time(),
+ },
+ )
for default in self.defaults:
if default not in self.targets[host]:
self.targets[host][default] = self.defaults[default]
@@ -630,8 +687,38 @@ class SSH(MultiprocessingStateMixin):
mine,
)
routine = Process(target=self.handle_routine, args=args)
- routine.start()
+ # Explicitly call garbage collector to prevent possible segfault
+ # in salt-api child process. (bsc#1188607)
+ gc.collect()
+ try:
+ # salt.loader.LOAD_LOCK is used to prevent deadlock
+ # with importlib in combination with using multiprocessing (bsc#1182851)
+                # If the salt-api child process is created while a LazyLoader instance
+                # is loading a module, the new child process inherits the lock for this
+                # module in an acquired state; touching it with importlib then deadlocks.
+ #
+ # salt.loader.LOAD_LOCK is used to prevent salt-api child process creation
+ # while creating new instance of LazyLoader
+ # salt.loader.LOAD_LOCK must be released explicitly in self.handle_routine
+ salt.loader.LOAD_LOCK.acquire()
+ # The same solution applied to fix logging deadlock
+ # LOG_LOCK must be released explicitly in self.handle_routine
+ LOG_LOCK.acquire()
+ routine.start()
+ finally:
+ LOG_LOCK.release()
+ salt.loader.LOAD_LOCK.release()
running[host] = {"thread": routine}
+ with salt.utils.files.flopen(self.session_flock_file, "w"):
+ self.cache.store(
+ "salt-ssh/session",
+ host,
+ {
+ "pid": routine.pid,
+ "master_id": self.master_id,
+ "ts": time.time(),
+ },
+ )
continue
ret = {}
try:
@@ -662,12 +749,27 @@ class SSH(MultiprocessingStateMixin):
)
ret = {"id": host, "ret": error}
log.error(error)
+ log.error(
+ "PID %s did not return any data for host '%s'",
+ running[host]["thread"].pid,
+ host,
+ )
yield {ret["id"]: ret["ret"]}
running[host]["thread"].join()
rets.add(host)
for host in rets:
if host in running:
running.pop(host)
+ with salt.utils.files.flopen(self.session_flock_file, "w"):
+ self.cache.store(
+ "salt-ssh/session",
+ host,
+ {
+ "pid": 0,
+ "master_id": self.master_id,
+ "ts": time.time(),
+ },
+ )
if len(rets) >= len(self.targets):
break
# Sleep when limit or all threads started
@@ -1036,14 +1138,24 @@ class Single:
return False
return True
+ def check_venv_hash_file(self):
+ """
+ check if the venv exists on the remote machine
+ """
+ stdout, stderr, retcode = self.shell.exec_cmd(
+ "test -f {}".format(VENV_HASH_FILE)
+ )
+ return retcode == 0
+
def deploy(self):
"""
Deploy salt-thin
"""
- self.shell.send(
- self.thin,
- os.path.join(self.thin_dir, "salt-thin.tgz"),
- )
+ if not self.check_venv_hash_file():
+ self.shell.send(
+ self.thin,
+ os.path.join(self.thin_dir, "salt-thin.tgz"),
+ )
self.deploy_ext()
return True
@@ -1071,8 +1183,9 @@ class Single:
Returns tuple of (stdout, stderr, retcode)
"""
stdout = stderr = retcode = None
+ raw_shell = self.opts.get("raw_shell", False)
- if self.ssh_pre_flight:
+ if self.ssh_pre_flight and not raw_shell:
if not self.opts.get("ssh_run_pre_flight", False) and self.check_thin_dir():
log.info(
"%s thin dir already exists. Not running ssh_pre_flight script",
@@ -1086,14 +1199,16 @@ class Single:
stdout, stderr, retcode = self.run_ssh_pre_flight()
if retcode != 0:
log.error(
- "Error running ssh_pre_flight script %s", self.ssh_pre_file
+ "Error running ssh_pre_flight script %s for host '%s'",
+ self.ssh_pre_file,
+ self.target["host"],
)
return stdout, stderr, retcode
log.info(
"Successfully ran the ssh_pre_flight script: %s", self.ssh_pre_file
)
- if self.opts.get("raw_shell", False):
+ if raw_shell:
cmd_str = " ".join([self._escape_arg(arg) for arg in self.argv])
stdout, stderr, retcode = self.shell.exec_cmd(cmd_str)
diff --git a/salt/client/ssh/client.py b/salt/client/ssh/client.py
index be9247cb15..0b67598fc6 100644
--- a/salt/client/ssh/client.py
+++ b/salt/client/ssh/client.py
@@ -108,7 +108,7 @@ class SSHClient:
return sane_kwargs
def _prep_ssh(
- self, tgt, fun, arg=(), timeout=None, tgt_type="glob", kwarg=None, **kwargs
+ self, tgt, fun, arg=(), timeout=None, tgt_type="glob", kwarg=None, context=None, **kwargs
):
"""
Prepare the arguments
@@ -123,7 +123,7 @@ class SSHClient:
opts["selected_target_option"] = tgt_type
opts["tgt"] = tgt
opts["arg"] = arg
- return salt.client.ssh.SSH(opts)
+ return salt.client.ssh.SSH(opts, context=context)
def cmd_iter(
self,
@@ -160,7 +160,7 @@ class SSHClient:
final.update(ret)
return final
- def cmd_sync(self, low):
+ def cmd_sync(self, low, context=None):
"""
Execute a salt-ssh call synchronously.
@@ -193,6 +193,7 @@ class SSHClient:
low.get("timeout"),
low.get("tgt_type"),
low.get("kwarg"),
+ context=context,
**kwargs
)
diff --git a/salt/client/ssh/shell.py b/salt/client/ssh/shell.py
index cfa82d13c2..bc1ad034df 100644
--- a/salt/client/ssh/shell.py
+++ b/salt/client/ssh/shell.py
@@ -464,6 +464,14 @@ class Shell:
if stdout:
old_stdout = stdout
time.sleep(0.01)
+ if term.exitstatus is None:
+ try:
+ term.wait()
+ except: # pylint: disable=broad-except
+ # It's safe to put the broad exception handling here
+                    # as we just need to ensure the child process in term has finished
+ # to get proper term.exitstatus instead of None
+ pass
return ret_stdout, ret_stderr, term.exitstatus
finally:
term.close(terminate=True, kill=True)
diff --git a/salt/client/ssh/ssh_py_shim.py b/salt/client/ssh/ssh_py_shim.py
index b77749f495..293ea1b7fa 100644
--- a/salt/client/ssh/ssh_py_shim.py
+++ b/salt/client/ssh/ssh_py_shim.py
@@ -279,56 +279,72 @@ def main(argv): # pylint: disable=W0613
"""
Main program body
"""
- thin_path = os.path.join(OPTIONS.saltdir, THIN_ARCHIVE)
- if os.path.isfile(thin_path):
- if OPTIONS.checksum != get_hash(thin_path, OPTIONS.hashfunc):
- need_deployment()
- unpack_thin(thin_path)
- # Salt thin now is available to use
- else:
- if not sys.platform.startswith("win"):
- scpstat = subprocess.Popen(["/bin/sh", "-c", "command -v scp"]).wait()
- if scpstat != 0:
- sys.exit(EX_SCP_NOT_FOUND)
-
- if os.path.exists(OPTIONS.saltdir) and not os.path.isdir(OPTIONS.saltdir):
- sys.stderr.write(
- 'ERROR: salt path "{0}" exists but is not a directory\n'.format(
- OPTIONS.saltdir
+
+ virt_env = os.getenv("VIRTUAL_ENV", None)
+ # VIRTUAL_ENV environment variable is defined by venv-salt-minion wrapper
+ # it's used to check if the shim is running under this wrapper
+ venv_salt_call = None
+ if virt_env and "venv-salt-minion" in virt_env:
+ venv_salt_call = os.path.join(virt_env, "bin", "salt-call")
+ if not os.path.exists(venv_salt_call):
+ venv_salt_call = None
+ elif not os.path.exists(OPTIONS.saltdir):
+ os.makedirs(OPTIONS.saltdir)
+ cache_dir = os.path.join(OPTIONS.saltdir, "running_data", "var", "cache")
+ os.makedirs(os.path.join(cache_dir, "salt"))
+ os.symlink("salt", os.path.relpath(os.path.join(cache_dir, "venv-salt-minion")))
+
+ if venv_salt_call is None:
+ # Use Salt thin only if Salt Bundle (venv-salt-minion) is not available
+ thin_path = os.path.join(OPTIONS.saltdir, THIN_ARCHIVE)
+ if os.path.isfile(thin_path):
+ if OPTIONS.checksum != get_hash(thin_path, OPTIONS.hashfunc):
+ need_deployment()
+ unpack_thin(thin_path)
+ # Salt thin now is available to use
+ else:
+ if not sys.platform.startswith("win"):
+ scpstat = subprocess.Popen(["/bin/sh", "-c", "command -v scp"]).wait()
+ if scpstat != 0:
+ sys.exit(EX_SCP_NOT_FOUND)
+
+ if os.path.exists(OPTIONS.saltdir) and not os.path.isdir(OPTIONS.saltdir):
+ sys.stderr.write(
+ 'ERROR: salt path "{0}" exists but is'
+ " not a directory\n".format(OPTIONS.saltdir)
)
- )
- sys.exit(EX_CANTCREAT)
+ sys.exit(EX_CANTCREAT)
- if not os.path.exists(OPTIONS.saltdir):
- need_deployment()
+ if not os.path.exists(OPTIONS.saltdir):
+ need_deployment()
- code_checksum_path = os.path.normpath(
- os.path.join(OPTIONS.saltdir, "code-checksum")
- )
- if not os.path.exists(code_checksum_path) or not os.path.isfile(
- code_checksum_path
- ):
- sys.stderr.write(
- "WARNING: Unable to locate current code checksum: {0}.\n".format(
- code_checksum_path
- )
+ code_checksum_path = os.path.normpath(
+ os.path.join(OPTIONS.saltdir, "code-checksum")
)
- need_deployment()
- with open(code_checksum_path, "r") as vpo:
- cur_code_cs = vpo.readline().strip()
- if cur_code_cs != OPTIONS.code_checksum:
- sys.stderr.write(
- "WARNING: current code checksum {0} is different to {1}.\n".format(
- cur_code_cs, OPTIONS.code_checksum
+ if not os.path.exists(code_checksum_path) or not os.path.isfile(
+ code_checksum_path
+ ):
+ sys.stderr.write(
+ "WARNING: Unable to locate current code checksum: {0}.\n".format(
+ code_checksum_path
+ )
)
- )
- need_deployment()
- # Salt thin exists and is up-to-date - fall through and use it
+ need_deployment()
+ with open(code_checksum_path, "r") as vpo:
+ cur_code_cs = vpo.readline().strip()
+ if cur_code_cs != OPTIONS.code_checksum:
+ sys.stderr.write(
+ "WARNING: current code checksum {0} is different to {1}.\n".format(
+ cur_code_cs, OPTIONS.code_checksum
+ )
+ )
+ need_deployment()
+ # Salt thin exists and is up-to-date - fall through and use it
- salt_call_path = os.path.join(OPTIONS.saltdir, "salt-call")
- if not os.path.isfile(salt_call_path):
- sys.stderr.write('ERROR: thin is missing "{0}"\n'.format(salt_call_path))
- need_deployment()
+ salt_call_path = os.path.join(OPTIONS.saltdir, "salt-call")
+ if not os.path.isfile(salt_call_path):
+ sys.stderr.write('ERROR: thin is missing "{0}"\n'.format(salt_call_path))
+ need_deployment()
with open(os.path.join(OPTIONS.saltdir, "minion"), "w") as config:
config.write(OPTIONS.config + "\n")
@@ -351,8 +367,8 @@ def main(argv): # pylint: disable=W0613
argv_prepared = ARGS
salt_argv = [
- get_executable(),
- salt_call_path,
+ sys.executable if venv_salt_call is not None else get_executable(),
+ venv_salt_call if venv_salt_call is not None else salt_call_path,
"--retcode-passthrough",
"--local",
"--metadata",
diff --git a/salt/loader/__init__.py b/salt/loader/__init__.py
index 72a5e54401..32f8a7702c 100644
--- a/salt/loader/__init__.py
+++ b/salt/loader/__init__.py
@@ -9,6 +9,7 @@ import inspect
import logging
import os
import re
+import threading
import time
import types
@@ -31,7 +32,7 @@ from salt.exceptions import LoaderError
from salt.template import check_render_pipe_str
from salt.utils import entrypoints
-from .lazy import SALT_BASE_PATH, FilterDictWrapper, LazyLoader
+from .lazy import SALT_BASE_PATH, FilterDictWrapper, LazyLoader as _LazyLoader
log = logging.getLogger(__name__)
@@ -81,6 +82,18 @@ SALT_INTERNAL_LOADERS_PATHS = (
str(SALT_BASE_PATH / "wheel"),
)
+LOAD_LOCK = threading.Lock()
+
+
+def LazyLoader(*args, **kwargs):
+ # This wrapper is used to prevent deadlocks with importlib (bsc#1182851)
+ # LOAD_LOCK is also used directly in salt.client.ssh.SSH
+ try:
+ LOAD_LOCK.acquire()
+ return _LazyLoader(*args, **kwargs)
+ finally:
+ LOAD_LOCK.release()
+
def static_loader(
opts,
@@ -725,7 +738,7 @@ def fileserver(opts, backends, loaded_base_name=None):
)
-def roster(opts, runner=None, utils=None, whitelist=None, loaded_base_name=None):
+def roster(opts, runner=None, utils=None, whitelist=None, loaded_base_name=None, context=None):
"""
Returns the roster modules
@@ -736,12 +749,15 @@ def roster(opts, runner=None, utils=None, whitelist=None, loaded_base_name=None)
:param str loaded_base_name: The imported modules namespace when imported
by the salt loader.
"""
+ if context is None:
+ context = {}
+
return LazyLoader(
_module_dirs(opts, "roster"),
opts,
tag="roster",
whitelist=whitelist,
- pack={"__runner__": runner, "__utils__": utils},
+ pack={"__runner__": runner, "__utils__": utils, "__context__": context},
extra_module_dirs=utils.module_dirs if utils else None,
loaded_base_name=loaded_base_name,
)
@@ -933,7 +949,14 @@ def render(
)
rend = FilterDictWrapper(ret, ".render")
- if not check_render_pipe_str(
+ def _check_render_pipe_str(pipestr, renderers, blacklist, whitelist):
+ try:
+ LOAD_LOCK.acquire()
+ return check_render_pipe_str(pipestr, renderers, blacklist, whitelist)
+ finally:
+ LOAD_LOCK.release()
+
+ if not _check_render_pipe_str(
opts["renderer"], rend, opts["renderer_blacklist"], opts["renderer_whitelist"]
):
err = (
diff --git a/salt/netapi/__init__.py b/salt/netapi/__init__.py
index a89c1a19af..8a28c48460 100644
--- a/salt/netapi/__init__.py
+++ b/salt/netapi/__init__.py
@@ -79,6 +79,7 @@ class NetapiClient:
self.loadauth = salt.auth.LoadAuth(apiopts)
self.key = salt.daemons.masterapi.access_keys(apiopts)
self.ckminions = salt.utils.minions.CkMinions(apiopts)
+ self.context = {}
def _is_master_running(self):
"""
@@ -245,7 +246,7 @@ class NetapiClient:
with salt.client.ssh.client.SSHClient(
mopts=self.opts, disable_custom_roster=True
) as client:
- return client.cmd_sync(kwargs)
+ return client.cmd_sync(kwargs, context=self.context)
def runner(self, fun, timeout=None, full_return=False, **kwargs):
"""
diff --git a/salt/roster/__init__.py b/salt/roster/__init__.py
index fc7339d785..ea23d550d7 100644
--- a/salt/roster/__init__.py
+++ b/salt/roster/__init__.py
@@ -59,7 +59,7 @@ class Roster:
minion aware
"""
- def __init__(self, opts, backends="flat"):
+ def __init__(self, opts, backends="flat", context=None):
self.opts = opts
if isinstance(backends, list):
self.backends = backends
@@ -71,7 +71,9 @@ class Roster:
self.backends = ["flat"]
utils = salt.loader.utils(self.opts)
runner = salt.loader.runner(self.opts, utils=utils)
- self.rosters = salt.loader.roster(self.opts, runner=runner, utils=utils)
+ self.rosters = salt.loader.roster(
+ self.opts, runner=runner, utils=utils, context=context
+ )
def _gen_back(self):
"""
diff --git a/tests/unit/test_loader.py b/tests/unit/test_loader.py
index cf33903320..1b616375b3 100644
--- a/tests/unit/test_loader.py
+++ b/tests/unit/test_loader.py
@@ -1697,7 +1697,7 @@ class LazyLoaderRefreshFileMappingTest(TestCase):
cls.funcs = salt.loader.minion_mods(cls.opts, utils=cls.utils, proxy=cls.proxy)
def setUp(self):
- class LazyLoaderMock(salt.loader.LazyLoader):
+ class LazyLoaderMock(salt.loader._LazyLoader):
pass
self.LOADER_CLASS = LazyLoaderMock
--
2.39.2

View File

@ -0,0 +1,41 @@
From bad9e783e1a6923d85bdb1477a2e9766887a511e Mon Sep 17 00:00:00 2001
From: Victor Zhestkov <35733135+vzhestkov@users.noreply.github.com>
Date: Thu, 18 Feb 2021 14:49:38 +0300
Subject: [PATCH] Add sleep on exception handling on minion connection
attempt to the master (bsc#1174855) (#321)
* Async batch implementation fix
* Add sleep on exception handling on minion connection attempt to the master (bsc#1174855)
---
salt/minion.py | 6 ++++++
1 file changed, 6 insertions(+)
diff --git a/salt/minion.py b/salt/minion.py
index 2f905e4a4f..c3b65f16c3 100644
--- a/salt/minion.py
+++ b/salt/minion.py
@@ -1123,6 +1123,9 @@ class MinionManager(MinionBase):
last = 0 # never have we signed in
auth_wait = minion.opts["acceptance_wait_time"]
failed = False
+ retry_wait = 1
+ retry_wait_inc = 1
+ max_retry_wait = 20
while True:
try:
if minion.opts.get("beacons_before_connect", False):
@@ -1161,6 +1164,9 @@ class MinionManager(MinionBase):
minion.opts["master"],
exc_info=True,
)
+ yield salt.ext.tornado.gen.sleep(retry_wait)
+ if retry_wait < max_retry_wait:
+ retry_wait += retry_wait_inc
# Multi Master Tune In
def tune_in(self):
--
2.39.2

View File

@ -0,0 +1,26 @@
From 94e702e83c05814296ea8987a722b71e99117360 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Pablo=20Su=C3=A1rez=20Hern=C3=A1ndez?=
<psuarezhernandez@suse.com>
Date: Wed, 22 May 2019 13:00:46 +0100
Subject: [PATCH] Add standalone configuration file for enabling package
formulas
---
conf/suse/standalone-formulas-configuration.conf | 4 ++++
1 file changed, 4 insertions(+)
create mode 100644 conf/suse/standalone-formulas-configuration.conf
diff --git a/conf/suse/standalone-formulas-configuration.conf b/conf/suse/standalone-formulas-configuration.conf
new file mode 100644
index 0000000000..94d05fb2ee
--- /dev/null
+++ b/conf/suse/standalone-formulas-configuration.conf
@@ -0,0 +1,4 @@
+file_roots:
+ base:
+ - /usr/share/salt-formulas/states
+ - /srv/salt
--
2.39.2

View File

@ -0,0 +1,369 @@
From 2e103365c50fe42a72de3e9d57c3fdbee47454aa Mon Sep 17 00:00:00 2001
From: Michael Calmer <mc@suse.de>
Date: Fri, 8 Jul 2022 10:15:37 +0200
Subject: [PATCH] add support for gpgautoimport (#539)
* add support for gpgautoimport to refresh_db in the zypperpkg module
* call refresh_db function from mod_repo
* call refresh_db with kwargs where possible
* ignore no repos defined exit code
* fix zypperpkg test after adding more success return codes
---
salt/modules/zypperpkg.py | 47 +++++++---
tests/unit/modules/test_zypperpkg.py | 124 +++++++++++++++++++++++----
2 files changed, 140 insertions(+), 31 deletions(-)
diff --git a/salt/modules/zypperpkg.py b/salt/modules/zypperpkg.py
index 318c871b37..051f8f72c7 100644
--- a/salt/modules/zypperpkg.py
+++ b/salt/modules/zypperpkg.py
@@ -623,7 +623,7 @@ def list_upgrades(refresh=True, root=None, **kwargs):
salt '*' pkg.list_upgrades
"""
if refresh:
- refresh_db(root)
+ refresh_db(root, **kwargs)
ret = dict()
cmd = ["list-updates"]
@@ -737,7 +737,7 @@ def info_available(*names, **kwargs):
# Refresh db before extracting the latest package
if kwargs.get("refresh", True):
- refresh_db(root)
+ refresh_db(root, **kwargs)
pkg_info = []
batch = names[:]
@@ -1439,7 +1439,6 @@ def mod_repo(repo, **kwargs):
cmd_opt.append(kwargs.get("name"))
if kwargs.get("gpgautoimport") is True:
- global_cmd_opt.append("--gpg-auto-import-keys")
call_refresh = True
if cmd_opt:
@@ -1451,8 +1450,8 @@ def mod_repo(repo, **kwargs):
# when used with "zypper ar --refresh" or "zypper mr --refresh"
# --gpg-auto-import-keys is not doing anything
# so we need to specifically refresh here with --gpg-auto-import-keys
- refresh_opts = global_cmd_opt + ["refresh"] + [repo]
- __zypper__(root=root).xml.call(*refresh_opts)
+ kwargs.update({"repos": repo})
+ refresh_db(root=root, **kwargs)
elif not added and not cmd_opt:
comment = "Specified arguments did not result in modification of repo"
@@ -1463,7 +1462,7 @@ def mod_repo(repo, **kwargs):
return repo
-def refresh_db(force=None, root=None):
+def refresh_db(force=None, root=None, **kwargs):
"""
Trigger a repository refresh by calling ``zypper refresh``. Refresh will run
with ``--force`` if the "force=True" flag is passed on the CLI or
@@ -1474,6 +1473,17 @@ def refresh_db(force=None, root=None):
{'<database name>': Bool}
+ gpgautoimport : False
+ If set to True, automatically trust and import public GPG key for
+ the repository.
+
+ .. versionadded:: 3005
+
+ repos
+ Refresh just the specified repos
+
+ .. versionadded:: 3005
+
root
operate on a different root directory.
@@ -1494,11 +1504,22 @@ def refresh_db(force=None, root=None):
salt.utils.pkg.clear_rtag(__opts__)
ret = {}
refresh_opts = ["refresh"]
+ global_opts = []
if force is None:
force = __pillar__.get("zypper", {}).get("refreshdb_force", True)
if force:
refresh_opts.append("--force")
- out = __zypper__(root=root).refreshable.call(*refresh_opts)
+ repos = kwargs.get("repos", [])
+ refresh_opts.extend([repos] if not isinstance(repos, list) else repos)
+
+ if kwargs.get("gpgautoimport", False):
+ global_opts.append("--gpg-auto-import-keys")
+
+ # We do the actual call to zypper refresh.
+ # We ignore retcode 6 which is returned when there are no repositories defined.
+ out = __zypper__(root=root).refreshable.call(
+ *global_opts, *refresh_opts, success_retcodes=[0, 6]
+ )
for line in out.splitlines():
if not line:
@@ -1683,7 +1704,7 @@ def install(
'arch': '<new-arch>'}}}
"""
if refresh:
- refresh_db(root)
+ refresh_db(root, **kwargs)
try:
pkg_params, pkg_type = __salt__["pkg_resource.parse_targets"](
@@ -1980,7 +2001,7 @@ def upgrade(
cmd_update.insert(0, "--no-gpg-checks")
if refresh:
- refresh_db(root)
+ refresh_db(root, **kwargs)
if dryrun:
cmd_update.append("--dry-run")
@@ -2808,7 +2829,7 @@ def search(criteria, refresh=False, **kwargs):
root = kwargs.get("root", None)
if refresh:
- refresh_db(root)
+ refresh_db(root, **kwargs)
cmd = ["search"]
if kwargs.get("match") == "exact":
@@ -2959,7 +2980,7 @@ def download(*packages, **kwargs):
refresh = kwargs.get("refresh", False)
if refresh:
- refresh_db(root)
+ refresh_db(root, **kwargs)
pkg_ret = {}
for dld_result in (
@@ -3111,7 +3132,7 @@ def list_patches(refresh=False, root=None, **kwargs):
salt '*' pkg.list_patches
"""
if refresh:
- refresh_db(root)
+ refresh_db(root, **kwargs)
return _get_patches(root=root)
@@ -3205,7 +3226,7 @@ def resolve_capabilities(pkgs, refresh=False, root=None, **kwargs):
salt '*' pkg.resolve_capabilities resolve_capabilities=True w3m_ssl
"""
if refresh:
- refresh_db(root)
+ refresh_db(root, **kwargs)
ret = list()
for pkg in pkgs:
diff --git a/tests/unit/modules/test_zypperpkg.py b/tests/unit/modules/test_zypperpkg.py
index e85c93da3b..f5b6d74b6f 100644
--- a/tests/unit/modules/test_zypperpkg.py
+++ b/tests/unit/modules/test_zypperpkg.py
@@ -377,7 +377,12 @@ class ZypperTestCase(TestCase, LoaderModuleMockMixin):
run_out = {"stderr": "", "stdout": "\n".join(ref_out), "retcode": 0}
zypper_mock = MagicMock(return_value=run_out)
- call_kwargs = {"output_loglevel": "trace", "python_shell": False, "env": {}}
+ call_kwargs = {
+ "output_loglevel": "trace",
+ "python_shell": False,
+ "env": {},
+ "success_retcodes": [0, 6],
+ }
with patch.dict(zypper.__salt__, {"cmd.run_all": zypper_mock}):
with patch.object(salt.utils.pkg, "clear_rtag", Mock()):
result = zypper.refresh_db()
@@ -395,6 +400,73 @@ class ZypperTestCase(TestCase, LoaderModuleMockMixin):
zypper_mock.assert_called_with(
["zypper", "--non-interactive", "refresh", "--force"], **call_kwargs
)
+ zypper.refresh_db(gpgautoimport=True)
+ zypper_mock.assert_called_with(
+ [
+ "zypper",
+ "--non-interactive",
+ "--gpg-auto-import-keys",
+ "refresh",
+ "--force",
+ ],
+ **call_kwargs
+ )
+ zypper.refresh_db(gpgautoimport=True, force=True)
+ zypper_mock.assert_called_with(
+ [
+ "zypper",
+ "--non-interactive",
+ "--gpg-auto-import-keys",
+ "refresh",
+ "--force",
+ ],
+ **call_kwargs
+ )
+ zypper.refresh_db(gpgautoimport=True, force=False)
+ zypper_mock.assert_called_with(
+ [
+ "zypper",
+ "--non-interactive",
+ "--gpg-auto-import-keys",
+ "refresh",
+ ],
+ **call_kwargs
+ )
+ zypper.refresh_db(
+ gpgautoimport=True,
+ refresh=True,
+ repos="mock-repo-name",
+ root=None,
+ url="http://repo.url/some/path",
+ )
+ zypper_mock.assert_called_with(
+ [
+ "zypper",
+ "--non-interactive",
+ "--gpg-auto-import-keys",
+ "refresh",
+ "--force",
+ "mock-repo-name",
+ ],
+ **call_kwargs
+ )
+ zypper.refresh_db(
+ gpgautoimport=True,
+ repos="mock-repo-name",
+ root=None,
+ url="http://repo.url/some/path",
+ )
+ zypper_mock.assert_called_with(
+ [
+ "zypper",
+ "--non-interactive",
+ "--gpg-auto-import-keys",
+ "refresh",
+ "--force",
+ "mock-repo-name",
+ ],
+ **call_kwargs
+ )
def test_info_installed(self):
"""
@@ -2082,18 +2154,23 @@ Repository 'DUMMY' not found by its alias, number, or URI.
url = self.new_repo_config["url"]
name = self.new_repo_config["name"]
- with zypper_patcher:
+ with zypper_patcher, patch.object(zypper, "refresh_db", Mock()) as refreshmock:
zypper.mod_repo(name, **{"url": url, "gpgautoimport": True})
self.assertEqual(
zypper.__zypper__(root=None).xml.call.call_args_list,
[
call("ar", url, name),
- call("--gpg-auto-import-keys", "refresh", name),
],
)
self.assertTrue(
zypper.__zypper__(root=None).refreshable.xml.call.call_count == 0
)
+ refreshmock.assert_called_once_with(
+ gpgautoimport=True,
+ repos=name,
+ root=None,
+ url="http://repo.url/some/path",
+ )
def test_repo_noadd_nomod_ref(self):
"""
@@ -2112,15 +2189,17 @@ Repository 'DUMMY' not found by its alias, number, or URI.
"salt.modules.zypperpkg", **self.zypper_patcher_config
)
- with zypper_patcher:
+ with zypper_patcher, patch.object(zypper, "refresh_db", Mock()) as refreshmock:
zypper.mod_repo(name, **{"url": url, "gpgautoimport": True})
- self.assertEqual(
- zypper.__zypper__(root=None).xml.call.call_args_list,
- [call("--gpg-auto-import-keys", "refresh", name)],
- )
self.assertTrue(
zypper.__zypper__(root=None).refreshable.xml.call.call_count == 0
)
+ refreshmock.assert_called_once_with(
+ gpgautoimport=True,
+ repos=name,
+ root=None,
+ url="http://repo.url/some/path",
+ )
def test_repo_add_mod_ref(self):
"""
@@ -2133,10 +2212,10 @@ Repository 'DUMMY' not found by its alias, number, or URI.
zypper_patcher = patch.multiple(
"salt.modules.zypperpkg", **self.zypper_patcher_config
)
-
url = self.new_repo_config["url"]
name = self.new_repo_config["name"]
- with zypper_patcher:
+
+ with zypper_patcher, patch.object(zypper, "refresh_db", Mock()) as refreshmock:
zypper.mod_repo(
name, **{"url": url, "refresh": True, "gpgautoimport": True}
)
@@ -2144,11 +2223,17 @@ Repository 'DUMMY' not found by its alias, number, or URI.
zypper.__zypper__(root=None).xml.call.call_args_list,
[
call("ar", url, name),
- call("--gpg-auto-import-keys", "refresh", name),
],
)
zypper.__zypper__(root=None).refreshable.xml.call.assert_called_once_with(
- "--gpg-auto-import-keys", "mr", "--refresh", name
+ "mr", "--refresh", name
+ )
+ refreshmock.assert_called_once_with(
+ gpgautoimport=True,
+ refresh=True,
+ repos=name,
+ root=None,
+ url="http://repo.url/some/path",
)
def test_repo_noadd_mod_ref(self):
@@ -2168,16 +2253,19 @@ Repository 'DUMMY' not found by its alias, number, or URI.
"salt.modules.zypperpkg", **self.zypper_patcher_config
)
- with zypper_patcher:
+ with zypper_patcher, patch.object(zypper, "refresh_db", Mock()) as refreshmock:
zypper.mod_repo(
name, **{"url": url, "refresh": True, "gpgautoimport": True}
)
- self.assertEqual(
- zypper.__zypper__(root=None).xml.call.call_args_list,
- [call("--gpg-auto-import-keys", "refresh", name)],
- )
zypper.__zypper__(root=None).refreshable.xml.call.assert_called_once_with(
- "--gpg-auto-import-keys", "mr", "--refresh", name
+ "mr", "--refresh", name
+ )
+ refreshmock.assert_called_once_with(
+ gpgautoimport=True,
+ refresh=True,
+ repos=name,
+ root=None,
+ url="http://repo.url/some/path",
)
def test_wildcard_to_query_match_all(self):
--
2.39.2

View File

@ -0,0 +1,841 @@
From a36d6524e530eca32966f46597c88dbfd4b90e78 Mon Sep 17 00:00:00 2001
From: Martin Seidl <mseidl@suse.de>
Date: Tue, 27 Oct 2020 16:12:29 +0100
Subject: [PATCH] Allow vendor change option with zypper
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit
Fix novendorchange option (#284)
* Fixed novendorchange handling in zypperpkg
* refactor handling of novendorchange and fix tests
add patch support for allow vendor change option with zypper
Revert "add patch support for allow vendor change option with zypper"
This reverts commit cee4cc182b4740c912861c712dea7bc44eb70ffb.
Allow vendor change option with zypper (#313)
* add patch support for allow vendor change option with zypper
* adjust unit tests vendor change refactor, dropping cli arg
* Fix pr issues
Co-authored-by: Pablo Suárez Hernández <psuarezhernandez@suse.com>
* Fix unit test for allow vendor change on upgrade
* Add unit test with unsupported zypper version
Co-authored-by: Pablo Suárez Hernández <psuarezhernandez@suse.com>
Move vendor change logic to zypper class (#355)
* move vendor change logic to zypper class
* fix thing in zypperkg
* refactor unit tests
* Fix for syntax error
* Fix mocking issue in unit test
* fix issues with pr
* Fix for zypperpkg unit test after refactor of vendorchangeflags
Co-authored-by: Pablo Suárez Hernández <psuarezhernandez@suse.com>
* fix docs for vendor change options
* Fix doc strings, and clean up tests
Co-authored-by: Jochen Breuer <jbreuer@suse.de>
Co-authored-by: Pablo Suárez Hernández <psuarezhernandez@suse.com>
---
salt/modules/zypperpkg.py | 105 ++++--
tests/unit/modules/test_zypperpkg.py | 532 ++++++++++++++++++++++++++-
2 files changed, 612 insertions(+), 25 deletions(-)
diff --git a/salt/modules/zypperpkg.py b/salt/modules/zypperpkg.py
index 4bb10f445a..2da470bea3 100644
--- a/salt/modules/zypperpkg.py
+++ b/salt/modules/zypperpkg.py
@@ -36,6 +36,8 @@ import salt.utils.stringutils
import salt.utils.systemd
import salt.utils.versions
from salt.exceptions import CommandExecutionError, MinionError, SaltInvocationError
+
+# pylint: disable=import-error,redefined-builtin,no-name-in-module
from salt.utils.versions import LooseVersion
if salt.utils.files.is_fcntl_available():
@@ -140,6 +142,13 @@ class _Zypper:
self.__systemd_scope = False
self.__root = None
+ # Dist upgrade vendor change support (SLE12+)
+ self.dup_avc = False
+ # Install/Patch/Upgrade vendor change support (SLE15+)
+ self.inst_avc = False
+ # Flag if allow vendor change should be allowed
+ self.avc = False
+
# Call status
self.__called = False
@@ -184,6 +193,8 @@ class _Zypper:
self.__no_raise = True
elif item == "refreshable":
self.__refresh = True
+ elif item == "allow_vendor_change":
+ return self.__allow_vendor_change
elif item == "call":
return self.__call
else:
@@ -224,6 +235,33 @@ class _Zypper:
def pid(self):
return self.__call_result.get("pid", "")
+ def __allow_vendor_change(self, allowvendorchange, novendorchange):
+ if allowvendorchange or not novendorchange:
+ self.refresh_zypper_flags()
+ if self.dup_avc or self.inst_avc:
+ log.info("Enabling vendor change")
+ self.avc = True
+ else:
+ log.warning(
+ "Enabling/Disabling vendor changes is not supported on this Zypper version"
+ )
+ return self
+
+ def refresh_zypper_flags(self):
+ try:
+ zypp_version = version("zypper")
+ # zypper version 1.11.34 in SLE12 update supports vendor change for only dist upgrade
+ if version_cmp(zypp_version, "1.11.34") >= 0:
+ # zypper version supports vendor change for dist upgrade
+ self.dup_avc = True
+ # zypper version 1.14.8 in SLE15 update supports vendor change in install/patch/upgrading
+ if version_cmp(zypp_version, "1.14.8") >= 0:
+ self.inst_avc = True
+ else:
+ log.error("Failed to compare Zypper version")
+ except Exception as ex:
+ log.error("Unable to get Zypper version: {}".format(ex))
+
def _is_error(self):
"""
Is this is an error code?
@@ -362,6 +400,15 @@ class _Zypper:
if self.__systemd_scope:
cmd.extend(["systemd-run", "--scope"])
cmd.extend(self.__cmd)
+
+ if self.avc:
+ for i in ["install", "upgrade", "dist-upgrade"]:
+ if i in cmd:
+ if i == "install" and self.inst_avc:
+ cmd.insert(cmd.index(i) + 1, "--allow-vendor-change")
+ elif i in ["upgrade", "dist-upgrade"] and self.dup_avc:
+ cmd.insert(cmd.index(i) + 1, "--allow-vendor-change")
+
log.debug("Calling Zypper: %s", " ".join(cmd))
self.__call_result = __salt__["cmd.run_all"](cmd, **kwargs)
if self._check_result():
@@ -1490,6 +1537,8 @@ def install(
no_recommends=False,
root=None,
inclusion_detection=False,
+ novendorchange=True,
+ allowvendorchange=False,
**kwargs
):
"""
@@ -1537,6 +1586,13 @@ def install(
skip_verify
Skip the GPG verification check (e.g., ``--no-gpg-checks``)
+ novendorchange
+ DEPRECATED(use allowvendorchange): If set to True, do not allow vendor changes. Default: True
+
+ allowvendorchange
+ If set to True, vendor change is allowed. Default: False
+ If both allowvendorchange and novendorchange are passed, only allowvendorchange is used.
+
version
Can be either a version number, or the combination of a comparison
operator (<, >, <=, >=, =) and a version number (ex. '>1.2.3-4').
@@ -1702,6 +1758,7 @@ def install(
cmd_install.append(
kwargs.get("resolve_capabilities") and "--capability" or "--name"
)
+ # Install / patching / upgrade with vendor change support is only in SLE 15+ opensuse Leap 15+
if not refresh:
cmd_install.insert(0, "--no-refresh")
@@ -1738,6 +1795,7 @@ def install(
systemd_scope=systemd_scope,
root=root,
)
+ .allow_vendor_change(allowvendorchange, novendorchange)
.call(*cmd)
.splitlines()
):
@@ -1750,7 +1808,9 @@ def install(
while downgrades:
cmd = cmd_install + ["--force"] + downgrades[:500]
downgrades = downgrades[500:]
- __zypper__(no_repo_failure=ignore_repo_failure, root=root).call(*cmd)
+ __zypper__(no_repo_failure=ignore_repo_failure, root=root).allow_vendor_change(
+ allowvendorchange, novendorchange
+ ).call(*cmd)
_clean_cache()
new = (
@@ -1783,7 +1843,8 @@ def upgrade(
dryrun=False,
dist_upgrade=False,
fromrepo=None,
- novendorchange=False,
+ novendorchange=True,
+ allowvendorchange=False,
skip_verify=False,
no_recommends=False,
root=None,
@@ -1844,7 +1905,11 @@ def upgrade(
Specify a list of package repositories to upgrade from. Default: None
novendorchange
- If set to True, no allow vendor changes. Default: False
+ DEPRECATED(use allowvendorchange): If set to True, do not allow vendor changes. Default: True
+
+ allowvendorchange
+ If set to True, vendor change is allowed. Default: False
+ If both allowvendorchange and novendorchange are passed, only allowvendorchange is used.
skip_verify
Skip the GPG verification check (e.g., ``--no-gpg-checks``)
@@ -1927,28 +1992,18 @@ def upgrade(
cmd_update.extend(["--from" if dist_upgrade else "--repo", repo])
log.info("Targeting repos: %s", fromrepo)
- if dist_upgrade:
- if novendorchange:
- # TODO: Grains validation should be moved to Zypper class
- if __grains__["osrelease_info"][0] > 11:
- cmd_update.append("--no-allow-vendor-change")
- log.info("Disabling vendor changes")
- else:
- log.warning(
- "Disabling vendor changes is not supported on this Zypper version"
- )
+ if no_recommends:
+ cmd_update.append("--no-recommends")
+ log.info("Disabling recommendations")
- if no_recommends:
- cmd_update.append("--no-recommends")
- log.info("Disabling recommendations")
+ if dryrun:
+ # Creates a solver test case for debugging.
+ log.info("Executing debugsolver and performing a dry-run dist-upgrade")
+ __zypper__(systemd_scope=_systemd_scope(), root=root).allow_vendor_change(
+ allowvendorchange, novendorchange
+ ).noraise.call(*cmd_update + ["--debug-solver"])
- if dryrun:
- # Creates a solver test case for debugging.
- log.info("Executing debugsolver and performing a dry-run dist-upgrade")
- __zypper__(systemd_scope=_systemd_scope(), root=root).noraise.call(
- *cmd_update + ["--debug-solver"]
- )
- else:
+ if not dist_upgrade:
if name or pkgs:
try:
(pkg_params, _) = __salt__["pkg_resource.parse_targets"](
@@ -1962,7 +2017,9 @@ def upgrade(
old = list_pkgs(root=root, attr=diff_attr)
- __zypper__(systemd_scope=_systemd_scope(), root=root).noraise.call(*cmd_update)
+ __zypper__(systemd_scope=_systemd_scope(), root=root).allow_vendor_change(
+ allowvendorchange, novendorchange
+ ).noraise.call(*cmd_update)
_clean_cache()
new = list_pkgs(root=root, attr=diff_attr)
ret = salt.utils.data.compare_dicts(old, new)
diff --git a/tests/unit/modules/test_zypperpkg.py b/tests/unit/modules/test_zypperpkg.py
index 5e4c967520..e85c93da3b 100644
--- a/tests/unit/modules/test_zypperpkg.py
+++ b/tests/unit/modules/test_zypperpkg.py
@@ -137,6 +137,7 @@ class ZypperTestCase(TestCase, LoaderModuleMockMixin):
stdout_xml_snippet = '<?xml version="1.0"?><test foo="bar"/>'
sniffer = RunSniffer(stdout=stdout_xml_snippet)
+ zypper.__zypper__._reset()
with patch.dict("salt.modules.zypperpkg.__salt__", {"cmd.run_all": sniffer}):
self.assertEqual(zypper.__zypper__.call("foo"), stdout_xml_snippet)
self.assertEqual(len(sniffer.calls), 1)
@@ -628,13 +629,495 @@ class ZypperTestCase(TestCase, LoaderModuleMockMixin):
{"vim": "7.4.326-2.62", "fakepkg": ""},
)
+ def test_upgrade_without_vendor_change(self):
+ """
+ Dist-upgrade without vendor change option.
+ """
+ with patch(
+ "salt.modules.zypperpkg.refresh_db", MagicMock(return_value=True)
+ ), patch(
+ "salt.modules.zypperpkg._systemd_scope", MagicMock(return_value=False)
+ ):
+ with patch(
+ "salt.modules.zypperpkg.__zypper__.noraise.call", MagicMock()
+ ) as zypper_mock:
+ with patch(
+ "salt.modules.zypperpkg.list_pkgs",
+ MagicMock(side_effect=[{"vim": "1.1"}, {"vim": "1.2"}]),
+ ):
+ ret = zypper.upgrade(dist_upgrade=True)
+ self.assertDictEqual(ret, {"vim": {"old": "1.1", "new": "1.2"}})
+ zypper_mock.assert_any_call(
+ "dist-upgrade", "--auto-agree-with-licenses",
+ )
+
+ def test_refresh_zypper_flags(self):
+ zypper.__zypper__._reset()
+ with patch(
+ "salt.modules.zypperpkg.version", MagicMock(return_value="0.5")
+ ), patch.dict(
+ zypper.__salt__, {"lowpkg.version_cmp": MagicMock(side_effect=[-1, -1])}
+ ):
+ zypper.__zypper__.refresh_zypper_flags()
+ assert zypper.__zypper__.inst_avc == False
+ assert zypper.__zypper__.dup_avc == False
+ with patch(
+ "salt.modules.zypperpkg.version", MagicMock(return_value="1.11.34")
+ ), patch.dict(
+ zypper.__salt__, {"lowpkg.version_cmp": MagicMock(side_effect=[0, -1])}
+ ):
+ zypper.__zypper__.refresh_zypper_flags()
+ assert zypper.__zypper__.inst_avc == False
+ assert zypper.__zypper__.dup_avc == True
+ with patch(
+ "salt.modules.zypperpkg.version", MagicMock(return_value="1.14.8")
+ ), patch.dict(
+ zypper.__salt__, {"lowpkg.version_cmp": MagicMock(side_effect=[0, 0])}
+ ):
+ zypper.__zypper__.refresh_zypper_flags()
+ assert zypper.__zypper__.inst_avc == True
+ assert zypper.__zypper__.dup_avc == True
+
+ @patch("salt.modules.zypperpkg.__zypper__.refresh_zypper_flags", MagicMock())
+ def test_allow_vendor_change_function(self):
+ zypper.__zypper__._reset()
+ zypper.__zypper__.inst_avc = True
+ zypper.__zypper__.dup_avc = True
+ zypper.__zypper__.avc = False
+ zypper.__zypper__.allow_vendor_change(False, False)
+ assert zypper.__zypper__.avc == True
+ zypper.__zypper__.avc = False
+ zypper.__zypper__.allow_vendor_change(True, False)
+ assert zypper.__zypper__.avc == True
+ zypper.__zypper__.avc = False
+ zypper.__zypper__.allow_vendor_change(False, True)
+ assert zypper.__zypper__.avc == False
+ zypper.__zypper__.avc = False
+ zypper.__zypper__.allow_vendor_change(True, True)
+ assert zypper.__zypper__.avc == True
+
+ zypper.__zypper__._reset()
+ zypper.__zypper__.inst_avc = False
+ zypper.__zypper__.dup_avc = True
+ zypper.__zypper__.avc = False
+ zypper.__zypper__.allow_vendor_change(False, False)
+ assert zypper.__zypper__.avc == True
+ zypper.__zypper__.avc = False
+ zypper.__zypper__.allow_vendor_change(True, False)
+ assert zypper.__zypper__.avc == True
+ zypper.__zypper__.avc = False
+ zypper.__zypper__.allow_vendor_change(False, True)
+ assert zypper.__zypper__.avc == False
+ zypper.__zypper__.avc = False
+ zypper.__zypper__.allow_vendor_change(True, True)
+ assert zypper.__zypper__.avc == True
+
+ zypper.__zypper__._reset()
+ zypper.__zypper__.inst_avc = False
+ zypper.__zypper__.dup_avc = False
+ zypper.__zypper__.avc = False
+ zypper.__zypper__.allow_vendor_change(False, False)
+ assert zypper.__zypper__.avc == False
+ zypper.__zypper__.avc = False
+ zypper.__zypper__.allow_vendor_change(True, False)
+ assert zypper.__zypper__.avc == False
+ zypper.__zypper__.avc = False
+ zypper.__zypper__.allow_vendor_change(False, True)
+ assert zypper.__zypper__.avc == False
+ zypper.__zypper__.avc = False
+ zypper.__zypper__.allow_vendor_change(True, True)
+ assert zypper.__zypper__.avc == False
+
+ @patch(
+ "salt.utils.environment.get_module_environment",
+ MagicMock(return_value={"SALT_RUNNING": "1"}),
+ )
+ def test_zypper_call_dist_upgrade_with_avc_true(self):
+ cmd_run_mock = MagicMock(return_value={"retcode": 0, "stdout": None})
+ zypper.__zypper__._reset()
+ with patch.dict(zypper.__salt__, {"cmd.run_all": cmd_run_mock}), patch(
+ "salt.modules.zypperpkg.__zypper__.refresh_zypper_flags", MagicMock()
+ ), patch("salt.modules.zypperpkg.__zypper__._reset", MagicMock()):
+ zypper.__zypper__.dup_avc = True
+ zypper.__zypper__.avc = True
+ zypper.__zypper__.call("dist-upgrade")
+ cmd_run_mock.assert_any_call(
+ [
+ "zypper",
+ "--non-interactive",
+ "--no-refresh",
+ "dist-upgrade",
+ "--allow-vendor-change",
+ ],
+ output_loglevel="trace",
+ python_shell=False,
+ env={"SALT_RUNNING": "1"},
+ )
+
+ @patch(
+ "salt.utils.environment.get_module_environment",
+ MagicMock(return_value={"SALT_RUNNING": "1"}),
+ )
+ def test_zypper_call_dist_upgrade_with_avc_false(self):
+ cmd_run_mock = MagicMock(return_value={"retcode": 0, "stdout": None})
+ zypper.__zypper__._reset()
+ with patch.dict(zypper.__salt__, {"cmd.run_all": cmd_run_mock}), patch(
+ "salt.modules.zypperpkg.__zypper__.refresh_zypper_flags", MagicMock()
+ ), patch("salt.modules.zypperpkg.__zypper__._reset", MagicMock()):
+ zypper.__zypper__.dup_avc = False
+ zypper.__zypper__.avc = False
+ zypper.__zypper__.call("dist-upgrade")
+ cmd_run_mock.assert_any_call(
+ ["zypper", "--non-interactive", "--no-refresh", "dist-upgrade",],
+ output_loglevel="trace",
+ python_shell=False,
+ env={"SALT_RUNNING": "1"},
+ )
+
+ @patch(
+ "salt.utils.environment.get_module_environment",
+ MagicMock(return_value={"SALT_RUNNING": "1"}),
+ )
+ def test_zypper_call_install_with_avc_true(self):
+ cmd_run_mock = MagicMock(return_value={"retcode": 0, "stdout": None})
+ zypper.__zypper__._reset()
+ with patch.dict(zypper.__salt__, {"cmd.run_all": cmd_run_mock}), patch(
+ "salt.modules.zypperpkg.__zypper__.refresh_zypper_flags", MagicMock()
+ ), patch("salt.modules.zypperpkg.__zypper__._reset", MagicMock()):
+ zypper.__zypper__.inst_avc = True
+ zypper.__zypper__.avc = True
+ zypper.__zypper__.call("install")
+ cmd_run_mock.assert_any_call(
+ [
+ "zypper",
+ "--non-interactive",
+ "--no-refresh",
+ "install",
+ "--allow-vendor-change",
+ ],
+ output_loglevel="trace",
+ python_shell=False,
+ env={"SALT_RUNNING": "1"},
+ )
+
+ @patch(
+ "salt.utils.environment.get_module_environment",
+ MagicMock(return_value={"SALT_RUNNING": "1"}),
+ )
+ def test_zypper_call_install_with_avc_false(self):
+ cmd_run_mock = MagicMock(return_value={"retcode": 0, "stdout": None})
+ zypper.__zypper__._reset()
+ with patch.dict(zypper.__salt__, {"cmd.run_all": cmd_run_mock}), patch(
+ "salt.modules.zypperpkg.__zypper__.refresh_zypper_flags", MagicMock()
+ ), patch("salt.modules.zypperpkg.__zypper__._reset", MagicMock()):
+ zypper.__zypper__.inst_avc = False
+ zypper.__zypper__.dup_avc = True
+ zypper.__zypper__.avc = True
+ zypper.__zypper__.call("install")
+ cmd_run_mock.assert_any_call(
+ ["zypper", "--non-interactive", "--no-refresh", "install",],
+ output_loglevel="trace",
+ python_shell=False,
+ env={"SALT_RUNNING": "1"},
+ )
+
+ def test_upgrade_with_novendorchange_true(self):
+ """
+ Dist-upgrade without vendor change option.
+ """
+ zypper.__zypper__._reset()
+ with patch(
+ "salt.modules.zypperpkg.refresh_db", MagicMock(return_value=True)
+ ), patch(
+ "salt.modules.zypperpkg.__zypper__.refresh_zypper_flags", MagicMock()
+ ) as refresh_flags_mock, patch(
+ "salt.modules.zypperpkg._systemd_scope", MagicMock(return_value=False)
+ ):
+ with patch(
+ "salt.modules.zypperpkg.__zypper__.noraise.call", MagicMock()
+ ) as zypper_mock:
+ with patch(
+ "salt.modules.zypperpkg.list_pkgs",
+ MagicMock(side_effect=[{"vim": "1.1"}, {"vim": "1.2"}]),
+ ):
+ ret = zypper.upgrade(dist_upgrade=True, novendorchange=True)
+ refresh_flags_mock.assert_not_called()
+ zypper_mock.assert_any_call(
+ "dist-upgrade", "--auto-agree-with-licenses",
+ )
+
+ def test_upgrade_with_novendorchange_false(self):
+ """
+ Perform dist-upgrade with novendorchange set to False.
+ """
+ zypper.__zypper__._reset()
+ with patch(
+ "salt.modules.zypperpkg.refresh_db", MagicMock(return_value=True)
+ ), patch(
+ "salt.modules.zypperpkg.__zypper__.refresh_zypper_flags", MagicMock()
+ ), patch(
+ "salt.modules.zypperpkg._systemd_scope", MagicMock(return_value=False)
+ ):
+ with patch(
+ "salt.modules.zypperpkg.__zypper__.noraise.call", MagicMock()
+ ) as zypper_mock:
+ with patch(
+ "salt.modules.zypperpkg.list_pkgs",
+ MagicMock(side_effect=[{"vim": "1.1"}, {"vim": "1.1"}]),
+ ):
+ zypper.__zypper__.inst_avc = True
+ zypper.__zypper__.dup_avc = True
+ with patch.dict(
+ zypper.__salt__,
+ {
+ "pkg_resource.version": MagicMock(return_value="1.15"),
+ "lowpkg.version_cmp": MagicMock(return_value=1),
+ },
+ ):
+ ret = zypper.upgrade(
+ dist_upgrade=True,
+ dryrun=True,
+ fromrepo=["Dummy", "Dummy2"],
+ novendorchange=False,
+ )
+ assert zypper.__zypper__.avc == True
+
+ def test_upgrade_with_allowvendorchange_true(self):
+ """
+ Perform dist-upgrade with allowvendorchange set to True.
+ """
+ zypper.__zypper__._reset()
+ with patch(
+ "salt.modules.zypperpkg.refresh_db", MagicMock(return_value=True)
+ ), patch(
+ "salt.modules.zypperpkg.__zypper__.refresh_zypper_flags", MagicMock()
+ ), patch(
+ "salt.modules.zypperpkg._systemd_scope", MagicMock(return_value=False)
+ ):
+ with patch(
+ "salt.modules.zypperpkg.__zypper__.noraise.call", MagicMock()
+ ) as zypper_mock:
+ with patch(
+ "salt.modules.zypperpkg.list_pkgs",
+ MagicMock(side_effect=[{"vim": "1.1"}, {"vim": "1.1"}]),
+ ):
+ with patch.dict(
+ zypper.__salt__,
+ {
+ "pkg_resource.version": MagicMock(return_value="1.15"),
+ "lowpkg.version_cmp": MagicMock(return_value=1),
+ },
+ ):
+
+ zypper.__zypper__.inst_avc = True
+ zypper.__zypper__.dup_avc = True
+ ret = zypper.upgrade(
+ dist_upgrade=True,
+ dryrun=True,
+ fromrepo=["Dummy", "Dummy2"],
+ allowvendorchange=True,
+ )
+ assert zypper.__zypper__.avc == True
+
+ def test_upgrade_with_allowvendorchange_false(self):
+ """
+ Perform dist-upgrade with allowvendorchange set to False.
+ """
+ zypper.__zypper__._reset()
+ with patch(
+ "salt.modules.zypperpkg.refresh_db", MagicMock(return_value=True)
+ ), patch(
+ "salt.modules.zypperpkg.__zypper__.refresh_zypper_flags", MagicMock()
+ ), patch(
+ "salt.modules.zypperpkg._systemd_scope", MagicMock(return_value=False)
+ ):
+ with patch(
+ "salt.modules.zypperpkg.__zypper__.noraise.call", MagicMock()
+ ) as zypper_mock:
+ with patch(
+ "salt.modules.zypperpkg.list_pkgs",
+ MagicMock(side_effect=[{"vim": "1.1"}, {"vim": "1.1"}]),
+ ):
+ with patch.dict(
+ zypper.__salt__,
+ {
+ "pkg_resource.version": MagicMock(return_value="1.15"),
+ "lowpkg.version_cmp": MagicMock(return_value=1),
+ },
+ ):
+
+ zypper.__zypper__.inst_avc = True
+ zypper.__zypper__.dup_avc = True
+ ret = zypper.upgrade(
+ dist_upgrade=True,
+ dryrun=True,
+ fromrepo=["Dummy", "Dummy2"],
+ allowvendorchange=False,
+ )
+ assert zypper.__zypper__.avc == False
+
+ def test_upgrade_old_zypper(self):
+ zypper.__zypper__._reset()
+ with patch(
+ "salt.modules.zypperpkg.refresh_db", MagicMock(return_value=True)
+ ), patch(
+ "salt.modules.zypperpkg.__zypper__.refresh_zypper_flags", MagicMock()
+ ) as refresh_flags_mock, patch(
+ "salt.modules.zypperpkg._systemd_scope", MagicMock(return_value=False)
+ ):
+ with patch(
+ "salt.modules.zypperpkg.__zypper__.noraise.call", MagicMock()
+ ) as zypper_mock:
+ with patch(
+ "salt.modules.zypperpkg.list_pkgs",
+ MagicMock(side_effect=[{"vim": "1.1"}, {"vim": "1.1"}]),
+ ):
+ with patch.dict(
+ zypper.__salt__,
+ {
+ "pkg_resource.version": MagicMock(return_value="1.11"),
+ "lowpkg.version_cmp": MagicMock(return_value=-1),
+ },
+ ):
+ zypper.__zypper__.inst_avc = False
+ zypper.__zypper__.dup_avc = False
+ ret = zypper.upgrade(
+ dist_upgrade=True,
+ dryrun=True,
+ fromrepo=["Dummy", "Dummy2"],
+ novendorchange=False,
+ )
+ zypper.__zypper__.avc = False
+
+ def test_upgrade_success(self):
+ """
+ Test system upgrade and dist-upgrade success.
+
+ :return:
+ """
+ with patch(
+ "salt.modules.zypperpkg.refresh_db", MagicMock(return_value=True)
+ ), patch(
+ "salt.modules.zypperpkg._systemd_scope", MagicMock(return_value=False)
+ ):
+ with patch(
+ "salt.modules.zypperpkg.__zypper__.noraise.call", MagicMock()
+ ) as zypper_mock:
+ with patch(
+ "salt.modules.zypperpkg.list_pkgs",
+ MagicMock(side_effect=[{"vim": "1.1"}, {"vim": "1.2"}]),
+ ):
+ ret = zypper.upgrade()
+ self.assertDictEqual(ret, {"vim": {"old": "1.1", "new": "1.2"}})
+ zypper_mock.assert_any_call("update", "--auto-agree-with-licenses")
+
+ with patch(
+ "salt.modules.zypperpkg.list_pkgs",
+ MagicMock(
+ side_effect=[
+ {"kernel-default": "1.1"},
+ {"kernel-default": "1.1,1.2"},
+ ]
+ ),
+ ):
+ ret = zypper.upgrade()
+ self.assertDictEqual(
+ ret, {"kernel-default": {"old": "1.1", "new": "1.1,1.2"}}
+ )
+ zypper_mock.assert_any_call("update", "--auto-agree-with-licenses")
+
+ with patch(
+ "salt.modules.zypperpkg.list_pkgs",
+ MagicMock(side_effect=[{"vim": "1.1"}, {"vim": "1.1,1.2"}]),
+ ):
+ ret = zypper.upgrade()
+ self.assertDictEqual(ret, {"vim": {"old": "1.1", "new": "1.1,1.2"}})
+ zypper_mock.assert_any_call("update", "--auto-agree-with-licenses")
+
+ with patch(
+ "salt.modules.zypperpkg.list_pkgs",
+ MagicMock(side_effect=[{"vim": "1.1"}, {"vim": "1.1"}]),
+ ):
+ ret = zypper.upgrade(dist_upgrade=True, dryrun=True)
+ zypper_mock.assert_any_call(
+ "dist-upgrade", "--auto-agree-with-licenses", "--dry-run"
+ )
+ zypper_mock.assert_any_call(
+ "dist-upgrade",
+ "--auto-agree-with-licenses",
+ "--dry-run",
+ "--debug-solver",
+ )
+
+ with patch(
+ "salt.modules.zypperpkg.list_pkgs",
+ MagicMock(side_effect=[{"vim": "1.1"}, {"vim": "1.1"}]),
+ ):
+ ret = zypper.upgrade(
+ dist_upgrade=False, fromrepo=["Dummy", "Dummy2"], dryrun=False
+ )
+ zypper_mock.assert_any_call(
+ "update",
+ "--auto-agree-with-licenses",
+ "--repo",
+ "Dummy",
+ "--repo",
+ "Dummy2",
+ )
+
+ with patch(
+ "salt.modules.zypperpkg.list_pkgs",
+ MagicMock(side_effect=[{"vim": "1.1"}, {"vim": "1.1"}]),
+ ):
+ ret = zypper.upgrade(
+ dist_upgrade=True,
+ dryrun=True,
+ fromrepo=["Dummy", "Dummy2"],
+ novendorchange=True,
+ )
+ zypper_mock.assert_any_call(
+ "dist-upgrade",
+ "--auto-agree-with-licenses",
+ "--dry-run",
+ "--from",
+ "Dummy",
+ "--from",
+ "Dummy2",
+ )
+ zypper_mock.assert_any_call(
+ "dist-upgrade",
+ "--auto-agree-with-licenses",
+ "--dry-run",
+ "--from",
+ "Dummy",
+ "--from",
+ "Dummy2",
+ "--debug-solver",
+ )
+
+ with patch(
+ "salt.modules.zypperpkg.list_pkgs",
+ MagicMock(side_effect=[{"vim": "1.1"}, {"vim": "1.1"}]),
+ ):
+ ret = zypper.upgrade(
+ dist_upgrade=False, fromrepo=["Dummy", "Dummy2"], dryrun=False
+ )
+ zypper_mock.assert_any_call(
+ "update",
+ "--auto-agree-with-licenses",
+ "--repo",
+ "Dummy",
+ "--repo",
+ "Dummy2",
+ )
+
def test_upgrade_kernel(self):
"""
Test kernel package upgrade success.
:return:
"""
- with patch.dict(zypper.__grains__, {"osrelease_info": [12, 1]}), patch(
+ with patch(
"salt.modules.zypperpkg.refresh_db", MagicMock(return_value=True)
), patch(
"salt.modules.zypperpkg._systemd_scope", MagicMock(return_value=False)
@@ -672,6 +1155,53 @@ class ZypperTestCase(TestCase, LoaderModuleMockMixin):
},
)
+ def test_upgrade_failure(self):
+ """
+ Test system upgrade failure.
+
+ :return:
+ """
+ zypper_out = """
+Loading repository data...
+Reading installed packages...
+Computing distribution upgrade...
+Use 'zypper repos' to get the list of defined repositories.
+Repository 'DUMMY' not found by its alias, number, or URI.
+"""
+
+ class FailingZypperDummy:
+ def __init__(self):
+ self.stdout = zypper_out
+ self.stderr = ""
+ self.pid = 1234
+ self.exit_code = 555
+ self.noraise = MagicMock()
+ self.allow_vendor_change = self
+ self.SUCCESS_EXIT_CODES = [0]
+
+ def __call__(self, *args, **kwargs):
+ return self
+
+ with patch(
+ "salt.modules.zypperpkg.__zypper__", FailingZypperDummy()
+ ) as zypper_mock, patch(
+ "salt.modules.zypperpkg.refresh_db", MagicMock(return_value=True)
+ ), patch(
+ "salt.modules.zypperpkg._systemd_scope", MagicMock(return_value=False)
+ ):
+ zypper_mock.noraise.call = MagicMock()
+ with patch(
+ "salt.modules.zypperpkg.list_pkgs",
+ MagicMock(side_effect=[{"vim": "1.1"}, {"vim": "1.1"}]),
+ ):
+ with self.assertRaises(CommandExecutionError) as cmd_exc:
+ ret = zypper.upgrade(dist_upgrade=True, fromrepo=["DUMMY"])
+ self.assertEqual(cmd_exc.exception.info["changes"], {})
+ self.assertEqual(cmd_exc.exception.info["result"]["stdout"], zypper_out)
+ zypper_mock.noraise.call.assert_called_with(
+ "dist-upgrade", "--auto-agree-with-licenses", "--from", "DUMMY",
+ )
+
def test_upgrade_available(self):
"""
Test whether or not an upgrade is available for a given package.
--
2.39.2

File diff suppressed because it is too large Load Diff

View File

@ -0,0 +1,47 @@
From 8e9f2587aea52c1d0a5c07d5f9bb77a23ae4d4a6 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Pablo=20Su=C3=A1rez=20Hern=C3=A1ndez?=
<psuarezhernandez@suse.com>
Date: Tue, 23 May 2023 10:40:02 +0100
Subject: [PATCH] Avoid conflicts with dependencies versions
(bsc#1211612) (#581)
This commit fixes the Salt requirements files that are used to
generate the "requires.txt" file that is included in Salt egginfo
in order to be consistent with the installed packages
of Salt dependencies.
This prevents issues when resolving and validating Salt dependencies
with "pkg_resources" Python module.
---
requirements/base.txt | 2 +-
requirements/zeromq.txt | 2 +-
2 files changed, 2 insertions(+), 2 deletions(-)
diff --git a/requirements/base.txt b/requirements/base.txt
index c19d8804a2..437aa01d31 100644
--- a/requirements/base.txt
+++ b/requirements/base.txt
@@ -6,7 +6,7 @@ MarkupSafe
requests>=1.0.0
distro>=1.0.1
psutil>=5.0.0
-packaging>=21.3
+packaging>=17.1
looseversion
# We need contextvars for salt-ssh
contextvars
diff --git a/requirements/zeromq.txt b/requirements/zeromq.txt
index 1e9a815c1b..23d1ef25dc 100644
--- a/requirements/zeromq.txt
+++ b/requirements/zeromq.txt
@@ -1,5 +1,5 @@
-r base.txt
-r crypto.txt
-pyzmq>=20.0.0
+pyzmq>=17.1.2
pyzmq==25.0.2 ; sys_platform == "win32"
--
2.39.2

View File

@ -0,0 +1,26 @@
From 4d8c88d6e467c22ea74738743de5be6577f81085 Mon Sep 17 00:00:00 2001
From: Hubert Mantel <mantel@suse.de>
Date: Mon, 27 Nov 2017 13:55:13 +0100
Subject: [PATCH] avoid excessive syslogging by watchdog cronjob (#58)
---
pkg/old/suse/salt-minion | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/pkg/old/suse/salt-minion b/pkg/old/suse/salt-minion
index 2e418094ed..73a91ebd62 100755
--- a/pkg/old/suse/salt-minion
+++ b/pkg/old/suse/salt-minion
@@ -55,7 +55,7 @@ WATCHDOG_CRON="/etc/cron.d/salt-minion"
set_watchdog() {
if [ ! -f $WATCHDOG_CRON ]; then
- echo -e '* * * * * root /usr/bin/salt-daemon-watcher --with-init\n' > $WATCHDOG_CRON
+ echo -e '-* * * * * root /usr/bin/salt-daemon-watcher --with-init\n' > $WATCHDOG_CRON
# Kick the watcher for 1 minute immediately, because cron will wake up only afterwards
/usr/bin/salt-daemon-watcher --with-init & disown
fi
--
2.39.2

View File

@ -0,0 +1,112 @@
From 2ca37fe7d2a03ad86ed738f2636fe240b9f4467e Mon Sep 17 00:00:00 2001
From: Victor Zhestkov <35733135+vzhestkov@users.noreply.github.com>
Date: Tue, 6 Oct 2020 12:36:41 +0300
Subject: [PATCH] bsc#1176024: Fix file/directory user and group
ownership containing UTF-8 characters (#275)
* Fix check_perm typos of file module
* Fix UTF8 support for user/group ownership operations with file module and state
* Fix UTF8 support for user/group ownership operations with file module and state
Co-authored-by: Victor Zhestkov <vzhestkov@vz-thinkpad.vzhestkov.net>
---
salt/modules/file.py | 20 ++++++++++----------
salt/states/file.py | 12 ++++++++++--
2 files changed, 20 insertions(+), 12 deletions(-)
diff --git a/salt/modules/file.py b/salt/modules/file.py
index 69d7992f5a..4612d65511 100644
--- a/salt/modules/file.py
+++ b/salt/modules/file.py
@@ -245,7 +245,7 @@ def group_to_gid(group):
try:
if isinstance(group, int):
return group
- return grp.getgrnam(group).gr_gid
+ return grp.getgrnam(salt.utils.stringutils.to_str(group)).gr_gid
except KeyError:
return ""
@@ -336,7 +336,7 @@ def user_to_uid(user):
try:
if isinstance(user, int):
return user
- return pwd.getpwnam(user).pw_uid
+ return pwd.getpwnam(salt.utils.stringutils.to_str(user)).pw_uid
except KeyError:
return ""
@@ -5133,8 +5133,8 @@ def check_perms(
salt.utils.platform.is_windows() and not user_to_uid(user) == cur["uid"]
) or (
not salt.utils.platform.is_windows()
- and not user == cur["user"]
- and not user == cur["uid"]
+ and not salt.utils.stringutils.to_str(user) == cur["user"]
+ and not salt.utils.stringutils.to_str(user) == cur["uid"]
):
perms["cuser"] = user
@@ -5143,8 +5143,8 @@ def check_perms(
salt.utils.platform.is_windows() and not group_to_gid(group) == cur["gid"]
) or (
not salt.utils.platform.is_windows()
- and not group == cur["group"]
- and not group == cur["gid"]
+ and not salt.utils.stringutils.to_str(group) == cur["group"]
+ and not salt.utils.stringutils.to_str(group) == cur["gid"]
):
perms["cgroup"] = group
@@ -5188,8 +5188,8 @@ def check_perms(
salt.utils.platform.is_windows() and not user_to_uid(user) == post["uid"]
) or (
not salt.utils.platform.is_windows()
- and not user == post["user"]
- and not user == post["uid"]
+ and not salt.utils.stringutils.to_str(user) == post["user"]
+ and not salt.utils.stringutils.to_str(user) == post["uid"]
):
if __opts__["test"] is True:
ret["changes"]["user"] = user
@@ -5204,8 +5204,8 @@ def check_perms(
salt.utils.platform.is_windows() and not group_to_gid(group) == post["gid"]
) or (
not salt.utils.platform.is_windows()
- and not group == post["group"]
- and not group == post["gid"]
+ and not salt.utils.stringutils.to_str(group) == post["group"]
+ and not salt.utils.stringutils.to_str(group) == post["gid"]
):
if __opts__["test"] is True:
ret["changes"]["group"] = group
diff --git a/salt/states/file.py b/salt/states/file.py
index 9f32151b8b..024e5e34ce 100644
--- a/salt/states/file.py
+++ b/salt/states/file.py
@@ -864,9 +864,17 @@ def _check_dir_meta(name, user, group, mode, follow_symlinks=False):
if not stats:
changes["directory"] = "new"
return changes
- if user is not None and user != stats["user"] and user != stats.get("uid"):
+ if (
+ user is not None
+ and salt.utils.stringutils.to_str(user) != stats["user"]
+ and user != stats.get("uid")
+ ):
changes["user"] = user
- if group is not None and group != stats["group"] and group != stats.get("gid"):
+ if (
+ group is not None
+ and salt.utils.stringutils.to_str(group) != stats["group"]
+ and group != stats.get("gid")
+ ):
changes["group"] = group
# Normalize the dir mode
smode = salt.utils.files.normalize_mode(stats["mode"])
--
2.39.2

View File

@ -0,0 +1,30 @@
From b7a554e2dec3351c91c237497fe37cbc30d664bd Mon Sep 17 00:00:00 2001
From: Victor Zhestkov <Victor.Zhestkov@suse.com>
Date: Thu, 1 Sep 2022 14:42:24 +0300
Subject: [PATCH] Change the delimiters to prevent possible tracebacks on
some packages with dpkg_lowpkg
* Use another separator on query to dpkg-query
* Fix the test test_dpkg_lowpkg::test_info
---
salt/modules/dpkg_lowpkg.py | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/salt/modules/dpkg_lowpkg.py b/salt/modules/dpkg_lowpkg.py
index 4d716c8772..78990492cf 100644
--- a/salt/modules/dpkg_lowpkg.py
+++ b/salt/modules/dpkg_lowpkg.py
@@ -347,7 +347,7 @@ def _get_pkg_info(*packages, **kwargs):
if build_date:
pkg_data["build_date"] = build_date
pkg_data["build_date_time_t"] = build_date_t
- pkg_data["description"] = pkg_descr.split(":", 1)[-1]
+ pkg_data["description"] = pkg_descr
ret.append(pkg_data)
return ret
--
2.39.2

View File

@ -0,0 +1,37 @@
From fcb43735942ca1b796f656d5647e49a93f770bb2 Mon Sep 17 00:00:00 2001
From: Alexander Graul <agraul@suse.com>
Date: Tue, 10 Jan 2023 15:04:01 +0100
Subject: [PATCH] Control the collection of lvm grains via config
lvm grain collection can take a long time on systems with a lot of
volumes and volume groups. On one server we measured ~3 minutes, which
is way too long for grains.
This change is backwards-compatible, leaving the lvm grain collection
enabled by default. Users with a lot of lvm volumes/volume groups can
disable these grains in the minion config by setting
enable_lvm_grains: False
---
salt/grains/lvm.py | 4 ++++
1 file changed, 4 insertions(+)
diff --git a/salt/grains/lvm.py b/salt/grains/lvm.py
index 586b187ddb..f5c406cb44 100644
--- a/salt/grains/lvm.py
+++ b/salt/grains/lvm.py
@@ -17,6 +17,10 @@ __salt__ = {
log = logging.getLogger(__name__)
+def __virtual__():
+ return __opts__.get("enable_lvm_grains", True)
+
+
def lvm():
"""
Return list of LVM devices
--
2.39.2

View File

@ -0,0 +1,351 @@
From 2fbc5b580661b094cf79cc5da0860745b72088e4 Mon Sep 17 00:00:00 2001
From: Alexander Graul <agraul@suse.com>
Date: Tue, 25 Jan 2022 17:08:57 +0100
Subject: [PATCH] Debian info_installed compatibility (#50453)
Remove unused variable
Get unit ticks installation time
Pass on unix ticks installation date time
Implement function to figure out package build time
Unify arch attribute
Add 'attr' support.
Use attr parameter in aptpkg
Add 'all_versions' output structure backward compatibility
Fix docstring
Add UT for generic test of function 'info'
Add UT for 'info' function with the parameter 'attr'
Add UT for info_installed's 'attr' param
Fix docstring
Add returned type check
Add UT for info_installed with 'all_versions=True' output structure
Refactor UT for 'owner' function
Refactor UT: move to decorators, add more checks
Schedule TODO for next refactoring of UT 'show' function
Refactor UT: get rid of old assertion way, flatten tests
Refactor UT: move to native assertions, cleanup noise, flatten complexity for better visibility what is tested
Lintfix: too many empty lines
Adjust architecture getter according to the lowpkg info
Fix wrong Git merge: missing function signature
Reintroducing reverted changes
Reintroducing changes from commit e20362f6f053eaa4144583604e6aac3d62838419
that got partially reverted by this commit:
https://github.com/openSUSE/salt/commit/d0ef24d113bdaaa29f180031b5da384cffe08c64#diff-820e6ce667fe3afddbc1b9cf1682fdef
---
salt/modules/aptpkg.py | 24 ++++-
salt/modules/dpkg_lowpkg.py | 110 ++++++++++++++++++----
tests/pytests/unit/modules/test_aptpkg.py | 52 ++++++++++
3 files changed, 167 insertions(+), 19 deletions(-)
diff --git a/salt/modules/aptpkg.py b/salt/modules/aptpkg.py
index 8e89744b5e..938e37cc9e 100644
--- a/salt/modules/aptpkg.py
+++ b/salt/modules/aptpkg.py
@@ -3440,6 +3440,15 @@ def info_installed(*names, **kwargs):
.. versionadded:: 2016.11.3
+ attr
+ Comma-separated package attributes. If no 'attr' is specified, all available attributes returned.
+
+ Valid attributes are:
+ version, vendor, release, build_date, build_date_time_t, install_date, install_date_time_t,
+ build_host, group, source_rpm, arch, epoch, size, license, signature, packager, url, summary, description.
+
+ .. versionadded:: Neon
+
CLI Example:
.. code-block:: bash
@@ -3450,11 +3459,19 @@ def info_installed(*names, **kwargs):
"""
kwargs = salt.utils.args.clean_kwargs(**kwargs)
failhard = kwargs.pop("failhard", True)
+ kwargs.pop("errors", None) # Only for compatibility with RPM
+ attr = kwargs.pop("attr", None) # Package attributes to return
+ all_versions = kwargs.pop(
+ "all_versions", False
+ ) # This is for backward compatible structure only
+
if kwargs:
salt.utils.args.invalid_kwargs(kwargs)
ret = dict()
- for pkg_name, pkg_nfo in __salt__["lowpkg.info"](*names, failhard=failhard).items():
+ for pkg_name, pkg_nfo in __salt__["lowpkg.info"](
+ *names, failhard=failhard, attr=attr
+ ).items():
t_nfo = dict()
if pkg_nfo.get("status", "ii")[1] != "i":
continue # return only packages that are really installed
@@ -3475,7 +3492,10 @@ def info_installed(*names, **kwargs):
else:
t_nfo[key] = value
- ret[pkg_name] = t_nfo
+ if all_versions:
+ ret.setdefault(pkg_name, []).append(t_nfo)
+ else:
+ ret[pkg_name] = t_nfo
return ret
diff --git a/salt/modules/dpkg_lowpkg.py b/salt/modules/dpkg_lowpkg.py
index eefd852c51..4d716c8772 100644
--- a/salt/modules/dpkg_lowpkg.py
+++ b/salt/modules/dpkg_lowpkg.py
@@ -234,6 +234,44 @@ def file_dict(*packages, **kwargs):
return {"errors": errors, "packages": ret}
+def _get_pkg_build_time(name):
+ """
+ Get package build time, if possible.
+
+ :param name:
+ :return:
+ """
+ iso_time = iso_time_t = None
+ changelog_dir = os.path.join("/usr/share/doc", name)
+ if os.path.exists(changelog_dir):
+ for fname in os.listdir(changelog_dir):
+ try:
+ iso_time_t = int(os.path.getmtime(os.path.join(changelog_dir, fname)))
+ iso_time = (
+ datetime.datetime.utcfromtimestamp(iso_time_t).isoformat() + "Z"
+ )
+ break
+ except OSError:
+ pass
+
+ # Packager doesn't care about Debian standards, therefore Plan B: brute-force it.
+ if not iso_time:
+ for pkg_f_path in __salt__["cmd.run"](
+ "dpkg-query -L {}".format(name)
+ ).splitlines():
+ if "changelog" in pkg_f_path.lower() and os.path.exists(pkg_f_path):
+ try:
+ iso_time_t = int(os.path.getmtime(pkg_f_path))
+ iso_time = (
+ datetime.datetime.utcfromtimestamp(iso_time_t).isoformat() + "Z"
+ )
+ break
+ except OSError:
+ pass
+
+ return iso_time, iso_time_t
+
+
def _get_pkg_info(*packages, **kwargs):
"""
Return list of package information. If 'packages' parameter is empty,
@@ -257,7 +295,7 @@ def _get_pkg_info(*packages, **kwargs):
cmd = (
"dpkg-query -W -f='package:" + bin_var + "\\n"
"revision:${binary:Revision}\\n"
- "architecture:${Architecture}\\n"
+ "arch:${Architecture}\\n"
"maintainer:${Maintainer}\\n"
"summary:${Summary}\\n"
"source:${source:Package}\\n"
@@ -299,10 +337,17 @@ def _get_pkg_info(*packages, **kwargs):
key, value = pkg_info_line.split(":", 1)
if value:
pkg_data[key] = value
- install_date = _get_pkg_install_time(pkg_data.get("package"))
- if install_date:
- pkg_data["install_date"] = install_date
- pkg_data["description"] = pkg_descr
+ install_date, install_date_t = _get_pkg_install_time(
+ pkg_data.get("package"), pkg_data.get("arch")
+ )
+ if install_date:
+ pkg_data["install_date"] = install_date
+ pkg_data["install_date_time_t"] = install_date_t # Unix ticks
+ build_date, build_date_t = _get_pkg_build_time(pkg_data.get("package"))
+ if build_date:
+ pkg_data["build_date"] = build_date
+ pkg_data["build_date_time_t"] = build_date_t
+ pkg_data["description"] = pkg_descr.split(":", 1)[-1]
ret.append(pkg_data)
return ret
@@ -327,24 +372,34 @@ def _get_pkg_license(pkg):
return ", ".join(sorted(licenses))
-def _get_pkg_install_time(pkg):
+def _get_pkg_install_time(pkg, arch):
"""
Return package install time, based on the /var/lib/dpkg/info/<package>.list
:return:
"""
- iso_time = None
+ iso_time = iso_time_t = None
+ loc_root = "/var/lib/dpkg/info"
if pkg is not None:
- location = "/var/lib/dpkg/info/{}.list".format(pkg)
- if os.path.exists(location):
- iso_time = (
- datetime.datetime.utcfromtimestamp(
- int(os.path.getmtime(location))
- ).isoformat()
- + "Z"
- )
+ locations = []
+ if arch is not None and arch != "all":
+ locations.append(os.path.join(loc_root, "{}:{}.list".format(pkg, arch)))
- return iso_time
+ locations.append(os.path.join(loc_root, "{}.list".format(pkg)))
+ for location in locations:
+ try:
+ iso_time_t = int(os.path.getmtime(location))
+ iso_time = (
+ datetime.datetime.utcfromtimestamp(iso_time_t).isoformat() + "Z"
+ )
+ break
+ except OSError:
+ pass
+
+ if iso_time is None:
+ log.debug('Unable to get package installation time for package "%s".', pkg)
+
+ return iso_time, iso_time_t
def _get_pkg_ds_avail():
@@ -394,6 +449,15 @@ def info(*packages, **kwargs):
.. versionadded:: 2016.11.3
+ attr
+ Comma-separated package attributes. If no 'attr' is specified, all available attributes returned.
+
+ Valid attributes are:
+ version, vendor, release, build_date, build_date_time_t, install_date, install_date_time_t,
+ build_host, group, source_rpm, arch, epoch, size, license, signature, packager, url, summary, description.
+
+ .. versionadded:: Neon
+
CLI Example:
.. code-block:: bash
@@ -408,6 +472,10 @@ def info(*packages, **kwargs):
kwargs = salt.utils.args.clean_kwargs(**kwargs)
failhard = kwargs.pop("failhard", True)
+ attr = kwargs.pop("attr", None) or None
+ if attr:
+ attr = attr.split(",")
+
if kwargs:
salt.utils.args.invalid_kwargs(kwargs)
@@ -435,6 +503,14 @@ def info(*packages, **kwargs):
lic = _get_pkg_license(pkg["package"])
if lic:
pkg["license"] = lic
- ret[pkg["package"]] = pkg
+
+ # Remove keys that aren't in attrs
+ pkg_name = pkg["package"]
+ if attr:
+ for k in list(pkg.keys())[:]:
+ if k not in attr:
+ del pkg[k]
+
+ ret[pkg_name] = pkg
return ret
diff --git a/tests/pytests/unit/modules/test_aptpkg.py b/tests/pytests/unit/modules/test_aptpkg.py
index b69402578a..4226957eeb 100644
--- a/tests/pytests/unit/modules/test_aptpkg.py
+++ b/tests/pytests/unit/modules/test_aptpkg.py
@@ -360,6 +360,58 @@ def test_info_installed(lowpkg_info_var):
assert len(aptpkg.info_installed()) == 1
+def test_info_installed_attr(lowpkg_info_var):
+ """
+ Test info_installed 'attr'.
+ This doesn't test 'attr' behaviour per se, since the underlying function is in dpkg.
+ The test should simply not raise exceptions for invalid parameter.
+
+ :return:
+ """
+ expected_pkg = {
+ "url": "http://www.gnu.org/software/wget/",
+ "packager": "Ubuntu Developers <ubuntu-devel-discuss@lists.ubuntu.com>",
+ "name": "wget",
+ "install_date": "2016-08-30T22:20:15Z",
+ "description": "retrieves files from the web",
+ "version": "1.15-1ubuntu1.14.04.2",
+ "architecture": "amd64",
+ "group": "web",
+ "source": "wget",
+ }
+ mock = MagicMock(return_value=lowpkg_info_var)
+ with patch.dict(aptpkg.__salt__, {"lowpkg.info": mock}):
+ ret = aptpkg.info_installed("wget", attr="foo,bar")
+ assert ret["wget"] == expected_pkg
+
+
+def test_info_installed_all_versions(lowpkg_info_var):
+ """
+ Test info_installed 'all_versions'.
+ Since Debian won't return same name packages with the different names,
+ this should just return different structure, backward compatible with
+ the RPM equivalents.
+
+ :return:
+ """
+ expected_pkg = {
+ "url": "http://www.gnu.org/software/wget/",
+ "packager": "Ubuntu Developers <ubuntu-devel-discuss@lists.ubuntu.com>",
+ "name": "wget",
+ "install_date": "2016-08-30T22:20:15Z",
+ "description": "retrieves files from the web",
+ "version": "1.15-1ubuntu1.14.04.2",
+ "architecture": "amd64",
+ "group": "web",
+ "source": "wget",
+ }
+ mock = MagicMock(return_value=lowpkg_info_var)
+ with patch.dict(aptpkg.__salt__, {"lowpkg.info": mock}):
+ ret = aptpkg.info_installed("wget", all_versions=True)
+ assert isinstance(ret, dict)
+ assert ret["wget"] == [expected_pkg]
+
+
def test_owner():
"""
Test - Return the name of the package that owns the file.
--
2.39.2

View File

@ -0,0 +1,39 @@
From f02e97df14e4927efbb5ddd3a2bbc5a650330b9e Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Pablo=20Su=C3=A1rez=20Hern=C3=A1ndez?=
<psuarezhernandez@suse.com>
Date: Fri, 26 May 2023 16:50:51 +0100
Subject: [PATCH] Define __virtualname__ for transactional_update module
(#582)
This prevents problems with LazyLoader when importing this module,
which was wrongly exposing functions for this module under "state.*"
---
salt/modules/transactional_update.py | 4 +++-
1 file changed, 3 insertions(+), 1 deletion(-)
diff --git a/salt/modules/transactional_update.py b/salt/modules/transactional_update.py
index 6493966782..658ebccc6b 100644
--- a/salt/modules/transactional_update.py
+++ b/salt/modules/transactional_update.py
@@ -285,6 +285,8 @@ from salt.modules.state import _check_queue, _prior_running_states, _wait, runni
__func_alias__ = {"apply_": "apply"}
+__virtualname__ = "transactional_update"
+
log = logging.getLogger(__name__)
@@ -300,7 +302,7 @@ def __virtual__():
_prior_running_states, globals()
)
running = salt.utils.functools.namespaced_function(running, globals())
- return True
+ return __virtualname__
else:
return (False, "Module transactional_update requires a transactional system")
--
2.39.2

View File

@ -0,0 +1,130 @@
From c2a35c0c0aac093d0cc35181c1fda0162e22ac4c Mon Sep 17 00:00:00 2001
From: Victor Zhestkov <35733135+vzhestkov@users.noreply.github.com>
Date: Mon, 8 Nov 2021 18:09:53 +0300
Subject: [PATCH] dnfnotify pkgset plugin implementation - 3002.2 (#450)
* dnfnotify pkgset plugin implementation
* Fix failing check
* Add error reporting if not possible to save cookie
* Try to create dir if not exists
* Show the exception message instead of file name
* Fix isort
---
scripts/suse/dnf/plugins/README.md | 21 +++++++++
scripts/suse/dnf/plugins/dnfnotify.conf | 2 +
scripts/suse/dnf/plugins/dnfnotify.py | 60 +++++++++++++++++++++++++
3 files changed, 83 insertions(+)
create mode 100644 scripts/suse/dnf/plugins/README.md
create mode 100644 scripts/suse/dnf/plugins/dnfnotify.conf
create mode 100644 scripts/suse/dnf/plugins/dnfnotify.py
diff --git a/scripts/suse/dnf/plugins/README.md b/scripts/suse/dnf/plugins/README.md
new file mode 100644
index 0000000000..b19428608e
--- /dev/null
+++ b/scripts/suse/dnf/plugins/README.md
@@ -0,0 +1,21 @@
+## What it is
+
+Plugin which provides a notification mechanism to Salt, if DNF is
+used outside of it.
+
+## Installation
+
+Configuration files are going to:
+
+ `/etc/dnf/plugins/[name].conf`
+
+Plugin itself goes to:
+
+ `%{python_sitelib}/dnf-plugins/[name].py`
+ The path to dnf-plugins directory is Python version dependent.
+
+## Permissions
+
+User: root
+Group: root
+Mode: 644
diff --git a/scripts/suse/dnf/plugins/dnfnotify.conf b/scripts/suse/dnf/plugins/dnfnotify.conf
new file mode 100644
index 0000000000..e7002aa3e9
--- /dev/null
+++ b/scripts/suse/dnf/plugins/dnfnotify.conf
@@ -0,0 +1,2 @@
+[main]
+enabled = 1
diff --git a/scripts/suse/dnf/plugins/dnfnotify.py b/scripts/suse/dnf/plugins/dnfnotify.py
new file mode 100644
index 0000000000..6e9df85f71
--- /dev/null
+++ b/scripts/suse/dnf/plugins/dnfnotify.py
@@ -0,0 +1,60 @@
+import hashlib
+import os
+
+import dnf
+from dnfpluginscore import _, logger
+
+
+class DnfNotifyPlugin(dnf.Plugin):
+ def __init__(self, base, cli):
+ super().__init__(base, cli)
+ self.base = base
+ self.cookie_file = "/var/cache/salt/minion/rpmdb.cookie"
+ if os.path.exists("/var/lib/rpm/rpmdb.sqlite"):
+ self.rpmdb_file = "/var/lib/rpm/rpmdb.sqlite"
+ else:
+ self.rpmdb_file = "/var/lib/rpm/Packages"
+
+ def transaction(self):
+ if "SALT_RUNNING" not in os.environ:
+ try:
+ ck_dir = os.path.dirname(self.cookie_file)
+ if not os.path.exists(ck_dir):
+ os.makedirs(ck_dir)
+ with open(self.cookie_file, "w") as ck_fh:
+ ck_fh.write(
+ "{chksum} {mtime}\n".format(
+ chksum=self._get_checksum(), mtime=self._get_mtime()
+ )
+ )
+ except OSError as e:
+ logger.error(_("Unable to save cookie file: %s"), e)
+
+ def _get_mtime(self):
+ """
+ Get the modified time of the RPM Database.
+
+ Returns:
+ Unix ticks
+ """
+ return (
+ os.path.exists(self.rpmdb_file)
+ and int(os.path.getmtime(self.rpmdb_file))
+ or 0
+ )
+
+ def _get_checksum(self):
+ """
+ Get the checksum of the RPM Database.
+
+ Returns:
+ hexdigest
+ """
+ digest = hashlib.sha256()
+ with open(self.rpmdb_file, "rb") as rpm_db_fh:
+ while True:
+ buff = rpm_db_fh.read(0x1000)
+ if not buff:
+ break
+ digest.update(buff)
+ return digest.hexdigest()
--
2.39.2

View File

@ -0,0 +1,155 @@
From da544d7ab09899717e57a02321928ceaf3c6465c Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Pablo=20Su=C3=A1rez=20Hern=C3=A1ndez?=
<psuarezhernandez@suse.com>
Date: Tue, 22 Aug 2023 11:43:46 +0100
Subject: [PATCH] Do not fail on bad message pack message (bsc#1213441,
CVE-2023-20897) (#595)
* Do not fail on bad message pack message
Fix unit test after backporting to openSUSE/release/3006.0
* Better error message when inconsistent decoded payload
---------
Co-authored-by: Daniel A. Wozniak <dwozniak@vmware.com>
---
salt/channel/server.py | 10 +++
salt/transport/zeromq.py | 6 +-
tests/pytests/unit/transport/test_zeromq.py | 69 +++++++++++++++++++++
3 files changed, 84 insertions(+), 1 deletion(-)
diff --git a/salt/channel/server.py b/salt/channel/server.py
index a2117f2934..b6d51fef08 100644
--- a/salt/channel/server.py
+++ b/salt/channel/server.py
@@ -22,6 +22,7 @@ import salt.utils.minions
import salt.utils.platform
import salt.utils.stringutils
import salt.utils.verify
+from salt.exceptions import SaltDeserializationError
from salt.utils.cache import CacheCli
try:
@@ -252,6 +253,15 @@ class ReqServerChannel:
return False
def _decode_payload(self, payload):
+ # Sometimes msgpack deserialization of random bytes could be successful,
+ # so we need to ensure payload in good shape to process this function.
+ if (
+ not isinstance(payload, dict)
+ or "enc" not in payload
+ or "load" not in payload
+ ):
+ raise SaltDeserializationError("bad load received on socket!")
+
# we need to decrypt it
if payload["enc"] == "aes":
try:
diff --git a/salt/transport/zeromq.py b/salt/transport/zeromq.py
index 3ec7f7726c..7cc6b9987f 100644
--- a/salt/transport/zeromq.py
+++ b/salt/transport/zeromq.py
@@ -428,7 +428,11 @@ class RequestServer(salt.transport.base.DaemonizedRequestServer):
@salt.ext.tornado.gen.coroutine
def handle_message(self, stream, payload):
- payload = self.decode_payload(payload)
+ try:
+ payload = self.decode_payload(payload)
+ except salt.exceptions.SaltDeserializationError:
+ self.stream.send(self.encode_payload({"msg": "bad load"}))
+ return
# XXX: Is header really needed?
reply = yield self.message_handler(payload)
self.stream.send(self.encode_payload(reply))
diff --git a/tests/pytests/unit/transport/test_zeromq.py b/tests/pytests/unit/transport/test_zeromq.py
index 10bb4917b8..c7cbc53864 100644
--- a/tests/pytests/unit/transport/test_zeromq.py
+++ b/tests/pytests/unit/transport/test_zeromq.py
@@ -11,6 +11,7 @@ import threading
import time
import uuid
+import msgpack
import pytest
import salt.channel.client
@@ -1404,3 +1405,71 @@ async def test_req_chan_auth_v2_new_minion_without_master_pub(pki_dir, io_loop):
assert "sig" in ret
ret = client.auth.handle_signin_response(signin_payload, ret)
assert ret == "retry"
+
+
+async def test_req_server_garbage_request(io_loop):
+ """
+ Validate invalid msgpack messages will not raise exceptions in the
+ RequestServers's message handler.
+ """
+ opts = salt.config.master_config("")
+ request_server = salt.transport.zeromq.RequestServer(opts)
+
+ def message_handler(payload):
+ return payload
+
+ request_server.post_fork(message_handler, io_loop)
+
+ byts = msgpack.dumps({"foo": "bar"})
+ badbyts = byts[:3] + b"^M" + byts[3:]
+
+ valid_response = msgpack.dumps({"msg": "bad load"})
+
+ with MagicMock() as stream:
+ request_server.stream = stream
+
+ try:
+ await request_server.handle_message(stream, badbyts)
+ except Exception as exc: # pylint: disable=broad-except
+ pytest.fail("Exception was raised {}".format(exc))
+
+ request_server.stream.send.assert_called_once_with(valid_response)
+
+
+async def test_req_chan_bad_payload_to_decode(pki_dir, io_loop):
+ opts = {
+ "master_uri": "tcp://127.0.0.1:4506",
+ "interface": "127.0.0.1",
+ "ret_port": 4506,
+ "ipv6": False,
+ "sock_dir": ".",
+ "pki_dir": str(pki_dir.joinpath("minion")),
+ "id": "minion",
+ "__role": "minion",
+ "keysize": 4096,
+ "max_minions": 0,
+ "auto_accept": False,
+ "open_mode": False,
+ "key_pass": None,
+ "publish_port": 4505,
+ "auth_mode": 1,
+ "acceptance_wait_time": 3,
+ "acceptance_wait_time_max": 3,
+ }
+ SMaster.secrets["aes"] = {
+ "secret": multiprocessing.Array(
+ ctypes.c_char,
+ salt.utils.stringutils.to_bytes(salt.crypt.Crypticle.generate_key_string()),
+ ),
+ "reload": salt.crypt.Crypticle.generate_key_string,
+ }
+ master_opts = dict(opts, pki_dir=str(pki_dir.joinpath("master")))
+ master_opts["master_sign_pubkey"] = False
+ server = salt.channel.server.ReqServerChannel.factory(master_opts)
+
+ with pytest.raises(salt.exceptions.SaltDeserializationError):
+ server._decode_payload(None)
+ with pytest.raises(salt.exceptions.SaltDeserializationError):
+ server._decode_payload({})
+ with pytest.raises(salt.exceptions.SaltDeserializationError):
+ server._decode_payload(12345)
--
2.41.0

View File

@ -0,0 +1,46 @@
From 4060d4cd24ac0fbcf83c1521553921d76c070a57 Mon Sep 17 00:00:00 2001
From: Bo Maryniuk <bo@suse.de>
Date: Fri, 21 Sep 2018 17:31:39 +0200
Subject: [PATCH] Do not load pip state if there is no 3rd party
dependencies
Safe import 3rd party dependency
---
salt/modules/pip.py | 13 ++++++++++++-
1 file changed, 12 insertions(+), 1 deletion(-)
diff --git a/salt/modules/pip.py b/salt/modules/pip.py
index c4de0c2984..a60bdca0bb 100644
--- a/salt/modules/pip.py
+++ b/salt/modules/pip.py
@@ -96,6 +96,12 @@ import salt.utils.url
import salt.utils.versions
from salt.exceptions import CommandExecutionError, CommandNotFoundError
+try:
+ import pkg_resources
+except ImportError:
+ pkg_resources = None
+
+
# This needs to be named logger so we don't shadow it in pip.install
logger = logging.getLogger(__name__) # pylint: disable=invalid-name
@@ -114,7 +120,12 @@ def __virtual__():
entire filesystem. If it's not installed in a conventional location, the
user is required to provide the location of pip each time it is used.
"""
- return "pip"
+ if pkg_resources is None:
+ ret = False, 'Package dependency "pkg_resource" is missing'
+ else:
+ ret = "pip"
+
+ return ret
def _pip_bin_env(cwd, bin_env):
--
2.39.2

View File

@ -0,0 +1,39 @@
From da6adc6984f21c0d93afff0b0ff55d0eb0ee3e9f Mon Sep 17 00:00:00 2001
From: Alexander Graul <agraul@suse.com>
Date: Tue, 17 Aug 2021 11:52:00 +0200
Subject: [PATCH] Don't use shell="/sbin/nologin" in requisites
Using shell="/sbin/nologin" in an onlyif/unless requisite does not
really make sense since the condition can't be run. shell=/sbin/nologin
is also a common argument, e.g. for user.present.
Fixes: bsc#1188259
---
salt/state.py | 9 +++++++--
1 file changed, 7 insertions(+), 2 deletions(-)
diff --git a/salt/state.py b/salt/state.py
index cb434a91e7..cda84a0fcb 100644
--- a/salt/state.py
+++ b/salt/state.py
@@ -986,9 +986,14 @@ class State:
cmd_opts[run_cmd_arg] = low_data.get(run_cmd_arg)
if "shell" in low_data and "shell" not in cmd_opts_exclude:
- cmd_opts["shell"] = low_data["shell"]
+ shell = low_data["shell"]
elif "shell" in self.opts["grains"]:
- cmd_opts["shell"] = self.opts["grains"].get("shell")
+ shell = self.opts["grains"].get("shell")
+ else:
+ shell = None
+ # /sbin/nologin always causes the onlyif / unless cmd to fail
+ if shell is not None and shell != "/sbin/nologin":
+ cmd_opts["shell"] = shell
if "onlyif" in low_data:
_ret = self._run_check_onlyif(low_data, cmd_opts)
--
2.39.2

View File

@ -0,0 +1,34 @@
From e7ef0b5a46cc69a9237033d8dc4dbc60c0802a20 Mon Sep 17 00:00:00 2001
From: Alexander Graul <agraul@suse.com>
Date: Mon, 31 Jan 2022 10:24:26 +0100
Subject: [PATCH] Drop serial from event.unpack in cli.batch_async
---
salt/cli/batch_async.py | 3 +--
1 file changed, 1 insertion(+), 2 deletions(-)
diff --git a/salt/cli/batch_async.py b/salt/cli/batch_async.py
index 09aa85258b..1012ce37cc 100644
--- a/salt/cli/batch_async.py
+++ b/salt/cli/batch_async.py
@@ -9,7 +9,6 @@ import logging
import salt.client
import salt.ext.tornado
-import tornado
from salt.cli.batch import batch_get_eauth, batch_get_opts, get_bnum
log = logging.getLogger(__name__)
@@ -109,7 +108,7 @@ class BatchAsync:
if not self.event:
return
try:
- mtag, data = self.event.unpack(raw, self.event.serial)
+ mtag, data = self.event.unpack(raw)
for (pattern, op) in self.patterns:
if mtag.startswith(pattern[:-1]):
minion = data["id"]
--
2.39.2

File diff suppressed because it is too large Load Diff

View File

@ -0,0 +1,68 @@
From e9d52cb97d619a76355c5aa1d03b733c125c0f22 Mon Sep 17 00:00:00 2001
From: Maximilian Meister <mmeister@suse.de>
Date: Thu, 3 May 2018 15:52:23 +0200
Subject: [PATCH] enable passing a unix_socket for mysql returners
(bsc#1091371)
quick fix for:
https://bugzilla.suse.com/show_bug.cgi?id=1091371
the upstream patch will go through some bigger refactoring of
the mysql drivers to be cleaner
this patch should only be temporary and can be dropped again once
the refactor is done upstream
Signed-off-by: Maximilian Meister <mmeister@suse.de>
---
salt/returners/mysql.py | 5 +++++
1 file changed, 5 insertions(+)
diff --git a/salt/returners/mysql.py b/salt/returners/mysql.py
index 67b44004ac..a220f11465 100644
--- a/salt/returners/mysql.py
+++ b/salt/returners/mysql.py
@@ -17,6 +17,7 @@ config. These are the defaults:
mysql.pass: 'salt'
mysql.db: 'salt'
mysql.port: 3306
+ mysql.unix_socket: '/tmp/mysql.sock'
SSL is optional. The defaults are set to None. If you do not want to use SSL,
either exclude these options or set them to None.
@@ -42,6 +43,7 @@ optional. The following ssl options are simply for illustration purposes:
alternative.mysql.ssl_ca: '/etc/pki/mysql/certs/localhost.pem'
alternative.mysql.ssl_cert: '/etc/pki/mysql/certs/localhost.crt'
alternative.mysql.ssl_key: '/etc/pki/mysql/certs/localhost.key'
+ alternative.mysql.unix_socket: '/tmp/mysql.sock'
Should you wish the returner data to be cleaned out every so often, set
`keep_jobs_seconds` to the number of hours for the jobs to live in the
@@ -197,6 +199,7 @@ def _get_options(ret=None):
"ssl_ca": None,
"ssl_cert": None,
"ssl_key": None,
+ "unix_socket": "/tmp/mysql.sock",
}
attrs = {
@@ -208,6 +211,7 @@ def _get_options(ret=None):
"ssl_ca": "ssl_ca",
"ssl_cert": "ssl_cert",
"ssl_key": "ssl_key",
+ "unix_socket": "unix_socket",
}
_options = salt.returners.get_returner_options(
@@ -266,6 +270,7 @@ def _get_serv(ret=None, commit=False):
db=_options.get("db"),
port=_options.get("port"),
ssl=ssl_options,
+ unix_socket=_options.get("unix_socket"),
)
try:
--
2.39.2

View File

@ -0,0 +1,425 @@
From 17452801e950b3f49a9ec7ef444e3d57862cd9bf Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Pablo=20Su=C3=A1rez=20Hern=C3=A1ndez?=
<psuarezhernandez@suse.com>
Date: Wed, 7 Jul 2021 15:41:48 +0100
Subject: [PATCH] Enhance openscap module: add "xccdf_eval" call (#386)
* Enhance openscap module: add xccdf_eval call
* Allow 'tailoring_file' and 'tailoring_id' parameters
* Fix wrong reference to subprocess.PIPE in openscap unit tests
* Add changes suggested by pre-commit
Co-authored-by: Michael Calmer <mc@suse.de>
Fix error handling in openscap module (bsc#1188647) (#409)
---
changelog/59756.added | 1 +
salt/modules/openscap.py | 116 +++++++++++++-
tests/unit/modules/test_openscap.py | 234 ++++++++++++++++++++++++++++
3 files changed, 350 insertions(+), 1 deletion(-)
create mode 100644 changelog/59756.added
diff --git a/changelog/59756.added b/changelog/59756.added
new file mode 100644
index 0000000000..a59fb21eef
--- /dev/null
+++ b/changelog/59756.added
@@ -0,0 +1 @@
+adding new call for openscap xccdf eval supporting new parameters
diff --git a/salt/modules/openscap.py b/salt/modules/openscap.py
index 770c8e7c04..216fd89eef 100644
--- a/salt/modules/openscap.py
+++ b/salt/modules/openscap.py
@@ -4,6 +4,7 @@ Module for OpenSCAP Management
"""
+import os.path
import shlex
import shutil
import tempfile
@@ -55,6 +56,117 @@ _OSCAP_EXIT_CODES_MAP = {
}
+def xccdf_eval(xccdffile, ovalfiles=None, **kwargs):
+ """
+ Run ``oscap xccdf eval`` commands on minions.
+ It uses cp.push_dir to upload the generated files to the salt master
+ in the master's minion files cachedir
+ (defaults to ``/var/cache/salt/master/minions/minion-id/files``)
+
+ It needs ``file_recv`` set to ``True`` in the master configuration file.
+
+ xccdffile
+ the path to the xccdf file to evaluate
+
+ ovalfiles
+ additional oval definition files
+
+ profile
+ the name of Profile to be evaluated
+
+ rule
+ the name of a single rule to be evaluated
+
+ oval_results
+ save OVAL results as well (True or False)
+
+ results
+ write XCCDF Results into given file
+
+ report
+ write HTML report into given file
+
+ fetch_remote_resources
+ download remote content referenced by XCCDF (True or False)
+
+ tailoring_file
+ use given XCCDF Tailoring file
+
+ tailoring_id
+ use given DS component as XCCDF Tailoring file
+
+ remediate
+ automatically execute XCCDF fix elements for failed rules.
+ Use of this option is always at your own risk. (True or False)
+
+ CLI Example:
+
+ .. code-block:: bash
+
+ salt '*' openscap.xccdf_eval /usr/share/openscap/scap-yast2sec-xccdf.xml profile=Default
+
+ """
+ success = True
+ error = None
+ upload_dir = None
+ returncode = None
+ if not ovalfiles:
+ ovalfiles = []
+
+ cmd_opts = ["oscap", "xccdf", "eval"]
+ if kwargs.get("oval_results"):
+ cmd_opts.append("--oval-results")
+ if "results" in kwargs:
+ cmd_opts.append("--results")
+ cmd_opts.append(kwargs["results"])
+ if "report" in kwargs:
+ cmd_opts.append("--report")
+ cmd_opts.append(kwargs["report"])
+ if "profile" in kwargs:
+ cmd_opts.append("--profile")
+ cmd_opts.append(kwargs["profile"])
+ if "rule" in kwargs:
+ cmd_opts.append("--rule")
+ cmd_opts.append(kwargs["rule"])
+ if "tailoring_file" in kwargs:
+ cmd_opts.append("--tailoring-file")
+ cmd_opts.append(kwargs["tailoring_file"])
+ if "tailoring_id" in kwargs:
+ cmd_opts.append("--tailoring-id")
+ cmd_opts.append(kwargs["tailoring_id"])
+ if kwargs.get("fetch_remote_resources"):
+ cmd_opts.append("--fetch-remote-resources")
+ if kwargs.get("remediate"):
+ cmd_opts.append("--remediate")
+ cmd_opts.append(xccdffile)
+ cmd_opts.extend(ovalfiles)
+
+ if not os.path.exists(xccdffile):
+ success = False
+ error = "XCCDF File '{}' does not exist".format(xccdffile)
+ for ofile in ovalfiles:
+ if success and not os.path.exists(ofile):
+ success = False
+ error = "Oval File '{}' does not exist".format(ofile)
+
+ if success:
+ tempdir = tempfile.mkdtemp()
+ proc = Popen(cmd_opts, stdout=PIPE, stderr=PIPE, cwd=tempdir)
+ (stdoutdata, error) = proc.communicate()
+ success = _OSCAP_EXIT_CODES_MAP.get(proc.returncode, False)
+ if proc.returncode < 0:
+ error += "\nKilled by signal {}\n".format(proc.returncode).encode('ascii')
+ returncode = proc.returncode
+ if success:
+ __salt__["cp.push_dir"](tempdir)
+ upload_dir = tempdir
+ shutil.rmtree(tempdir, ignore_errors=True)
+
+ return dict(
+ success=success, upload_dir=upload_dir, error=error, returncode=returncode
+ )
+
+
def xccdf(params):
"""
Run ``oscap xccdf`` commands on minions.
@@ -92,7 +204,9 @@ def xccdf(params):
tempdir = tempfile.mkdtemp()
proc = Popen(shlex.split(cmd), stdout=PIPE, stderr=PIPE, cwd=tempdir)
(stdoutdata, error) = proc.communicate()
- success = _OSCAP_EXIT_CODES_MAP[proc.returncode]
+ success = _OSCAP_EXIT_CODES_MAP.get(proc.returncode, False)
+ if proc.returncode < 0:
+ error += "\nKilled by signal {}\n".format(proc.returncode).encode('ascii')
returncode = proc.returncode
if success:
__salt__["cp.push_dir"](tempdir)
diff --git a/tests/unit/modules/test_openscap.py b/tests/unit/modules/test_openscap.py
index 045c37f7c9..301c1869ec 100644
--- a/tests/unit/modules/test_openscap.py
+++ b/tests/unit/modules/test_openscap.py
@@ -21,6 +21,7 @@ class OpenscapTestCase(TestCase):
"salt.modules.openscap.tempfile.mkdtemp",
Mock(return_value=self.random_temp_dir),
),
+ patch("salt.modules.openscap.os.path.exists", Mock(return_value=True)),
]
for patcher in patchers:
self.apply_patch(patcher)
@@ -211,3 +212,236 @@ class OpenscapTestCase(TestCase):
"returncode": None,
},
)
+
+ def test_new_openscap_xccdf_eval_success(self):
+ with patch(
+ "salt.modules.openscap.Popen",
+ MagicMock(
+ return_value=Mock(
+ **{"returncode": 0, "communicate.return_value": ("", "")}
+ )
+ ),
+ ):
+ response = openscap.xccdf_eval(
+ self.policy_file,
+ profile="Default",
+ oval_results=True,
+ results="results.xml",
+ report="report.html",
+ )
+
+ self.assertEqual(openscap.tempfile.mkdtemp.call_count, 1)
+ expected_cmd = [
+ "oscap",
+ "xccdf",
+ "eval",
+ "--oval-results",
+ "--results",
+ "results.xml",
+ "--report",
+ "report.html",
+ "--profile",
+ "Default",
+ self.policy_file,
+ ]
+ openscap.Popen.assert_called_once_with(
+ expected_cmd,
+ cwd=openscap.tempfile.mkdtemp.return_value,
+ stderr=subprocess.PIPE,
+ stdout=subprocess.PIPE,
+ )
+ openscap.__salt__["cp.push_dir"].assert_called_once_with(
+ self.random_temp_dir
+ )
+ self.assertEqual(openscap.shutil.rmtree.call_count, 1)
+ self.assertEqual(
+ response,
+ {
+ "upload_dir": self.random_temp_dir,
+ "error": "",
+ "success": True,
+ "returncode": 0,
+ },
+ )
+
+ def test_new_openscap_xccdf_eval_success_with_extra_ovalfiles(self):
+ with patch(
+ "salt.modules.openscap.Popen",
+ MagicMock(
+ return_value=Mock(
+ **{"returncode": 0, "communicate.return_value": ("", "")}
+ )
+ ),
+ ):
+ response = openscap.xccdf_eval(
+ self.policy_file,
+ ["/usr/share/xml/another-oval.xml", "/usr/share/xml/oval.xml"],
+ profile="Default",
+ oval_results=True,
+ results="results.xml",
+ report="report.html",
+ )
+
+ self.assertEqual(openscap.tempfile.mkdtemp.call_count, 1)
+ expected_cmd = [
+ "oscap",
+ "xccdf",
+ "eval",
+ "--oval-results",
+ "--results",
+ "results.xml",
+ "--report",
+ "report.html",
+ "--profile",
+ "Default",
+ self.policy_file,
+ "/usr/share/xml/another-oval.xml",
+ "/usr/share/xml/oval.xml",
+ ]
+ openscap.Popen.assert_called_once_with(
+ expected_cmd,
+ cwd=openscap.tempfile.mkdtemp.return_value,
+ stderr=subprocess.PIPE,
+ stdout=subprocess.PIPE,
+ )
+ openscap.__salt__["cp.push_dir"].assert_called_once_with(
+ self.random_temp_dir
+ )
+ self.assertEqual(openscap.shutil.rmtree.call_count, 1)
+ self.assertEqual(
+ response,
+ {
+ "upload_dir": self.random_temp_dir,
+ "error": "",
+ "success": True,
+ "returncode": 0,
+ },
+ )
+
+ def test_new_openscap_xccdf_eval_success_with_failing_rules(self):
+ with patch(
+ "salt.modules.openscap.Popen",
+ MagicMock(
+ return_value=Mock(
+ **{"returncode": 2, "communicate.return_value": ("", "some error")}
+ )
+ ),
+ ):
+ response = openscap.xccdf_eval(
+ self.policy_file,
+ profile="Default",
+ oval_results=True,
+ results="results.xml",
+ report="report.html",
+ )
+
+ self.assertEqual(openscap.tempfile.mkdtemp.call_count, 1)
+ expected_cmd = [
+ "oscap",
+ "xccdf",
+ "eval",
+ "--oval-results",
+ "--results",
+ "results.xml",
+ "--report",
+ "report.html",
+ "--profile",
+ "Default",
+ self.policy_file,
+ ]
+ openscap.Popen.assert_called_once_with(
+ expected_cmd,
+ cwd=openscap.tempfile.mkdtemp.return_value,
+ stderr=subprocess.PIPE,
+ stdout=subprocess.PIPE,
+ )
+ openscap.__salt__["cp.push_dir"].assert_called_once_with(
+ self.random_temp_dir
+ )
+ self.assertEqual(openscap.shutil.rmtree.call_count, 1)
+ self.assertEqual(
+ response,
+ {
+ "upload_dir": self.random_temp_dir,
+ "error": "some error",
+ "success": True,
+ "returncode": 2,
+ },
+ )
+
+ def test_new_openscap_xccdf_eval_success_ignore_unknown_params(self):
+ with patch(
+ "salt.modules.openscap.Popen",
+ MagicMock(
+ return_value=Mock(
+ **{"returncode": 2, "communicate.return_value": ("", "some error")}
+ )
+ ),
+ ):
+ response = openscap.xccdf_eval(
+ "/policy/file",
+ param="Default",
+ profile="Default",
+ oval_results=True,
+ results="results.xml",
+ report="report.html",
+ )
+
+ self.assertEqual(
+ response,
+ {
+ "upload_dir": self.random_temp_dir,
+ "error": "some error",
+ "success": True,
+ "returncode": 2,
+ },
+ )
+ expected_cmd = [
+ "oscap",
+ "xccdf",
+ "eval",
+ "--oval-results",
+ "--results",
+ "results.xml",
+ "--report",
+ "report.html",
+ "--profile",
+ "Default",
+ "/policy/file",
+ ]
+ openscap.Popen.assert_called_once_with(
+ expected_cmd,
+ cwd=openscap.tempfile.mkdtemp.return_value,
+ stderr=subprocess.PIPE,
+ stdout=subprocess.PIPE,
+ )
+
+ def test_new_openscap_xccdf_eval_evaluation_error(self):
+ with patch(
+ "salt.modules.openscap.Popen",
+ MagicMock(
+ return_value=Mock(
+ **{
+ "returncode": 1,
+ "communicate.return_value": ("", "evaluation error"),
+ }
+ )
+ ),
+ ):
+ response = openscap.xccdf_eval(
+ self.policy_file,
+ profile="Default",
+ oval_results=True,
+ results="results.xml",
+ report="report.html",
+ )
+
+ self.assertEqual(
+ response,
+ {
+ "upload_dir": None,
+ "error": "evaluation error",
+ "success": False,
+ "returncode": 1,
+ },
+ )
--
2.39.2

25
fix-bsc-1065792.patch Normal file
View File

@ -0,0 +1,25 @@
From 42a5e5d1a898d7b8bdb56a94decf525204ebccb8 Mon Sep 17 00:00:00 2001
From: Bo Maryniuk <bo@suse.de>
Date: Thu, 14 Dec 2017 16:21:40 +0100
Subject: [PATCH] Fix bsc#1065792
---
salt/states/service.py | 1 +
1 file changed, 1 insertion(+)
diff --git a/salt/states/service.py b/salt/states/service.py
index 93c7c4fb07..0d8a4efa03 100644
--- a/salt/states/service.py
+++ b/salt/states/service.py
@@ -78,6 +78,7 @@ def __virtual__():
Only make these states available if a service provider has been detected or
assigned for this minion
"""
+ __salt__._load_all()
if "service.start" in __salt__:
return __virtualname__
else:
--
2.39.2

View File

@ -0,0 +1,39 @@
From 7be26299bc7b6ec2065ab13857f088dc500ee882 Mon Sep 17 00:00:00 2001
From: Jochen Breuer <jbreuer@suse.de>
Date: Thu, 6 Sep 2018 17:15:18 +0200
Subject: [PATCH] Fix for SUSE Expanded Support detection
A SUSE ES installation has both the centos-release and redhat-release
files. Since os_data only used the centos-release file to detect a
CentOS installation, this led to SUSE ES being detected as CentOS.
This change also adds a check for redhat-release and then marks the
'lsb_distrib_id' as RedHat.
---
salt/grains/core.py | 9 +++++++++
1 file changed, 9 insertions(+)
diff --git a/salt/grains/core.py b/salt/grains/core.py
index 710c57f28f..1199ad274f 100644
--- a/salt/grains/core.py
+++ b/salt/grains/core.py
@@ -2279,6 +2279,15 @@ def _legacy_linux_distribution_data(grains, os_release, lsb_has_error):
log.trace("Parsing distrib info from /etc/centos-release")
# CentOS Linux
grains["lsb_distrib_id"] = "CentOS"
+ # Maybe CentOS Linux; could also be SUSE Expanded Support.
+ # SUSE ES has both, centos-release and redhat-release.
+ if os.path.isfile("/etc/redhat-release"):
+ with salt.utils.files.fopen("/etc/redhat-release") as ifile:
+ for line in ifile:
+ if "red hat enterprise linux server" in line.lower():
+ # This is a SUSE Expanded Support Rhel installation
+ grains["lsb_distrib_id"] = "RedHat"
+ break
with salt.utils.files.fopen("/etc/centos-release") as ifile:
for line in ifile:
# Need to pull out the version and codename
--
2.39.2

52
fix-issue-2068-test.patch Normal file
View File

@ -0,0 +1,52 @@
From b0e713d6946526b894837406c0760c262e4312a1 Mon Sep 17 00:00:00 2001
From: Bo Maryniuk <bo@suse.de>
Date: Wed, 9 Jan 2019 16:08:19 +0100
Subject: [PATCH] Fix issue #2068 test
Skip injecting `__call__` if chunk is not dict.
This also fixes `integration/modules/test_state.py:StateModuleTest.test_exclude` that tests `include` and `exclude` state directives containing only a list of strings.
Minor update: more correct is-dict check.
---
salt/state.py | 9 ++++++---
1 file changed, 6 insertions(+), 3 deletions(-)
diff --git a/salt/state.py b/salt/state.py
index 8352a8defc..cb434a91e7 100644
--- a/salt/state.py
+++ b/salt/state.py
@@ -12,6 +12,7 @@ The data sent to the state calls is as follows:
"""
+import collections
import copy
import datetime
import fnmatch
@@ -3507,16 +3508,18 @@ class State:
"""
for chunk in high:
state = high[chunk]
+ if not isinstance(state, collections.Mapping):
+ continue
for state_ref in state:
needs_default = True
+ if not isinstance(state[state_ref], list):
+ continue
for argset in state[state_ref]:
if isinstance(argset, str):
needs_default = False
break
if needs_default:
- order = state[state_ref].pop(-1)
- state[state_ref].append("__call__")
- state[state_ref].append(order)
+ state[state_ref].insert(-1, "__call__")
def call_high(self, high, orchestration_jid=None):
"""
--
2.39.2

View File

@ -0,0 +1,30 @@
From 5158ebce305d961a2d2e3cb3f889b0cde593c4a0 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Ond=C5=99ej=20Hole=C4=8Dek?= <oholecek@aaannz.eu>
Date: Mon, 10 May 2021 16:23:19 +0200
Subject: [PATCH] Fix missing minion returns in batch mode (#360)
Don't close pub if there are pending events, otherwise events will be lost
resulting in empty minion returns.
Co-authored-by: Denis V. Meltsaykin <dmeltsaykin@mirantis.com>
---
salt/client/__init__.py | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/salt/client/__init__.py b/salt/client/__init__.py
index bcda56c9b4..b2617e4554 100644
--- a/salt/client/__init__.py
+++ b/salt/client/__init__.py
@@ -976,7 +976,7 @@ class LocalClient:
self._clean_up_subscriptions(pub_data["jid"])
finally:
- if not was_listening:
+ if not was_listening and not self.event.pending_events:
self.event.close_pub()
def cmd_full_return(
--
2.39.2

View File

@ -0,0 +1,50 @@
From 5f6488ab9211927c421e3d87a4ee84fe659ceb8b Mon Sep 17 00:00:00 2001
From: Victor Zhestkov <vzhestkov@suse.com>
Date: Mon, 27 Jun 2022 18:03:49 +0300
Subject: [PATCH] Fix ownership of salt thin directory when using the
Salt Bundle
---
salt/client/ssh/ssh_py_shim.py | 25 ++++++++++++++++++++++++-
1 file changed, 24 insertions(+), 1 deletion(-)
diff --git a/salt/client/ssh/ssh_py_shim.py b/salt/client/ssh/ssh_py_shim.py
index 293ea1b7fa..95171f7aea 100644
--- a/salt/client/ssh/ssh_py_shim.py
+++ b/salt/client/ssh/ssh_py_shim.py
@@ -292,7 +292,30 @@ def main(argv): # pylint: disable=W0613
os.makedirs(OPTIONS.saltdir)
cache_dir = os.path.join(OPTIONS.saltdir, "running_data", "var", "cache")
os.makedirs(os.path.join(cache_dir, "salt"))
- os.symlink("salt", os.path.relpath(os.path.join(cache_dir, "venv-salt-minion")))
+ os.symlink(
+ "salt", os.path.relpath(os.path.join(cache_dir, "venv-salt-minion"))
+ )
+ if os.path.exists(OPTIONS.saltdir) and (
+ "SUDO_UID" in os.environ or "SUDO_GID" in os.environ
+ ):
+ try:
+ sudo_uid = int(os.environ.get("SUDO_UID", -1))
+ except ValueError:
+ sudo_uid = -1
+ try:
+ sudo_gid = int(os.environ.get("SUDO_GID", -1))
+ except ValueError:
+ sudo_gid = -1
+ dstat = os.stat(OPTIONS.saltdir)
+ if (sudo_uid != -1 and dstat.st_uid != sudo_uid) or (
+ sudo_gid != -1 and dstat.st_gid != sudo_gid
+ ):
+ os.chown(OPTIONS.saltdir, sudo_uid, sudo_gid)
+ for dir_path, dir_names, file_names in os.walk(OPTIONS.saltdir):
+ for dir_name in dir_names:
+ os.lchown(os.path.join(dir_path, dir_name), sudo_uid, sudo_gid)
+ for file_name in file_names:
+ os.lchown(os.path.join(dir_path, file_name), sudo_uid, sudo_gid)
if venv_salt_call is None:
# Use Salt thin only if Salt Bundle (venv-salt-minion) is not available
--
2.39.2

View File

@ -0,0 +1,253 @@
From c25c8081ded775f3574b0bc999d809ce14701ba5 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Pablo=20Su=C3=A1rez=20Hern=C3=A1ndez?=
<psuarezhernandez@suse.com>
Date: Thu, 3 Aug 2023 10:07:28 +0100
Subject: [PATCH] Fix regression: multiple values for keyword argument
'saltenv' (bsc#1212844) (#590)
* fix passing wrong keyword arguments to cp.cache_file in pkg.installed with sources
* Drop `**kwargs` usage and be explicit about the supported keyword arguments.
Signed-off-by: Pedro Algarvio <palgarvio@vmware.com>
* Add regression test for https://github.com/saltstack/salt/issues/64118
Signed-off-by: Pedro Algarvio <palgarvio@vmware.com>
* Add changelog file
Signed-off-by: Pedro Algarvio <palgarvio@vmware.com>
---------
Signed-off-by: Pedro Algarvio <palgarvio@vmware.com>
Co-authored-by: Massimiliano Torromeo <massimiliano.torromeo@gmail.com>
Co-authored-by: Pedro Algarvio <palgarvio@vmware.com>
---
changelog/64118.fixed.md | 1 +
salt/modules/win_pkg.py | 25 +++++++-----
salt/states/pkg.py | 4 +-
tests/pytests/unit/modules/test_win_pkg.py | 2 +-
tests/pytests/unit/states/test_pkg.py | 46 +++++++++++++++++++---
5 files changed, 62 insertions(+), 16 deletions(-)
create mode 100644 changelog/64118.fixed.md
diff --git a/changelog/64118.fixed.md b/changelog/64118.fixed.md
new file mode 100644
index 0000000000..e7251827e9
--- /dev/null
+++ b/changelog/64118.fixed.md
@@ -0,0 +1 @@
+Stop passing `**kwargs` and be explicit about the keyword arguments to pass, namely, to `cp.cache_file` call in `salt.states.pkg`
diff --git a/salt/modules/win_pkg.py b/salt/modules/win_pkg.py
index 3aa7c7919a..e80dd19322 100644
--- a/salt/modules/win_pkg.py
+++ b/salt/modules/win_pkg.py
@@ -1298,7 +1298,7 @@ def _repo_process_pkg_sls(filename, short_path_name, ret, successful_verbose):
successful_verbose[short_path_name] = []
-def _get_source_sum(source_hash, file_path, saltenv, **kwargs):
+def _get_source_sum(source_hash, file_path, saltenv, verify_ssl=True):
"""
Extract the hash sum, whether it is in a remote hash file, or just a string.
"""
@@ -1315,7 +1315,7 @@ def _get_source_sum(source_hash, file_path, saltenv, **kwargs):
# The source_hash is a file on a server
try:
cached_hash_file = __salt__["cp.cache_file"](
- source_hash, saltenv, verify_ssl=kwargs.get("verify_ssl", True)
+ source_hash, saltenv=saltenv, verify_ssl=verify_ssl
)
except MinionError as exc:
log.exception("Failed to cache %s", source_hash, exc_info=exc)
@@ -1671,7 +1671,7 @@ def install(name=None, refresh=False, pkgs=None, **kwargs):
try:
cached_file = __salt__["cp.cache_file"](
cache_file,
- saltenv,
+ saltenv=saltenv,
verify_ssl=kwargs.get("verify_ssl", True),
)
except MinionError as exc:
@@ -1686,7 +1686,7 @@ def install(name=None, refresh=False, pkgs=None, **kwargs):
try:
cached_file = __salt__["cp.cache_file"](
cache_file,
- saltenv,
+ saltenv=saltenv,
verify_ssl=kwargs.get("verify_ssl", True),
)
except MinionError as exc:
@@ -1706,7 +1706,9 @@ def install(name=None, refresh=False, pkgs=None, **kwargs):
# It's not cached. Cache it, mate.
try:
cached_pkg = __salt__["cp.cache_file"](
- installer, saltenv, verify_ssl=kwargs.get("verify_ssl", True)
+ installer,
+ saltenv=saltenv,
+ verify_ssl=kwargs.get("verify_ssl", True),
)
except MinionError as exc:
msg = "Failed to cache {}".format(installer)
@@ -1730,7 +1732,7 @@ def install(name=None, refresh=False, pkgs=None, **kwargs):
try:
cached_pkg = __salt__["cp.cache_file"](
installer,
- saltenv,
+ saltenv=saltenv,
verify_ssl=kwargs.get("verify_ssl", True),
)
except MinionError as exc:
@@ -1754,7 +1756,12 @@ def install(name=None, refresh=False, pkgs=None, **kwargs):
# Compare the hash sums
source_hash = pkginfo[version_num].get("source_hash", False)
if source_hash:
- source_sum = _get_source_sum(source_hash, cached_pkg, saltenv, **kwargs)
+ source_sum = _get_source_sum(
+ source_hash,
+ cached_pkg,
+ saltenv=saltenv,
+ verify_ssl=kwargs.get("verify_ssl", True),
+ )
log.debug(
"pkg.install: Source %s hash: %s",
source_sum["hash_type"],
@@ -2126,7 +2133,7 @@ def remove(name=None, pkgs=None, **kwargs):
try:
cached_pkg = __salt__["cp.cache_file"](
uninstaller,
- saltenv,
+ saltenv=saltenv,
verify_ssl=kwargs.get("verify_ssl", True),
)
except MinionError as exc:
@@ -2150,7 +2157,7 @@ def remove(name=None, pkgs=None, **kwargs):
try:
cached_pkg = __salt__["cp.cache_file"](
uninstaller,
- saltenv,
+ saltenv=saltenv,
verify_ssl=kwargs.get("verify_ssl", True),
)
except MinionError as exc:
diff --git a/salt/states/pkg.py b/salt/states/pkg.py
index 12fbc87a1a..a605b23107 100644
--- a/salt/states/pkg.py
+++ b/salt/states/pkg.py
@@ -760,7 +760,9 @@ def _find_install_targets(
err = "Unable to cache {0}: {1}"
try:
cached_path = __salt__["cp.cache_file"](
- version_string, saltenv=kwargs["saltenv"], **kwargs
+ version_string,
+ saltenv=kwargs["saltenv"],
+ verify_ssl=kwargs.get("verify_ssl", True),
)
except CommandExecutionError as exc:
problems.append(err.format(version_string, exc))
diff --git a/tests/pytests/unit/modules/test_win_pkg.py b/tests/pytests/unit/modules/test_win_pkg.py
index 76234fb77e..6d435f00a5 100644
--- a/tests/pytests/unit/modules/test_win_pkg.py
+++ b/tests/pytests/unit/modules/test_win_pkg.py
@@ -262,7 +262,7 @@ def test_pkg_install_verify_ssl_false():
result = win_pkg.install(name="nsis", version="3.02", verify_ssl=False)
mock_cp.assert_called_once_with(
"http://download.sourceforge.net/project/nsis/NSIS%203/3.02/nsis-3.02-setup.exe",
- "base",
+ saltenv="base",
verify_ssl=False,
)
assert expected == result
diff --git a/tests/pytests/unit/states/test_pkg.py b/tests/pytests/unit/states/test_pkg.py
index b852f27b00..f58be11011 100644
--- a/tests/pytests/unit/states/test_pkg.py
+++ b/tests/pytests/unit/states/test_pkg.py
@@ -3,6 +3,7 @@ import logging
import pytest
import salt.modules.beacons as beaconmod
+import salt.modules.cp as cp
import salt.modules.pkg_resource as pkg_resource
import salt.modules.yumpkg as yumpkg
import salt.states.beacon as beaconstate
@@ -15,19 +16,28 @@ log = logging.getLogger(__name__)
@pytest.fixture
-def configure_loader_modules():
+def configure_loader_modules(minion_opts):
return {
+ cp: {
+ "__opts__": minion_opts,
+ },
pkg: {
"__env__": "base",
"__salt__": {},
"__grains__": {"os": "CentOS", "os_family": "RedHat"},
- "__opts__": {"test": False, "cachedir": ""},
+ "__opts__": minion_opts,
"__instance_id__": "",
"__low__": {},
"__utils__": {"state.gen_tag": state_utils.gen_tag},
},
- beaconstate: {"__salt__": {}, "__opts__": {}},
- beaconmod: {"__salt__": {}, "__opts__": {}},
+ beaconstate: {
+ "__salt__": {},
+ "__opts__": minion_opts,
+ },
+ beaconmod: {
+ "__salt__": {},
+ "__opts__": minion_opts,
+ },
pkg_resource: {
"__salt__": {},
"__grains__": {"os": "CentOS", "os_family": "RedHat"},
@@ -35,7 +45,7 @@ def configure_loader_modules():
yumpkg: {
"__salt__": {},
"__grains__": {"osarch": "x86_64", "osmajorrelease": 7},
- "__opts__": {},
+ "__opts__": minion_opts,
},
}
@@ -563,6 +573,32 @@ def test_installed_with_changes_test_true(list_pkgs):
assert ret["changes"] == expected
+def test_installed_with_sources(list_pkgs, tmp_path):
+ """
+ Test pkg.installed with passing `sources`
+ """
+
+ list_pkgs = MagicMock(return_value=list_pkgs)
+ pkg_source = tmp_path / "pkga-package-0.3.0.deb"
+
+ with patch.dict(
+ pkg.__salt__,
+ {
+ "cp.cache_file": cp.cache_file,
+ "pkg.list_pkgs": list_pkgs,
+ "pkg_resource.pack_sources": pkg_resource.pack_sources,
+ "lowpkg.bin_pkg_info": MagicMock(),
+ },
+ ), patch("salt.fileclient.get_file_client", return_value=MagicMock()):
+ try:
+ ret = pkg.installed("install-pkgd", sources=[{"pkga": str(pkg_source)}])
+ assert ret["result"] is False
+ except TypeError as exc:
+ if "got multiple values for keyword argument 'saltenv'" in str(exc):
+ pytest.fail(f"TypeError should have not been raised: {exc}")
+ raise exc from None
+
+
@pytest.mark.parametrize("action", ["removed", "purged"])
def test_removed_purged_with_changes_test_true(list_pkgs, action):
"""
--
2.41.0

View File

@ -0,0 +1,53 @@
From 42cfb51fa01e13fe043a62536ba37fd472bc2688 Mon Sep 17 00:00:00 2001
From: Victor Zhestkov <vzhestkov@suse.com>
Date: Tue, 12 Apr 2022 10:08:17 +0300
Subject: [PATCH] Fix regression with depending client.ssh on psutil
(bsc#1197533)
---
salt/client/ssh/__init__.py | 14 ++++++++++++--
1 file changed, 12 insertions(+), 2 deletions(-)
diff --git a/salt/client/ssh/__init__.py b/salt/client/ssh/__init__.py
index d5a679821e..b120e0002e 100644
--- a/salt/client/ssh/__init__.py
+++ b/salt/client/ssh/__init__.py
@@ -12,7 +12,6 @@ import hashlib
import logging
import multiprocessing
import os
-import psutil
import queue
import re
import shlex
@@ -420,6 +419,16 @@ class SSH(MultiprocessingStateMixin):
self.__parsed_rosters[self.ROSTER_UPDATE_FLAG] = False
return
+ def _pid_exists(self, pid):
+ """
+ Check if specified pid is alive
+ """
+ try:
+ os.kill(pid, 0)
+ except OSError:
+ return False
+ return True
+
def _update_roster(self, hostname=None, user=None):
"""
Update default flat roster with the passed in information.
@@ -639,7 +648,8 @@ class SSH(MultiprocessingStateMixin):
pid_running = (
False
if cached_session["pid"] == 0
- else cached_session.get("running", False) or psutil.pid_exists(cached_session["pid"])
+ else cached_session.get("running", False)
+ or self._pid_exists(cached_session["pid"])
)
if (
pid_running and prev_session_running < self.max_pid_wait
--
2.39.2

View File

@ -0,0 +1,128 @@
From 4dbd5534a39fbfaebad32a00d0e6c512d840b0fd Mon Sep 17 00:00:00 2001
From: Victor Zhestkov <vzhestkov@suse.com>
Date: Thu, 31 Mar 2022 13:39:57 +0300
Subject: [PATCH] Fix salt-ssh opts poisoning (bsc#1197637) - 3004 (#501)
* Fix salt-ssh opts poisoning
* Pass proper __opts__ to roster modules
* Remove redundant copy.deepcopy for opts from handle_routine
---
salt/client/ssh/__init__.py | 17 ++++++++++-------
salt/loader/__init__.py | 7 ++++++-
2 files changed, 16 insertions(+), 8 deletions(-)
diff --git a/salt/client/ssh/__init__.py b/salt/client/ssh/__init__.py
index e6837df4e5..a527c03de6 100644
--- a/salt/client/ssh/__init__.py
+++ b/salt/client/ssh/__init__.py
@@ -338,7 +338,7 @@ class SSH(MultiprocessingStateMixin):
self.session_flock_file = os.path.join(
self.opts["cachedir"], "salt-ssh.session.lock"
)
- self.ssh_session_grace_time = int(self.opts.get("ssh_session_grace_time", 3))
+ self.ssh_session_grace_time = int(self.opts.get("ssh_session_grace_time", 1))
# __setstate__ and __getstate__ are only used on spawning platforms.
def __setstate__(self, state):
@@ -571,7 +571,6 @@ class SSH(MultiprocessingStateMixin):
"""
LOG_LOCK.release()
salt.loader.LOAD_LOCK.release()
- opts = copy.deepcopy(opts)
single = Single(
opts,
opts["argv"],
@@ -608,6 +607,7 @@ class SSH(MultiprocessingStateMixin):
Spin up the needed threads or processes and execute the subsequent
routines
"""
+ opts = copy.deepcopy(self.opts)
que = multiprocessing.Queue()
running = {}
targets_queue = deque(self.targets.keys())
@@ -618,7 +618,7 @@ class SSH(MultiprocessingStateMixin):
if not self.targets:
log.error("No matching targets found in roster.")
break
- if len(running) < self.opts.get("ssh_max_procs", 25) and not init:
+ if len(running) < opts.get("ssh_max_procs", 25) and not init:
if targets_queue:
host = targets_queue.popleft()
else:
@@ -636,7 +636,7 @@ class SSH(MultiprocessingStateMixin):
pid_running = (
False
if cached_session["pid"] == 0
- else psutil.pid_exists(cached_session["pid"])
+ else cached_session.get("running", False) or psutil.pid_exists(cached_session["pid"])
)
if (
pid_running and prev_session_running < self.max_pid_wait
@@ -651,9 +651,10 @@ class SSH(MultiprocessingStateMixin):
"salt-ssh/session",
host,
{
- "pid": 0,
+ "pid": os.getpid(),
"master_id": self.master_id,
"ts": time.time(),
+ "running": True,
},
)
for default in self.defaults:
@@ -681,7 +682,7 @@ class SSH(MultiprocessingStateMixin):
continue
args = (
que,
- self.opts,
+ opts,
host,
self.targets[host],
mine,
@@ -717,6 +718,7 @@ class SSH(MultiprocessingStateMixin):
"pid": routine.pid,
"master_id": self.master_id,
"ts": time.time(),
+ "running": True,
},
)
continue
@@ -768,12 +770,13 @@ class SSH(MultiprocessingStateMixin):
"pid": 0,
"master_id": self.master_id,
"ts": time.time(),
+ "running": False,
},
)
if len(rets) >= len(self.targets):
break
# Sleep when limit or all threads started
- if len(running) >= self.opts.get("ssh_max_procs", 25) or len(
+ if len(running) >= opts.get("ssh_max_procs", 25) or len(
self.targets
) >= len(running):
time.sleep(0.1)
diff --git a/salt/loader/__init__.py b/salt/loader/__init__.py
index 32f8a7702c..bbe4269839 100644
--- a/salt/loader/__init__.py
+++ b/salt/loader/__init__.py
@@ -757,7 +757,12 @@ def roster(opts, runner=None, utils=None, whitelist=None, loaded_base_name=None,
opts,
tag="roster",
whitelist=whitelist,
- pack={"__runner__": runner, "__utils__": utils, "__context__": context},
+ pack={
+ "__runner__": runner,
+ "__utils__": utils,
+ "__context__": context,
+ "__opts__": opts,
+ },
extra_module_dirs=utils.module_dirs if utils else None,
loaded_base_name=loaded_base_name,
)
--
2.39.2

View File

@ -0,0 +1,141 @@
From b4b2c59bfd479d59faeaf0e4d26d672828a519c8 Mon Sep 17 00:00:00 2001
From: Victor Zhestkov <vzhestkov@suse.com>
Date: Wed, 25 Nov 2020 15:09:41 +0300
Subject: [PATCH] Fix salt.utils.stringutils.to_str calls to make it
working with numeric uid/gid
Fix upstream tests to work with 3006.
---
salt/modules/file.py | 22 ++++++++++++-------
salt/states/file.py | 11 ++++++++--
.../unit/modules/file/test_file_check.py | 10 ++++-----
3 files changed, 28 insertions(+), 15 deletions(-)
diff --git a/salt/modules/file.py b/salt/modules/file.py
index 4612d65511..55b236fe41 100644
--- a/salt/modules/file.py
+++ b/salt/modules/file.py
@@ -5127,14 +5127,20 @@ def check_perms(
is_dir = os.path.isdir(name)
is_link = os.path.islink(name)
+ def __safe_to_str(s):
+ try:
+ return salt.utils.stringutils.to_str(s)
+ except:
+ return salt.utils.stringutils.to_str(str(s))
+
# Check and make user/group/mode changes, then verify they were successful
if user:
if (
salt.utils.platform.is_windows() and not user_to_uid(user) == cur["uid"]
) or (
not salt.utils.platform.is_windows()
- and not salt.utils.stringutils.to_str(user) == cur["user"]
- and not salt.utils.stringutils.to_str(user) == cur["uid"]
+ and not __safe_to_str(user) == cur["user"]
+ and not user == cur["uid"]
):
perms["cuser"] = user
@@ -5143,8 +5149,8 @@ def check_perms(
salt.utils.platform.is_windows() and not group_to_gid(group) == cur["gid"]
) or (
not salt.utils.platform.is_windows()
- and not salt.utils.stringutils.to_str(group) == cur["group"]
- and not salt.utils.stringutils.to_str(group) == cur["gid"]
+ and not __safe_to_str(group) == cur["group"]
+ and not group == cur["gid"]
):
perms["cgroup"] = group
@@ -5188,8 +5194,8 @@ def check_perms(
salt.utils.platform.is_windows() and not user_to_uid(user) == post["uid"]
) or (
not salt.utils.platform.is_windows()
- and not salt.utils.stringutils.to_str(user) == post["user"]
- and not salt.utils.stringutils.to_str(user) == post["uid"]
+ and not __safe_to_str(user) == post["user"]
+ and not user == post["uid"]
):
if __opts__["test"] is True:
ret["changes"]["user"] = user
@@ -5204,8 +5210,8 @@ def check_perms(
salt.utils.platform.is_windows() and not group_to_gid(group) == post["gid"]
) or (
not salt.utils.platform.is_windows()
- and not salt.utils.stringutils.to_str(group) == post["group"]
- and not salt.utils.stringutils.to_str(group) == post["gid"]
+ and not __safe_to_str(group) == post["group"]
+ and not group == post["gid"]
):
if __opts__["test"] is True:
ret["changes"]["group"] = group
diff --git a/salt/states/file.py b/salt/states/file.py
index 024e5e34ce..9630ff7096 100644
--- a/salt/states/file.py
+++ b/salt/states/file.py
@@ -864,15 +864,22 @@ def _check_dir_meta(name, user, group, mode, follow_symlinks=False):
if not stats:
changes["directory"] = "new"
return changes
+
+ def __safe_to_str(s):
+ try:
+ return salt.utils.stringutils.to_str(s)
+ except:
+ return salt.utils.stringutils.to_str(str(s))
+
if (
user is not None
- and salt.utils.stringutils.to_str(user) != stats["user"]
+ and __safe_to_str(user) != stats["user"]
and user != stats.get("uid")
):
changes["user"] = user
if (
group is not None
- and salt.utils.stringutils.to_str(group) != stats["group"]
+ and __safe_to_str(group) != stats["group"]
and group != stats.get("gid")
):
changes["group"] = group
diff --git a/tests/pytests/unit/modules/file/test_file_check.py b/tests/pytests/unit/modules/file/test_file_check.py
index ce86acd7fc..2294e6760b 100644
--- a/tests/pytests/unit/modules/file/test_file_check.py
+++ b/tests/pytests/unit/modules/file/test_file_check.py
@@ -17,7 +17,7 @@ def configure_loader_modules():
return {
filemod: {
"__context__": {},
- "__opts__": {"test": False},
+ "__opts__": {"test": True},
}
}
@@ -172,7 +172,7 @@ def test_check_managed_changes_follow_symlinks(a_link, tfile):
),
# no user/group changes needed by id
(
- {"user": 3001, "group": 4001},
+ {"user": 2001, "group": 1001},
{},
),
],
@@ -184,9 +184,9 @@ def test_check_perms_user_group_name_and_id(input, expected):
stat_out = {
"user": "luser",
"group": "lgroup",
- "uid": 3001,
- "gid": 4001,
- "mode": "123",
+ "uid": 2001,
+ "gid": 1001,
+ "mode": "0123",
}
patch_stats = patch(
--
2.39.2

View File

@ -0,0 +1,118 @@
From 38de9af6bd243d35464713e0ee790255d3b40a7e Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Pablo=20Su=C3=A1rez=20Hern=C3=A1ndez?=
<psuarezhernandez@suse.com>
Date: Fri, 23 Jun 2023 13:02:51 +0100
Subject: [PATCH] Fix some issues detected in "salt-support" CLI, module
and tests (bsc#1211591) (#580)
* saltsupport: avoid debug traceback due missing import
* Use yaml and json wrappers provides by Salt utils
* Remove unnecessary call to deprecated setup_logfile_logger
* Move unittest saltsupport tests to proper place
* Fix test assertion error due to wrong capturing of message
---
salt/cli/support/__init__.py | 4 ++--
salt/cli/support/collector.py | 6 ++----
tests/{pytests => }/unit/cli/test_support.py | 0
tests/unit/modules/test_saltsupport.py | 6 +++---
4 files changed, 7 insertions(+), 9 deletions(-)
rename tests/{pytests => }/unit/cli/test_support.py (100%)
diff --git a/salt/cli/support/__init__.py b/salt/cli/support/__init__.py
index 59c2609e07..0a7da72e93 100644
--- a/salt/cli/support/__init__.py
+++ b/salt/cli/support/__init__.py
@@ -6,7 +6,7 @@ import os
import jinja2
import salt.exceptions
-import yaml
+import salt.utils.yaml
log = logging.getLogger(__name__)
@@ -48,7 +48,7 @@ def get_profile(profile, caller, runner):
try:
rendered_template = _render_profile(profile_path, caller, runner)
log.debug("\n{d}\n{t}\n{d}\n".format(d="-" * 80, t=rendered_template))
- data.update(yaml.load(rendered_template))
+ data.update(salt.utils.yaml.load(rendered_template))
except Exception as ex:
log.debug(ex, exc_info=True)
raise salt.exceptions.SaltException(
diff --git a/salt/cli/support/collector.py b/salt/cli/support/collector.py
index 1879cc5220..0ba987580c 100644
--- a/salt/cli/support/collector.py
+++ b/salt/cli/support/collector.py
@@ -1,6 +1,5 @@
import builtins as exceptions
import copy
-import json
import logging
import os
import sys
@@ -16,10 +15,10 @@ import salt.cli.support.intfunc
import salt.cli.support.localrunner
import salt.defaults.exitcodes
import salt.exceptions
-import salt.ext.six as six
import salt.output.table_out
import salt.runner
import salt.utils.files
+import salt.utils.json
import salt.utils.parsers
import salt.utils.platform
import salt.utils.process
@@ -169,7 +168,7 @@ class SupportDataCollector:
content = None
if content is None:
- data = json.loads(json.dumps(data))
+ data = salt.utils.json.loads(salt.utils.json.dumps(data))
if isinstance(data, dict) and data.get("return"):
data = data.get("return")
content = yaml.safe_dump(data, default_flow_style=False, indent=4)
@@ -506,7 +505,6 @@ class SaltSupport(salt.utils.parsers.SaltSupportOptionParser):
self.out.error(ex)
else:
if self.config["log_level"] not in ("quiet",):
- self.setup_logfile_logger()
salt.utils.verify.verify_log(self.config)
salt.cli.support.log = log # Pass update logger so trace is available
diff --git a/tests/pytests/unit/cli/test_support.py b/tests/unit/cli/test_support.py
similarity index 100%
rename from tests/pytests/unit/cli/test_support.py
rename to tests/unit/cli/test_support.py
diff --git a/tests/unit/modules/test_saltsupport.py b/tests/unit/modules/test_saltsupport.py
index 4ef04246b9..2afdd69b3e 100644
--- a/tests/unit/modules/test_saltsupport.py
+++ b/tests/unit/modules/test_saltsupport.py
@@ -251,8 +251,8 @@ professor: Farnsworth
with pytest.raises(salt.exceptions.SaltInvocationError) as err:
support.sync("group-name")
assert (
- ' Support archive "/mnt/storage/three-support-222-222.bz2" was not found'
- in str(err)
+ 'Support archive "/mnt/storage/three-support-222-222.bz2" was not found'
+ in str(err.value)
)
@patch("tempfile.mkstemp", MagicMock(return_value=(0, "dummy")))
@@ -274,7 +274,7 @@ professor: Farnsworth
with pytest.raises(salt.exceptions.SaltInvocationError) as err:
support.sync("group-name", name="lost.bz2")
- assert ' Support archive "lost.bz2" was not found' in str(err)
+ assert 'Support archive "lost.bz2" was not found' in str(err.value)
@patch("tempfile.mkstemp", MagicMock(return_value=(0, "dummy")))
@patch("os.path.exists", MagicMock(return_value=False))
--
2.41.0

View File

@ -0,0 +1,841 @@
From 290d092c06dc378647dd1e49f000f012a7c07904 Mon Sep 17 00:00:00 2001
From: vzhestkov <vzhestkov@suse.com>
Date: Wed, 2 Aug 2023 16:13:49 +0200
Subject: [PATCH] Fix tests to make them running with salt-testsuite
---
tests/pytests/unit/cli/test_batch_async.py | 718 +++++++++++----------
tests/unit/cli/test_support.py | 6 +-
tests/unit/modules/test_saltsupport.py | 4 +-
3 files changed, 364 insertions(+), 364 deletions(-)
diff --git a/tests/pytests/unit/cli/test_batch_async.py b/tests/pytests/unit/cli/test_batch_async.py
index c0b708de76..e0774ffff3 100644
--- a/tests/pytests/unit/cli/test_batch_async.py
+++ b/tests/pytests/unit/cli/test_batch_async.py
@@ -1,386 +1,392 @@
+import pytest
+
import salt.ext.tornado
from salt.cli.batch_async import BatchAsync
-from salt.ext.tornado.testing import AsyncTestCase
from tests.support.mock import MagicMock, patch
-from tests.support.unit import TestCase, skipIf
-
-
-class AsyncBatchTestCase(AsyncTestCase, TestCase):
- def setUp(self):
- self.io_loop = self.get_new_ioloop()
- opts = {
- "batch": "1",
- "conf_file": {},
- "tgt": "*",
- "timeout": 5,
- "gather_job_timeout": 5,
- "batch_presence_ping_timeout": 1,
- "transport": None,
- "sock_dir": "",
- }
-
- with patch("salt.client.get_local_client", MagicMock(return_value=MagicMock())):
- with patch(
- "salt.cli.batch_async.batch_get_opts", MagicMock(return_value=opts)
- ):
- self.batch = BatchAsync(
- opts,
- MagicMock(side_effect=["1234", "1235", "1236"]),
- {
- "tgt": "",
- "fun": "",
- "kwargs": {"batch": "", "batch_presence_ping_timeout": 1},
- },
- )
-
- def test_ping_jid(self):
- self.assertEqual(self.batch.ping_jid, "1234")
-
- def test_batch_jid(self):
- self.assertEqual(self.batch.batch_jid, "1235")
-
- def test_find_job_jid(self):
- self.assertEqual(self.batch.find_job_jid, "1236")
-
- def test_batch_size(self):
- """
- Tests passing batch value as a number
- """
- self.batch.opts = {"batch": "2", "timeout": 5}
- self.batch.minions = {"foo", "bar"}
- self.batch.start_batch()
- self.assertEqual(self.batch.batch_size, 2)
-
- @salt.ext.tornado.testing.gen_test
- def test_batch_start_on_batch_presence_ping_timeout(self):
- self.batch.event = MagicMock()
- future = salt.ext.tornado.gen.Future()
- future.set_result({"minions": ["foo", "bar"]})
- self.batch.local.run_job_async.return_value = future
- ret = self.batch.start()
- # assert start_batch is called later with batch_presence_ping_timeout as param
- self.assertEqual(
- self.batch.event.io_loop.spawn_callback.call_args[0],
- (self.batch.start_batch,),
- )
- # assert test.ping called
- self.assertEqual(
- self.batch.local.run_job_async.call_args[0], ("*", "test.ping", [], "glob")
- )
- # assert targeted_minions == all minions matched by tgt
- self.assertEqual(self.batch.targeted_minions, {"foo", "bar"})
-
- @salt.ext.tornado.testing.gen_test
- def test_batch_start_on_gather_job_timeout(self):
- self.batch.event = MagicMock()
- future = salt.ext.tornado.gen.Future()
- future.set_result({"minions": ["foo", "bar"]})
- self.batch.local.run_job_async.return_value = future
- self.batch.batch_presence_ping_timeout = None
- ret = self.batch.start()
- # assert start_batch is called later with gather_job_timeout as param
- self.assertEqual(
- self.batch.event.io_loop.spawn_callback.call_args[0],
- (self.batch.start_batch,),
- )
- def test_batch_fire_start_event(self):
- self.batch.minions = {"foo", "bar"}
- self.batch.opts = {"batch": "2", "timeout": 5}
- self.batch.event = MagicMock()
- self.batch.metadata = {"mykey": "myvalue"}
- self.batch.start_batch()
- self.assertEqual(
- self.batch.event.fire_event.call_args[0],
- (
+
+@pytest.fixture
+def batch(temp_salt_master):
+ opts = {
+ "batch": "1",
+ "conf_file": {},
+ "tgt": "*",
+ "timeout": 5,
+ "gather_job_timeout": 5,
+ "batch_presence_ping_timeout": 1,
+ "transport": None,
+ "sock_dir": "",
+ }
+
+ with patch("salt.client.get_local_client", MagicMock(return_value=MagicMock())):
+ with patch("salt.cli.batch_async.batch_get_opts", MagicMock(return_value=opts)):
+ batch = BatchAsync(
+ opts,
+ MagicMock(side_effect=["1234", "1235", "1236"]),
{
- "available_minions": {"foo", "bar"},
- "down_minions": set(),
- "metadata": self.batch.metadata,
+ "tgt": "",
+ "fun": "",
+ "kwargs": {"batch": "", "batch_presence_ping_timeout": 1},
},
- "salt/batch/1235/start",
- ),
- )
+ )
+ yield batch
- @salt.ext.tornado.testing.gen_test
- def test_start_batch_calls_next(self):
- self.batch.run_next = MagicMock(return_value=MagicMock())
- self.batch.event = MagicMock()
- self.batch.start_batch()
- self.assertEqual(self.batch.initialized, True)
- self.assertEqual(
- self.batch.event.io_loop.spawn_callback.call_args[0], (self.batch.run_next,)
- )
- def test_batch_fire_done_event(self):
- self.batch.targeted_minions = {"foo", "baz", "bar"}
- self.batch.minions = {"foo", "bar"}
- self.batch.done_minions = {"foo"}
- self.batch.timedout_minions = {"bar"}
- self.batch.event = MagicMock()
- self.batch.metadata = {"mykey": "myvalue"}
- old_event = self.batch.event
- self.batch.end_batch()
- self.assertEqual(
- old_event.fire_event.call_args[0],
- (
- {
- "available_minions": {"foo", "bar"},
- "done_minions": self.batch.done_minions,
- "down_minions": {"baz"},
- "timedout_minions": self.batch.timedout_minions,
- "metadata": self.batch.metadata,
- },
- "salt/batch/1235/done",
- ),
- )
+def test_ping_jid(batch):
+ assert batch.ping_jid == "1234"
- def test_batch__del__(self):
- batch = BatchAsync(MagicMock(), MagicMock(), MagicMock())
- event = MagicMock()
- batch.event = event
- batch.__del__()
- self.assertEqual(batch.local, None)
- self.assertEqual(batch.event, None)
- self.assertEqual(batch.ioloop, None)
-
- def test_batch_close_safe(self):
- batch = BatchAsync(MagicMock(), MagicMock(), MagicMock())
- event = MagicMock()
- batch.event = event
- batch.patterns = {
- ("salt/job/1234/ret/*", "find_job_return"),
- ("salt/job/4321/ret/*", "find_job_return"),
- }
- batch.close_safe()
- self.assertEqual(batch.local, None)
- self.assertEqual(batch.event, None)
- self.assertEqual(batch.ioloop, None)
- self.assertEqual(len(event.unsubscribe.mock_calls), 2)
- self.assertEqual(len(event.remove_event_handler.mock_calls), 1)
-
- @salt.ext.tornado.testing.gen_test
- def test_batch_next(self):
- self.batch.event = MagicMock()
- self.batch.opts["fun"] = "my.fun"
- self.batch.opts["arg"] = []
- self.batch._get_next = MagicMock(return_value={"foo", "bar"})
- self.batch.batch_size = 2
- future = salt.ext.tornado.gen.Future()
- future.set_result({"minions": ["foo", "bar"]})
- self.batch.local.run_job_async.return_value = future
- self.batch.run_next()
- self.assertEqual(
- self.batch.local.run_job_async.call_args[0],
- ({"foo", "bar"}, "my.fun", [], "list"),
- )
- self.assertEqual(
- self.batch.event.io_loop.spawn_callback.call_args[0],
- (self.batch.find_job, {"foo", "bar"}),
- )
- self.assertEqual(self.batch.active, {"bar", "foo"})
-
- def test_next_batch(self):
- self.batch.minions = {"foo", "bar"}
- self.batch.batch_size = 2
- self.assertEqual(self.batch._get_next(), {"foo", "bar"})
-
- def test_next_batch_one_done(self):
- self.batch.minions = {"foo", "bar"}
- self.batch.done_minions = {"bar"}
- self.batch.batch_size = 2
- self.assertEqual(self.batch._get_next(), {"foo"})
-
- def test_next_batch_one_done_one_active(self):
- self.batch.minions = {"foo", "bar", "baz"}
- self.batch.done_minions = {"bar"}
- self.batch.active = {"baz"}
- self.batch.batch_size = 2
- self.assertEqual(self.batch._get_next(), {"foo"})
-
- def test_next_batch_one_done_one_active_one_timedout(self):
- self.batch.minions = {"foo", "bar", "baz", "faz"}
- self.batch.done_minions = {"bar"}
- self.batch.active = {"baz"}
- self.batch.timedout_minions = {"faz"}
- self.batch.batch_size = 2
- self.assertEqual(self.batch._get_next(), {"foo"})
-
- def test_next_batch_bigger_size(self):
- self.batch.minions = {"foo", "bar"}
- self.batch.batch_size = 3
- self.assertEqual(self.batch._get_next(), {"foo", "bar"})
-
- def test_next_batch_all_done(self):
- self.batch.minions = {"foo", "bar"}
- self.batch.done_minions = {"foo", "bar"}
- self.batch.batch_size = 2
- self.assertEqual(self.batch._get_next(), set())
-
- def test_next_batch_all_active(self):
- self.batch.minions = {"foo", "bar"}
- self.batch.active = {"foo", "bar"}
- self.batch.batch_size = 2
- self.assertEqual(self.batch._get_next(), set())
-
- def test_next_batch_all_timedout(self):
- self.batch.minions = {"foo", "bar"}
- self.batch.timedout_minions = {"foo", "bar"}
- self.batch.batch_size = 2
- self.assertEqual(self.batch._get_next(), set())
-
- def test_batch__event_handler_ping_return(self):
- self.batch.targeted_minions = {"foo"}
- self.batch.event = MagicMock(
- unpack=MagicMock(return_value=("salt/job/1234/ret/foo", {"id": "foo"}))
- )
- self.batch.start()
- self.assertEqual(self.batch.minions, set())
- self.batch._BatchAsync__event_handler(MagicMock())
- self.assertEqual(self.batch.minions, {"foo"})
- self.assertEqual(self.batch.done_minions, set())
-
- def test_batch__event_handler_call_start_batch_when_all_pings_return(self):
- self.batch.targeted_minions = {"foo"}
- self.batch.event = MagicMock(
- unpack=MagicMock(return_value=("salt/job/1234/ret/foo", {"id": "foo"}))
- )
- self.batch.start()
- self.batch._BatchAsync__event_handler(MagicMock())
- self.assertEqual(
- self.batch.event.io_loop.spawn_callback.call_args[0],
- (self.batch.start_batch,),
- )
- def test_batch__event_handler_not_call_start_batch_when_not_all_pings_return(self):
- self.batch.targeted_minions = {"foo", "bar"}
- self.batch.event = MagicMock(
- unpack=MagicMock(return_value=("salt/job/1234/ret/foo", {"id": "foo"}))
- )
- self.batch.start()
- self.batch._BatchAsync__event_handler(MagicMock())
- self.assertEqual(len(self.batch.event.io_loop.spawn_callback.mock_calls), 0)
+def test_batch_jid(batch):
+ assert batch.batch_jid == "1235"
+
+
+def test_find_job_jid(batch):
+ assert batch.find_job_jid == "1236"
+
- def test_batch__event_handler_batch_run_return(self):
- self.batch.event = MagicMock(
- unpack=MagicMock(return_value=("salt/job/1235/ret/foo", {"id": "foo"}))
+def test_batch_size(batch):
+ """
+ Tests passing batch value as a number
+ """
+ batch.opts = {"batch": "2", "timeout": 5}
+ batch.minions = {"foo", "bar"}
+ batch.start_batch()
+ assert batch.batch_size == 2
+
+
+def test_batch_start_on_batch_presence_ping_timeout(batch):
+ # batch_async = BatchAsyncMock();
+ batch.event = MagicMock()
+ future = salt.ext.tornado.gen.Future()
+ future.set_result({"minions": ["foo", "bar"]})
+ batch.local.run_job_async.return_value = future
+ with patch("salt.ext.tornado.gen.sleep", return_value=future):
+ # ret = batch_async.start(batch)
+ ret = batch.start()
+ # assert start_batch is called later with batch_presence_ping_timeout as param
+ assert batch.event.io_loop.spawn_callback.call_args[0] == (batch.start_batch,)
+ # assert test.ping called
+ assert batch.local.run_job_async.call_args[0] == ("*", "test.ping", [], "glob")
+ # assert targeted_minions == all minions matched by tgt
+ assert batch.targeted_minions == {"foo", "bar"}
+
+
+def test_batch_start_on_gather_job_timeout(batch):
+ # batch_async = BatchAsyncMock();
+ batch.event = MagicMock()
+ future = salt.ext.tornado.gen.Future()
+ future.set_result({"minions": ["foo", "bar"]})
+ batch.local.run_job_async.return_value = future
+ batch.batch_presence_ping_timeout = None
+ with patch("salt.ext.tornado.gen.sleep", return_value=future):
+ # ret = batch_async.start(batch)
+ ret = batch.start()
+ # assert start_batch is called later with gather_job_timeout as param
+ assert batch.event.io_loop.spawn_callback.call_args[0] == (batch.start_batch,)
+
+
+def test_batch_fire_start_event(batch):
+ batch.minions = {"foo", "bar"}
+ batch.opts = {"batch": "2", "timeout": 5}
+ batch.event = MagicMock()
+ batch.metadata = {"mykey": "myvalue"}
+ batch.start_batch()
+ assert batch.event.fire_event.call_args[0] == (
+ {
+ "available_minions": {"foo", "bar"},
+ "down_minions": set(),
+ "metadata": batch.metadata,
+ },
+ "salt/batch/1235/start",
+ )
+
+
+def test_start_batch_calls_next(batch):
+ batch.run_next = MagicMock(return_value=MagicMock())
+ batch.event = MagicMock()
+ batch.start_batch()
+ assert batch.initialized
+ assert batch.event.io_loop.spawn_callback.call_args[0] == (batch.run_next,)
+
+
+def test_batch_fire_done_event(batch):
+ batch.targeted_minions = {"foo", "baz", "bar"}
+ batch.minions = {"foo", "bar"}
+ batch.done_minions = {"foo"}
+ batch.timedout_minions = {"bar"}
+ batch.event = MagicMock()
+ batch.metadata = {"mykey": "myvalue"}
+ old_event = batch.event
+ batch.end_batch()
+ assert old_event.fire_event.call_args[0] == (
+ {
+ "available_minions": {"foo", "bar"},
+ "done_minions": batch.done_minions,
+ "down_minions": {"baz"},
+ "timedout_minions": batch.timedout_minions,
+ "metadata": batch.metadata,
+ },
+ "salt/batch/1235/done",
+ )
+
+
+def test_batch__del__(batch):
+ batch = BatchAsync(MagicMock(), MagicMock(), MagicMock())
+ event = MagicMock()
+ batch.event = event
+ batch.__del__()
+ assert batch.local is None
+ assert batch.event is None
+ assert batch.ioloop is None
+
+
+def test_batch_close_safe(batch):
+ batch = BatchAsync(MagicMock(), MagicMock(), MagicMock())
+ event = MagicMock()
+ batch.event = event
+ batch.patterns = {
+ ("salt/job/1234/ret/*", "find_job_return"),
+ ("salt/job/4321/ret/*", "find_job_return"),
+ }
+ batch.close_safe()
+ assert batch.local is None
+ assert batch.event is None
+ assert batch.ioloop is None
+ assert len(event.unsubscribe.mock_calls) == 2
+ assert len(event.remove_event_handler.mock_calls) == 1
+
+
+def test_batch_next(batch):
+ batch.event = MagicMock()
+ batch.opts["fun"] = "my.fun"
+ batch.opts["arg"] = []
+ batch._get_next = MagicMock(return_value={"foo", "bar"})
+ batch.batch_size = 2
+ future = salt.ext.tornado.gen.Future()
+ future.set_result({"minions": ["foo", "bar"]})
+ batch.local.run_job_async.return_value = future
+ with patch("salt.ext.tornado.gen.sleep", return_value=future):
+ batch.run_next()
+ assert batch.local.run_job_async.call_args[0] == (
+ {"foo", "bar"},
+ "my.fun",
+ [],
+ "list",
)
- self.batch.start()
- self.batch.active = {"foo"}
- self.batch._BatchAsync__event_handler(MagicMock())
- self.assertEqual(self.batch.active, set())
- self.assertEqual(self.batch.done_minions, {"foo"})
- self.assertEqual(
- self.batch.event.io_loop.spawn_callback.call_args[0],
- (self.batch.schedule_next,),
+ assert batch.event.io_loop.spawn_callback.call_args[0] == (
+ batch.find_job,
+ {"foo", "bar"},
)
+ assert batch.active == {"bar", "foo"}
+
- def test_batch__event_handler_find_job_return(self):
- self.batch.event = MagicMock(
- unpack=MagicMock(
- return_value=(
- "salt/job/1236/ret/foo",
- {"id": "foo", "return": "deadbeaf"},
- )
+def test_next_batch(batch):
+ batch.minions = {"foo", "bar"}
+ batch.batch_size = 2
+ assert batch._get_next() == {"foo", "bar"}
+
+
+def test_next_batch_one_done(batch):
+ batch.minions = {"foo", "bar"}
+ batch.done_minions = {"bar"}
+ batch.batch_size = 2
+ assert batch._get_next() == {"foo"}
+
+
+def test_next_batch_one_done_one_active(batch):
+ batch.minions = {"foo", "bar", "baz"}
+ batch.done_minions = {"bar"}
+ batch.active = {"baz"}
+ batch.batch_size = 2
+ assert batch._get_next() == {"foo"}
+
+
+def test_next_batch_one_done_one_active_one_timedout(batch):
+ batch.minions = {"foo", "bar", "baz", "faz"}
+ batch.done_minions = {"bar"}
+ batch.active = {"baz"}
+ batch.timedout_minions = {"faz"}
+ batch.batch_size = 2
+ assert batch._get_next() == {"foo"}
+
+
+def test_next_batch_bigger_size(batch):
+ batch.minions = {"foo", "bar"}
+ batch.batch_size = 3
+ assert batch._get_next() == {"foo", "bar"}
+
+
+def test_next_batch_all_done(batch):
+ batch.minions = {"foo", "bar"}
+ batch.done_minions = {"foo", "bar"}
+ batch.batch_size = 2
+ assert batch._get_next() == set()
+
+
+def test_next_batch_all_active(batch):
+ batch.minions = {"foo", "bar"}
+ batch.active = {"foo", "bar"}
+ batch.batch_size = 2
+ assert batch._get_next() == set()
+
+
+def test_next_batch_all_timedout(batch):
+ batch.minions = {"foo", "bar"}
+ batch.timedout_minions = {"foo", "bar"}
+ batch.batch_size = 2
+ assert batch._get_next() == set()
+
+
+def test_batch__event_handler_ping_return(batch):
+ batch.targeted_minions = {"foo"}
+ batch.event = MagicMock(
+ unpack=MagicMock(return_value=("salt/job/1234/ret/foo", {"id": "foo"}))
+ )
+ batch.start()
+ assert batch.minions == set()
+ batch._BatchAsync__event_handler(MagicMock())
+ assert batch.minions == {"foo"}
+ assert batch.done_minions == set()
+
+
+def test_batch__event_handler_call_start_batch_when_all_pings_return(batch):
+ batch.targeted_minions = {"foo"}
+ batch.event = MagicMock(
+ unpack=MagicMock(return_value=("salt/job/1234/ret/foo", {"id": "foo"}))
+ )
+ batch.start()
+ batch._BatchAsync__event_handler(MagicMock())
+ assert batch.event.io_loop.spawn_callback.call_args[0] == (batch.start_batch,)
+
+
+def test_batch__event_handler_not_call_start_batch_when_not_all_pings_return(batch):
+ batch.targeted_minions = {"foo", "bar"}
+ batch.event = MagicMock(
+ unpack=MagicMock(return_value=("salt/job/1234/ret/foo", {"id": "foo"}))
+ )
+ batch.start()
+ batch._BatchAsync__event_handler(MagicMock())
+ assert len(batch.event.io_loop.spawn_callback.mock_calls) == 0
+
+
+def test_batch__event_handler_batch_run_return(batch):
+ batch.event = MagicMock(
+ unpack=MagicMock(return_value=("salt/job/1235/ret/foo", {"id": "foo"}))
+ )
+ batch.start()
+ batch.active = {"foo"}
+ batch._BatchAsync__event_handler(MagicMock())
+ assert batch.active == set()
+ assert batch.done_minions == {"foo"}
+ assert batch.event.io_loop.spawn_callback.call_args[0] == (batch.schedule_next,)
+
+
+def test_batch__event_handler_find_job_return(batch):
+ batch.event = MagicMock(
+ unpack=MagicMock(
+ return_value=(
+ "salt/job/1236/ret/foo",
+ {"id": "foo", "return": "deadbeaf"},
)
)
- self.batch.start()
- self.batch.patterns.add(("salt/job/1236/ret/*", "find_job_return"))
- self.batch._BatchAsync__event_handler(MagicMock())
- self.assertEqual(self.batch.find_job_returned, {"foo"})
-
- @salt.ext.tornado.testing.gen_test
- def test_batch_run_next_end_batch_when_no_next(self):
- self.batch.end_batch = MagicMock()
- self.batch._get_next = MagicMock(return_value={})
- self.batch.run_next()
- self.assertEqual(len(self.batch.end_batch.mock_calls), 1)
-
- @salt.ext.tornado.testing.gen_test
- def test_batch_find_job(self):
- self.batch.event = MagicMock()
- future = salt.ext.tornado.gen.Future()
- future.set_result({})
- self.batch.local.run_job_async.return_value = future
- self.batch.minions = {"foo", "bar"}
- self.batch.jid_gen = MagicMock(return_value="1234")
- salt.ext.tornado.gen.sleep = MagicMock(return_value=future)
- self.batch.find_job({"foo", "bar"})
- self.assertEqual(
- self.batch.event.io_loop.spawn_callback.call_args[0],
- (self.batch.check_find_job, {"foo", "bar"}, "1234"),
+ )
+ batch.start()
+ batch.patterns.add(("salt/job/1236/ret/*", "find_job_return"))
+ batch._BatchAsync__event_handler(MagicMock())
+ assert batch.find_job_returned == {"foo"}
+
+
+def test_batch_run_next_end_batch_when_no_next(batch):
+ batch.end_batch = MagicMock()
+ batch._get_next = MagicMock(return_value={})
+ batch.run_next()
+ assert len(batch.end_batch.mock_calls) == 1
+
+
+def test_batch_find_job(batch):
+ batch.event = MagicMock()
+ future = salt.ext.tornado.gen.Future()
+ future.set_result({})
+ batch.local.run_job_async.return_value = future
+ batch.minions = {"foo", "bar"}
+ batch.jid_gen = MagicMock(return_value="1234")
+ with patch("salt.ext.tornado.gen.sleep", return_value=future):
+ batch.find_job({"foo", "bar"})
+ assert batch.event.io_loop.spawn_callback.call_args[0] == (
+ batch.check_find_job,
+ {"foo", "bar"},
+ "1234",
)
- @salt.ext.tornado.testing.gen_test
- def test_batch_find_job_with_done_minions(self):
- self.batch.done_minions = {"bar"}
- self.batch.event = MagicMock()
- future = salt.ext.tornado.gen.Future()
- future.set_result({})
- self.batch.local.run_job_async.return_value = future
- self.batch.minions = {"foo", "bar"}
- self.batch.jid_gen = MagicMock(return_value="1234")
- salt.ext.tornado.gen.sleep = MagicMock(return_value=future)
- self.batch.find_job({"foo", "bar"})
- self.assertEqual(
- self.batch.event.io_loop.spawn_callback.call_args[0],
- (self.batch.check_find_job, {"foo"}, "1234"),
- )
- def test_batch_check_find_job_did_not_return(self):
- self.batch.event = MagicMock()
- self.batch.active = {"foo"}
- self.batch.find_job_returned = set()
- self.batch.patterns = {("salt/job/1234/ret/*", "find_job_return")}
- self.batch.check_find_job({"foo"}, jid="1234")
- self.assertEqual(self.batch.find_job_returned, set())
- self.assertEqual(self.batch.active, set())
- self.assertEqual(len(self.batch.event.io_loop.add_callback.mock_calls), 0)
-
- def test_batch_check_find_job_did_return(self):
- self.batch.event = MagicMock()
- self.batch.find_job_returned = {"foo"}
- self.batch.patterns = {("salt/job/1234/ret/*", "find_job_return")}
- self.batch.check_find_job({"foo"}, jid="1234")
- self.assertEqual(
- self.batch.event.io_loop.spawn_callback.call_args[0],
- (self.batch.find_job, {"foo"}),
+def test_batch_find_job_with_done_minions(batch):
+ batch.done_minions = {"bar"}
+ batch.event = MagicMock()
+ future = salt.ext.tornado.gen.Future()
+ future.set_result({})
+ batch.local.run_job_async.return_value = future
+ batch.minions = {"foo", "bar"}
+ batch.jid_gen = MagicMock(return_value="1234")
+ with patch("salt.ext.tornado.gen.sleep", return_value=future):
+ batch.find_job({"foo", "bar"})
+ assert batch.event.io_loop.spawn_callback.call_args[0] == (
+ batch.check_find_job,
+ {"foo"},
+ "1234",
)
- def test_batch_check_find_job_multiple_states(self):
- self.batch.event = MagicMock()
- # currently running minions
- self.batch.active = {"foo", "bar"}
- # minion is running and find_job returns
- self.batch.find_job_returned = {"foo"}
+def test_batch_check_find_job_did_not_return(batch):
+ batch.event = MagicMock()
+ batch.active = {"foo"}
+ batch.find_job_returned = set()
+ batch.patterns = {("salt/job/1234/ret/*", "find_job_return")}
+ batch.check_find_job({"foo"}, jid="1234")
+ assert batch.find_job_returned == set()
+ assert batch.active == set()
+ assert len(batch.event.io_loop.add_callback.mock_calls) == 0
- # minion started running but find_job did not return
- self.batch.timedout_minions = {"faz"}
- # minion finished
- self.batch.done_minions = {"baz"}
+def test_batch_check_find_job_did_return(batch):
+ batch.event = MagicMock()
+ batch.find_job_returned = {"foo"}
+ batch.patterns = {("salt/job/1234/ret/*", "find_job_return")}
+ batch.check_find_job({"foo"}, jid="1234")
+ assert batch.event.io_loop.spawn_callback.call_args[0] == (batch.find_job, {"foo"})
- # both not yet done but only 'foo' responded to find_job
- not_done = {"foo", "bar"}
- self.batch.patterns = {("salt/job/1234/ret/*", "find_job_return")}
- self.batch.check_find_job(not_done, jid="1234")
+def test_batch_check_find_job_multiple_states(batch):
+ batch.event = MagicMock()
+ # currently running minions
+ batch.active = {"foo", "bar"}
- # assert 'bar' removed from active
- self.assertEqual(self.batch.active, {"foo"})
+ # minion is running and find_job returns
+ batch.find_job_returned = {"foo"}
- # assert 'bar' added to timedout_minions
- self.assertEqual(self.batch.timedout_minions, {"bar", "faz"})
+ # minion started running but find_job did not return
+ batch.timedout_minions = {"faz"}
+
+ # minion finished
+ batch.done_minions = {"baz"}
+
+ # both not yet done but only 'foo' responded to find_job
+ not_done = {"foo", "bar"}
+
+ batch.patterns = {("salt/job/1234/ret/*", "find_job_return")}
+ batch.check_find_job(not_done, jid="1234")
+
+ # assert 'bar' removed from active
+ assert batch.active == {"foo"}
+
+ # assert 'bar' added to timedout_minions
+ assert batch.timedout_minions == {"bar", "faz"}
+
+    # assert 'find_job' scheduled again only for 'foo'
+ assert batch.event.io_loop.spawn_callback.call_args[0] == (batch.find_job, {"foo"})
- # assert 'find_job' schedueled again only for 'foo'
- self.assertEqual(
- self.batch.event.io_loop.spawn_callback.call_args[0],
- (self.batch.find_job, {"foo"}),
- )
- def test_only_on_run_next_is_scheduled(self):
- self.batch.event = MagicMock()
- self.batch.scheduled = True
- self.batch.schedule_next()
- self.assertEqual(len(self.batch.event.io_loop.spawn_callback.mock_calls), 0)
+def test_only_on_run_next_is_scheduled(batch):
+ batch.event = MagicMock()
+ batch.scheduled = True
+ batch.schedule_next()
+ assert len(batch.event.io_loop.spawn_callback.mock_calls) == 0
diff --git a/tests/unit/cli/test_support.py b/tests/unit/cli/test_support.py
index dc0e99bb3d..971a0f122b 100644
--- a/tests/unit/cli/test_support.py
+++ b/tests/unit/cli/test_support.py
@@ -14,7 +14,7 @@ from salt.cli.support.collector import SaltSupport, SupportDataCollector
from salt.cli.support.console import IndentOutput
from salt.utils.color import get_colors
from salt.utils.stringutils import to_bytes
-from tests.support.mock import NO_MOCK, NO_MOCK_REASON, MagicMock, patch
+from tests.support.mock import MagicMock, patch
from tests.support.unit import TestCase, skipIf
try:
@@ -24,7 +24,6 @@ except ImportError:
@skipIf(not bool(pytest), "Pytest needs to be installed")
-@skipIf(NO_MOCK, NO_MOCK_REASON)
class SaltSupportIndentOutputTestCase(TestCase):
"""
Unit Tests for the salt-support indent output.
@@ -100,7 +99,6 @@ class SaltSupportIndentOutputTestCase(TestCase):
@skipIf(not bool(pytest), "Pytest needs to be installed")
-@skipIf(NO_MOCK, NO_MOCK_REASON)
class SaltSupportCollectorTestCase(TestCase):
"""
Collector tests.
@@ -232,7 +230,6 @@ class SaltSupportCollectorTestCase(TestCase):
@skipIf(not bool(pytest), "Pytest needs to be installed")
-@skipIf(NO_MOCK, NO_MOCK_REASON)
class SaltSupportRunnerTestCase(TestCase):
"""
Test runner class.
@@ -468,7 +465,6 @@ class SaltSupportRunnerTestCase(TestCase):
@skipIf(not bool(pytest), "Pytest needs to be installed")
-@skipIf(NO_MOCK, NO_MOCK_REASON)
class ProfileIntegrityTestCase(TestCase):
"""
Default profile integrity
diff --git a/tests/unit/modules/test_saltsupport.py b/tests/unit/modules/test_saltsupport.py
index 1715c68f4c..2afdd69b3e 100644
--- a/tests/unit/modules/test_saltsupport.py
+++ b/tests/unit/modules/test_saltsupport.py
@@ -8,7 +8,7 @@ import datetime
import salt.exceptions
from salt.modules import saltsupport
from tests.support.mixins import LoaderModuleMockMixin
-from tests.support.mock import NO_MOCK, NO_MOCK_REASON, MagicMock, patch
+from tests.support.mock import MagicMock, patch
from tests.support.unit import TestCase, skipIf
try:
@@ -18,7 +18,6 @@ except ImportError:
@skipIf(not bool(pytest), "Pytest required")
-@skipIf(NO_MOCK, NO_MOCK_REASON)
class SaltSupportModuleTestCase(TestCase, LoaderModuleMockMixin):
"""
Test cases for salt.modules.support::SaltSupportModule
@@ -361,7 +360,6 @@ professor: Farnsworth
@skipIf(not bool(pytest), "Pytest required")
-@skipIf(NO_MOCK, NO_MOCK_REASON)
class LogCollectorTestCase(TestCase, LoaderModuleMockMixin):
"""
Test cases for salt.modules.support::LogCollector
--
2.41.0

View File

@ -0,0 +1,23 @@
From b80c0d515e8715c160f94124dff8b5b90e773cd0 Mon Sep 17 00:00:00 2001
From: Victor Zhestkov <35733135+vzhestkov@users.noreply.github.com>
Date: Tue, 9 Nov 2021 16:19:56 +0300
Subject: [PATCH] Fix the regression for yumnotify plugin (#456)
---
scripts/suse/yum/plugins/yumnotify.py | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/scripts/suse/yum/plugins/yumnotify.py b/scripts/suse/yum/plugins/yumnotify.py
index 0d117e8946..cec5256d20 100644
--- a/scripts/suse/yum/plugins/yumnotify.py
+++ b/scripts/suse/yum/plugins/yumnotify.py
@@ -63,4 +63,4 @@ def posttrans_hook(conduit):
)
)
except OSError as e:
- print("Unable to save the cookie file: %s" % (e), file=sys.stderr)
+ sys.stderr.write("Unable to save the cookie file: %s\n" % (e))
--
2.39.2

View File

@ -0,0 +1,154 @@
From 502354be32fcff9b0607f6e435ca8825a4c2cd56 Mon Sep 17 00:00:00 2001
From: Victor Zhestkov <vzhestkov@suse.com>
Date: Thu, 3 Aug 2023 11:07:03 +0200
Subject: [PATCH] Fix the regression of user.present state when group is
unset (#589)
* Fix user.present state when group is unset
* Fix user unit test
---------
Co-authored-by: Megan Wilhite <mwilhite@vmware.com>
---
changelog/64211.fixed.md | 1 +
salt/states/user.py | 2 +-
tests/pytests/functional/states/test_user.py | 74 +++++++++++++++++++-
tests/pytests/unit/states/test_user.py | 2 +
4 files changed, 76 insertions(+), 3 deletions(-)
create mode 100644 changelog/64211.fixed.md
diff --git a/changelog/64211.fixed.md b/changelog/64211.fixed.md
new file mode 100644
index 0000000000..26b39acf02
--- /dev/null
+++ b/changelog/64211.fixed.md
@@ -0,0 +1 @@
+Fix user.present state when groups is unset to ensure the groups are unchanged, as documented.
diff --git a/salt/states/user.py b/salt/states/user.py
index ed2d5a05f4..929afb2cd1 100644
--- a/salt/states/user.py
+++ b/salt/states/user.py
@@ -100,7 +100,7 @@ def _changes(
change = {}
wanted_groups = sorted(set((groups or []) + (optional_groups or [])))
- if not remove_groups:
+ if not remove_groups or groups is None and not optional_groups:
wanted_groups = sorted(set(wanted_groups + lusr["groups"]))
if uid and lusr["uid"] != uid:
change["uid"] = uid
diff --git a/tests/pytests/functional/states/test_user.py b/tests/pytests/functional/states/test_user.py
index 09d34da168..96b1ec55c8 100644
--- a/tests/pytests/functional/states/test_user.py
+++ b/tests/pytests/functional/states/test_user.py
@@ -117,7 +117,6 @@ def test_user_present_when_home_dir_does_not_18843(states, existing_account):
ret = states.user.present(
name=existing_account.username,
home=existing_account.info.home,
- remove_groups=False,
)
assert ret.result is True
assert pathlib.Path(existing_account.info.home).is_dir()
@@ -228,7 +227,6 @@ def test_user_present_unicode(states, username, subtests):
roomnumber="①②③",
workphone="١٢٣٤",
homephone="६७८",
- remove_groups=False,
)
assert ret.result is True
@@ -429,3 +427,75 @@ def test_user_present_change_optional_groups(
user_info = modules.user.info(username)
assert user_info
assert user_info["groups"] == [group_1.name]
+
+
+@pytest.mark.skip_unless_on_linux(reason="underlying functionality only runs on Linux")
+def test_user_present_no_groups(modules, states, username):
+ """
+    test user.present when the groups arg is not
+    included but the group is created in another
+    state. Re-run the states to ensure there are
+    no changes and it is idempotent.
+ """
+ groups = ["testgroup1", "testgroup2"]
+ try:
+ ret = states.group.present(name=username, gid=61121)
+ assert ret.result is True
+
+ ret = states.user.present(
+ name=username,
+ uid=61121,
+ gid=61121,
+ )
+ assert ret.result is True
+ assert ret.changes["groups"] == [username]
+ assert ret.changes["name"] == username
+
+ ret = states.group.present(
+ name=groups[0],
+ members=[username],
+ )
+ assert ret.changes["members"] == [username]
+
+ ret = states.group.present(
+ name=groups[1],
+ members=[username],
+ )
+ assert ret.changes["members"] == [username]
+
+ user_info = modules.user.info(username)
+ assert user_info
+ assert user_info["groups"] == [username, groups[0], groups[1]]
+
+ # run again, expecting no changes
+ ret = states.group.present(name=username)
+ assert ret.result is True
+ assert ret.changes == {}
+
+ ret = states.user.present(
+ name=username,
+ )
+ assert ret.result is True
+ assert ret.changes == {}
+
+ ret = states.group.present(
+ name=groups[0],
+ members=[username],
+ )
+ assert ret.result is True
+ assert ret.changes == {}
+
+ ret = states.group.present(
+ name=groups[1],
+ members=[username],
+ )
+ assert ret.result is True
+ assert ret.changes == {}
+
+ user_info = modules.user.info(username)
+ assert user_info
+ assert user_info["groups"] == [username, groups[0], groups[1]]
+ finally:
+ for group in groups:
+ ret = states.group.absent(name=group)
+ assert ret.result is True
diff --git a/tests/pytests/unit/states/test_user.py b/tests/pytests/unit/states/test_user.py
index 94e69d70ed..d50d16e3be 100644
--- a/tests/pytests/unit/states/test_user.py
+++ b/tests/pytests/unit/states/test_user.py
@@ -189,6 +189,8 @@ def test_present_uid_gid_change():
"user.chgid": Mock(),
"file.group_to_gid": mock_group_to_gid,
"file.gid_to_group": mock_gid_to_group,
+ "group.info": MagicMock(return_value=after),
+ "user.chgroups": MagicMock(return_value=True),
}
with patch.dict(user.__grains__, {"kernel": "Linux"}), patch.dict(
user.__salt__, dunder_salt
--
2.41.0

View File

@ -0,0 +1,26 @@
From c37992e305978e95da1ac0a40a8142f578271320 Mon Sep 17 00:00:00 2001
From: Victor Zhestkov <35733135+vzhestkov@users.noreply.github.com>
Date: Mon, 8 Nov 2021 17:43:02 +0300
Subject: [PATCH] Fix traceback.print_exc calls for test_pip_state (#432)
---
tests/unit/states/test_pip_state.py | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/tests/unit/states/test_pip_state.py b/tests/unit/states/test_pip_state.py
index 5e4b6e0af1..981ad46a13 100644
--- a/tests/unit/states/test_pip_state.py
+++ b/tests/unit/states/test_pip_state.py
@@ -442,7 +442,7 @@ class PipStateInstallationErrorTest(TestCase):
sys.stdout.flush()
sys.exit(2)
except Exception as exc:
- traceback.print_exc(exc, file=sys.stdout)
+ traceback.print_exc(file=sys.stdout)
sys.stdout.flush()
sys.exit(3)
sys.exit(0)
--
2.39.2

View File

@ -0,0 +1,181 @@
From 027cbef223616f5ab6c73e60bcaa9f9e81a6ce67 Mon Sep 17 00:00:00 2001
From: Daniel Mach <daniel.mach@suse.com>
Date: Wed, 28 Jun 2023 16:39:42 +0200
Subject: [PATCH] Fix utf8 handling in 'pass' renderer and make it more
robust (#579)
* Migrate string formatting in 'pass' renderer to a f-string
* Fix utf8 handling in 'pass' renderer and make it more robust
---
changelog/64300.fixed.md | 1 +
salt/renderers/pass.py | 12 +--
tests/pytests/unit/renderers/test_pass.py | 99 +++++++++++++++++++++++
3 files changed, 103 insertions(+), 9 deletions(-)
create mode 100644 changelog/64300.fixed.md
diff --git a/changelog/64300.fixed.md b/changelog/64300.fixed.md
new file mode 100644
index 0000000000..4418db1d04
--- /dev/null
+++ b/changelog/64300.fixed.md
@@ -0,0 +1 @@
+Fix utf8 handling in 'pass' renderer
diff --git a/salt/renderers/pass.py b/salt/renderers/pass.py
index ba0f152c23..ae75bba443 100644
--- a/salt/renderers/pass.py
+++ b/salt/renderers/pass.py
@@ -145,23 +145,17 @@ def _fetch_secret(pass_path):
env["GNUPGHOME"] = pass_gnupghome
try:
- proc = Popen(cmd, stdout=PIPE, stderr=PIPE, env=env)
+ proc = Popen(cmd, stdout=PIPE, stderr=PIPE, env=env, encoding="utf-8")
pass_data, pass_error = proc.communicate()
pass_returncode = proc.returncode
- except OSError as e:
+ except (OSError, UnicodeDecodeError) as e:
pass_data, pass_error = "", str(e)
pass_returncode = 1
# The version of pass used during development sent output to
# stdout instead of stderr even though its returncode was non zero.
if pass_returncode or not pass_data:
- try:
- pass_error = pass_error.decode("utf-8")
- except (AttributeError, ValueError):
- pass
- msg = "Could not fetch secret '{}' from the password store: {}".format(
- pass_path, pass_error
- )
+ msg = f"Could not fetch secret '{pass_path}' from the password store: {pass_error}"
if pass_strict_fetch:
raise SaltRenderError(msg)
else:
diff --git a/tests/pytests/unit/renderers/test_pass.py b/tests/pytests/unit/renderers/test_pass.py
index 1e2ebb7ea8..f7c79e1fe1 100644
--- a/tests/pytests/unit/renderers/test_pass.py
+++ b/tests/pytests/unit/renderers/test_pass.py
@@ -1,8 +1,12 @@
import importlib
+import os
+import shutil
+import tempfile
import pytest
import salt.exceptions
+import salt.utils.files
from tests.support.mock import MagicMock, patch
# "pass" is a reserved keyword, we need to import it differently
@@ -19,6 +23,47 @@ def configure_loader_modules(master_opts):
}
+@pytest.fixture()
+def pass_executable(request):
+ tmp_dir = tempfile.mkdtemp(prefix="salt_pass_")
+ pass_path = os.path.join(tmp_dir, "pass")
+ with salt.utils.files.fopen(pass_path, "w") as f:
+ f.write("#!/bin/sh\n")
+    # return the pass path wrapped in unicode characters
+ # pass args ($1, $2) are ("show", <pass_path>)
+ f.write('echo "α>>> $2 <<<β"\n')
+ os.chmod(pass_path, 0o755)
+ yield pass_path
+ shutil.rmtree(tmp_dir)
+
+
+@pytest.fixture()
+def pass_executable_error(request):
+ tmp_dir = tempfile.mkdtemp(prefix="salt_pass_")
+ pass_path = os.path.join(tmp_dir, "pass")
+ with salt.utils.files.fopen(pass_path, "w") as f:
+ f.write("#!/bin/sh\n")
+ # return error message with unicode characters
+ f.write('echo "ERROR: αβγ" >&2\n')
+ f.write("exit 1\n")
+ os.chmod(pass_path, 0o755)
+ yield pass_path
+ shutil.rmtree(tmp_dir)
+
+
+@pytest.fixture()
+def pass_executable_invalid_utf8(request):
+ tmp_dir = tempfile.mkdtemp(prefix="salt_pass_")
+ pass_path = os.path.join(tmp_dir, "pass")
+ with salt.utils.files.fopen(pass_path, "wb") as f:
+ f.write(b"#!/bin/sh\n")
+ # return invalid utf-8 sequence
+ f.write(b'echo "\x80\x81"\n')
+ os.chmod(pass_path, 0o755)
+ yield pass_path
+ shutil.rmtree(tmp_dir)
+
+
# The default behavior is that if fetching a secret from pass fails,
# the value is passed through. Even the trailing newlines are preserved.
def test_passthrough():
@@ -161,3 +206,57 @@ def test_env():
call_args, call_kwargs = popen_mock.call_args_list[0]
assert call_kwargs["env"]["GNUPGHOME"] == config["pass_gnupghome"]
assert call_kwargs["env"]["PASSWORD_STORE_DIR"] == config["pass_dir"]
+
+
+@pytest.mark.skip_on_windows(reason="Not supported on Windows")
+def test_utf8(pass_executable):
+ config = {
+ "pass_variable_prefix": "pass:",
+ "pass_strict_fetch": True,
+ }
+ mocks = {
+ "_get_pass_exec": MagicMock(return_value=pass_executable),
+ }
+
+ pass_path = "pass:secret"
+ with patch.dict(pass_.__opts__, config), patch.dict(pass_.__dict__, mocks):
+ result = pass_.render(pass_path)
+ assert result == "α>>> secret <<<β"
+
+
+@pytest.mark.skip_on_windows(reason="Not supported on Windows")
+def test_utf8_error(pass_executable_error):
+ config = {
+ "pass_variable_prefix": "pass:",
+ "pass_strict_fetch": True,
+ }
+ mocks = {
+ "_get_pass_exec": MagicMock(return_value=pass_executable_error),
+ }
+
+ pass_path = "pass:secret"
+ with patch.dict(pass_.__opts__, config), patch.dict(pass_.__dict__, mocks):
+ with pytest.raises(
+ salt.exceptions.SaltRenderError,
+ match=r"Could not fetch secret 'secret' from the password store: ERROR: αβγ",
+ ):
+ result = pass_.render(pass_path)
+
+
+@pytest.mark.skip_on_windows(reason="Not supported on Windows")
+def test_invalid_utf8(pass_executable_invalid_utf8):
+ config = {
+ "pass_variable_prefix": "pass:",
+ "pass_strict_fetch": True,
+ }
+ mocks = {
+ "_get_pass_exec": MagicMock(return_value=pass_executable_invalid_utf8),
+ }
+
+ pass_path = "pass:secret"
+ with patch.dict(pass_.__opts__, config), patch.dict(pass_.__dict__, mocks):
+ with pytest.raises(
+ salt.exceptions.SaltRenderError,
+ match=r"Could not fetch secret 'secret' from the password store: 'utf-8' codec can't decode byte 0x80 in position 0: invalid start byte",
+ ):
+ result = pass_.render(pass_path)
--
2.41.0

View File

@ -0,0 +1,58 @@
From c0fae09e5a4f6997a60007d970c7c6a5614d9102 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Pablo=20Su=C3=A1rez=20Hern=C3=A1ndez?=
<psuarezhernandez@suse.com>
Date: Wed, 19 Apr 2023 10:41:28 +0100
Subject: [PATCH] Fix version detection and avoid building and testing
failures
---
salt/version.py | 20 ++------------------
1 file changed, 2 insertions(+), 18 deletions(-)
diff --git a/salt/version.py b/salt/version.py
index 43cb5f86f7..67719bd020 100644
--- a/salt/version.py
+++ b/salt/version.py
@@ -1,7 +1,6 @@
"""
Set up the version of Salt
"""
-import argparse
import operator
import os
import platform
@@ -78,7 +77,7 @@ class SaltVersionsInfo(type):
ALUMINIUM = SaltVersion("Aluminium" , info=3003, released=True)
SILICON = SaltVersion("Silicon" , info=3004, released=True)
PHOSPHORUS = SaltVersion("Phosphorus" , info=3005, released=True)
- SULFUR = SaltVersion("Sulfur" , info=(3006, 0), released=True)
+ SULFUR = SaltVersion("Sulfur" , info=(3006, 0))
CHLORINE = SaltVersion("Chlorine" , info=(3007, 0))
ARGON = SaltVersion("Argon" , info=(3008, 0))
POTASSIUM = SaltVersion("Potassium" , info=(3009, 0))
@@ -922,20 +921,5 @@ def versions_report(include_salt_cloud=False, include_extensions=True):
yield from info
-def _parser():
- parser = argparse.ArgumentParser()
- parser.add_argument(
- "--next-release", help="Return the next release", action="store_true"
- )
- # When pip installing we pass in other args to this script.
- # This allows us to catch those args but not use them
- parser.add_argument("unknown", nargs=argparse.REMAINDER)
- return parser.parse_args()
-
-
if __name__ == "__main__":
- args = _parser()
- if args.next_release:
- print(__saltstack_version__.next_release())
- else:
- print(__version__)
+ print(__version__)
--
2.39.2

View File

@ -0,0 +1,833 @@
From 7051f86bb48dbd618a7422d469f3aae4c6f18008 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Pablo=20Su=C3=A1rez=20Hern=C3=A1ndez?=
<psuarezhernandez@suse.com>
Date: Thu, 31 Aug 2023 10:41:53 +0100
Subject: [PATCH] Fixed gitfs cachedir_basename to avoid hash collisions
(#599)
(bsc#1193948, bsc#1214797, CVE-2023-20898)
Fix gitfs tests
It's `gitfs` not `gtfs`, plus some code fixes and cleanup
Signed-off-by: Pedro Algarvio <palgarvio@vmware.com>
fix doc
wrap sha in base64
clean up cache name
stop branch collision
run pre
Co-authored-by: cmcmarrow <charles.mcmarrow.4@gmail.com>
---
changelog/cve-2023-20898.security.md | 1 +
salt/utils/gitfs.py | 83 ++++++-
tests/pytests/unit/utils/test_gitfs.py | 255 +++++++++++++++++++++
tests/unit/utils/test_gitfs.py | 305 ++++++-------------------
4 files changed, 403 insertions(+), 241 deletions(-)
create mode 100644 changelog/cve-2023-20898.security.md
create mode 100644 tests/pytests/unit/utils/test_gitfs.py
diff --git a/changelog/cve-2023-20898.security.md b/changelog/cve-2023-20898.security.md
new file mode 100644
index 0000000000..44f1729192
--- /dev/null
+++ b/changelog/cve-2023-20898.security.md
@@ -0,0 +1 @@
+Fixed gitfs cachedir_basename to avoid hash collisions. Added MP Lock to gitfs. These changes should stop race conditions.
diff --git a/salt/utils/gitfs.py b/salt/utils/gitfs.py
index 38e84f38aa..af61aa0dda 100644
--- a/salt/utils/gitfs.py
+++ b/salt/utils/gitfs.py
@@ -3,6 +3,7 @@ Classes which provide the shared base for GitFS, git_pillar, and winrepo
"""
+import base64
import contextlib
import copy
import errno
@@ -11,10 +12,12 @@ import glob
import hashlib
import io
import logging
+import multiprocessing
import os
import shlex
import shutil
import stat
+import string
import subprocess
import time
import weakref
@@ -22,6 +25,7 @@ from datetime import datetime
import salt.ext.tornado.ioloop
import salt.fileserver
+import salt.syspaths
import salt.utils.configparser
import salt.utils.data
import salt.utils.files
@@ -34,7 +38,6 @@ import salt.utils.stringutils
import salt.utils.url
import salt.utils.user
import salt.utils.versions
-import salt.syspaths
from salt.config import DEFAULT_MASTER_OPTS as _DEFAULT_MASTER_OPTS
from salt.exceptions import FileserverConfigError, GitLockError, get_error_message
from salt.utils.event import tagify
@@ -226,6 +229,10 @@ class GitProvider:
invoking the parent class' __init__.
"""
+    # master lock should only be locked for very short periods of time ("seconds")
+    # the master lock should be used whenever the git provider reads or writes to one of its locks
+ _master_lock = multiprocessing.Lock()
+
def __init__(
self,
opts,
@@ -452,13 +459,44 @@ class GitProvider:
failhard(self.role)
hash_type = getattr(hashlib, self.opts.get("hash_type", "md5"))
+ # Generate full id.
+        # Full id helps decrease the chances of collisions in the gitfs cache.
+ try:
+ target = str(self.get_checkout_target())
+ except AttributeError:
+ target = ""
+ self._full_id = "-".join(
+ [
+ getattr(self, "name", ""),
+ self.id,
+ getattr(self, "env", ""),
+ getattr(self, "_root", ""),
+ self.role,
+ getattr(self, "base", ""),
+ getattr(self, "branch", ""),
+ target,
+ ]
+ )
# We loaded this data from yaml configuration files, so, its safe
# to use UTF-8
- self.hash = hash_type(self.id.encode("utf-8")).hexdigest()
- self.cachedir_basename = getattr(self, "name", self.hash)
+ base64_hash = str(
+ base64.b64encode(hash_type(self._full_id.encode("utf-8")).digest()),
+ encoding="ascii", # base64 only outputs ascii
+ ).replace(
+ "/", "_"
+ ) # replace "/" with "_" to not cause trouble with file system
+
+ # limit name length to 19, so we don't eat up all the path length for windows
+ # this is due to pygit2 limitations
+ # replace any unknown char with "_" to not cause trouble with file system
+ name_chars = string.ascii_letters + string.digits + "-"
+ cache_name = "".join(
+ c if c in name_chars else "_" for c in getattr(self, "name", "")[:19]
+ )
+
+ self.cachedir_basename = f"{cache_name}-{base64_hash}"
self.cachedir = salt.utils.path.join(cache_root, self.cachedir_basename)
self.linkdir = salt.utils.path.join(cache_root, "links", self.cachedir_basename)
-
if not os.path.isdir(self.cachedir):
os.makedirs(self.cachedir)
@@ -473,6 +511,12 @@ class GitProvider:
log.critical(msg, exc_info=True)
failhard(self.role)
+ def full_id(self):
+ return self._full_id
+
+ def get_cachedir_basename(self):
+ return self.cachedir_basename
+
def _get_envs_from_ref_paths(self, refs):
"""
Return the names of remote refs (stripped of the remote name) and tags
@@ -663,6 +707,19 @@ class GitProvider:
"""
Clear update.lk
"""
+ if self.__class__._master_lock.acquire(timeout=60) is False:
+ # if gitfs works right we should never see this timeout error.
+ log.error("gitfs master lock timeout!")
+ raise TimeoutError("gitfs master lock timeout!")
+ try:
+ return self._clear_lock(lock_type)
+ finally:
+ self.__class__._master_lock.release()
+
+ def _clear_lock(self, lock_type="update"):
+ """
+ Clear update.lk without MultiProcessing locks
+ """
lock_file = self._get_lock_file(lock_type=lock_type)
def _add_error(errlist, exc):
@@ -838,6 +895,20 @@ class GitProvider:
"""
Place a lock file if (and only if) it does not already exist.
"""
+ if self.__class__._master_lock.acquire(timeout=60) is False:
+ # if gitfs works right we should never see this timeout error.
+ log.error("gitfs master lock timeout!")
+ raise TimeoutError("gitfs master lock timeout!")
+ try:
+ return self.__lock(lock_type, failhard)
+ finally:
+ self.__class__._master_lock.release()
+
+ def __lock(self, lock_type="update", failhard=False):
+ """
+ Place a lock file if (and only if) it does not already exist.
+ Without MultiProcessing locks.
+ """
try:
fh_ = os.open(
self._get_lock_file(lock_type), os.O_CREAT | os.O_EXCL | os.O_WRONLY
@@ -904,9 +975,9 @@ class GitProvider:
lock_type,
lock_file,
)
- success, fail = self.clear_lock()
+ success, fail = self._clear_lock()
if success:
- return self._lock(lock_type="update", failhard=failhard)
+ return self.__lock(lock_type="update", failhard=failhard)
elif failhard:
raise
return
diff --git a/tests/pytests/unit/utils/test_gitfs.py b/tests/pytests/unit/utils/test_gitfs.py
new file mode 100644
index 0000000000..e9915de412
--- /dev/null
+++ b/tests/pytests/unit/utils/test_gitfs.py
@@ -0,0 +1,255 @@
+import os
+import string
+import time
+
+import pytest
+
+import salt.fileserver.gitfs
+import salt.utils.gitfs
+from salt.exceptions import FileserverConfigError
+from tests.support.helpers import patched_environ
+from tests.support.mock import MagicMock, patch
+
+try:
+ HAS_PYGIT2 = (
+ salt.utils.gitfs.PYGIT2_VERSION
+ and salt.utils.gitfs.PYGIT2_VERSION >= salt.utils.gitfs.PYGIT2_MINVER
+ and salt.utils.gitfs.LIBGIT2_VERSION
+ and salt.utils.gitfs.LIBGIT2_VERSION >= salt.utils.gitfs.LIBGIT2_MINVER
+ )
+except AttributeError:
+ HAS_PYGIT2 = False
+
+
+if HAS_PYGIT2:
+ import pygit2
+
+
+@pytest.mark.parametrize(
+ "role_name,role_class",
+ (
+ ("gitfs", salt.utils.gitfs.GitFS),
+ ("git_pillar", salt.utils.gitfs.GitPillar),
+ ("winrepo", salt.utils.gitfs.WinRepo),
+ ),
+)
+def test_provider_case_insensitive_gitfs_provider(minion_opts, role_name, role_class):
+ """
+ Ensure that both lowercase and non-lowercase values are supported
+ """
+ provider = "GitPython"
+ key = "{}_provider".format(role_name)
+ with patch.object(role_class, "verify_gitpython", MagicMock(return_value=True)):
+ with patch.object(role_class, "verify_pygit2", MagicMock(return_value=False)):
+ args = [minion_opts, {}]
+ kwargs = {"init_remotes": False}
+ if role_name == "winrepo":
+ kwargs["cache_root"] = "/tmp/winrepo-dir"
+ with patch.dict(minion_opts, {key: provider}):
+ # Try to create an instance with uppercase letters in
+ # provider name. If it fails then a
+ # FileserverConfigError will be raised, so no assert is
+ # necessary.
+ role_class(*args, **kwargs)
+ # Now try to instantiate an instance with all lowercase
+ # letters. Again, no need for an assert here.
+ role_class(*args, **kwargs)
+
+
+@pytest.mark.parametrize(
+ "role_name,role_class",
+ (
+ ("gitfs", salt.utils.gitfs.GitFS),
+ ("git_pillar", salt.utils.gitfs.GitPillar),
+ ("winrepo", salt.utils.gitfs.WinRepo),
+ ),
+)
+def test_valid_provider_gitfs_provider(minion_opts, role_name, role_class):
+ """
+ Ensure that an invalid provider is not accepted, raising a
+ FileserverConfigError.
+ """
+
+ def _get_mock(verify, provider):
+ """
+ Return a MagicMock with the desired return value
+ """
+ return MagicMock(return_value=verify.endswith(provider))
+
+ key = "{}_provider".format(role_name)
+ for provider in salt.utils.gitfs.GIT_PROVIDERS:
+ verify = "verify_gitpython"
+ mock1 = _get_mock(verify, provider)
+ with patch.object(role_class, verify, mock1):
+ verify = "verify_pygit2"
+ mock2 = _get_mock(verify, provider)
+ with patch.object(role_class, verify, mock2):
+ args = [minion_opts, {}]
+ kwargs = {"init_remotes": False}
+ if role_name == "winrepo":
+ kwargs["cache_root"] = "/tmp/winrepo-dir"
+ with patch.dict(minion_opts, {key: provider}):
+ role_class(*args, **kwargs)
+ with patch.dict(minion_opts, {key: "foo"}):
+ # Set the provider name to a known invalid provider
+ # and make sure it raises an exception.
+ with pytest.raises(FileserverConfigError):
+ role_class(*args, **kwargs)
+
+
+@pytest.fixture
+def _prepare_remote_repository_pygit2(tmp_path):
+ remote = os.path.join(tmp_path, "pygit2-repo")
+ filecontent = "This is an empty README file"
+ filename = "README"
+ signature = pygit2.Signature(
+ "Dummy Commiter", "dummy@dummy.com", int(time.time()), 0
+ )
+ repository = pygit2.init_repository(remote, False)
+ builder = repository.TreeBuilder()
+ tree = builder.write()
+ commit = repository.create_commit(
+ "HEAD", signature, signature, "Create master branch", tree, []
+ )
+ repository.create_reference("refs/tags/simple_tag", commit)
+ with salt.utils.files.fopen(
+ os.path.join(repository.workdir, filename), "w"
+ ) as file:
+ file.write(filecontent)
+ blob = repository.create_blob_fromworkdir(filename)
+ builder = repository.TreeBuilder()
+ builder.insert(filename, blob, pygit2.GIT_FILEMODE_BLOB)
+ tree = builder.write()
+ repository.index.read()
+ repository.index.add(filename)
+ repository.index.write()
+ commit = repository.create_commit(
+ "HEAD",
+ signature,
+ signature,
+ "Added a README",
+ tree,
+ [repository.head.target],
+ )
+ repository.create_tag(
+ "annotated_tag", commit, pygit2.GIT_OBJ_COMMIT, signature, "some message"
+ )
+ return remote
+
+
+@pytest.fixture
+def _prepare_provider(tmp_path, minion_opts, _prepare_remote_repository_pygit2):
+ cache = tmp_path / "pygit2-repo-cache"
+ minion_opts.update(
+ {
+ "cachedir": str(cache),
+ "gitfs_disable_saltenv_mapping": False,
+ "gitfs_base": "master",
+ "gitfs_insecure_auth": False,
+ "gitfs_mountpoint": "",
+ "gitfs_passphrase": "",
+ "gitfs_password": "",
+ "gitfs_privkey": "",
+ "gitfs_provider": "pygit2",
+ "gitfs_pubkey": "",
+ "gitfs_ref_types": ["branch", "tag", "sha"],
+ "gitfs_refspecs": [
+ "+refs/heads/*:refs/remotes/origin/*",
+ "+refs/tags/*:refs/tags/*",
+ ],
+ "gitfs_root": "",
+ "gitfs_saltenv_blacklist": [],
+ "gitfs_saltenv_whitelist": [],
+ "gitfs_ssl_verify": True,
+ "gitfs_update_interval": 3,
+ "gitfs_user": "",
+ "verified_gitfs_provider": "pygit2",
+ }
+ )
+ per_remote_defaults = {
+ "base": "master",
+ "disable_saltenv_mapping": False,
+ "insecure_auth": False,
+ "ref_types": ["branch", "tag", "sha"],
+ "passphrase": "",
+ "mountpoint": "",
+ "password": "",
+ "privkey": "",
+ "pubkey": "",
+ "refspecs": [
+ "+refs/heads/*:refs/remotes/origin/*",
+ "+refs/tags/*:refs/tags/*",
+ ],
+ "root": "",
+ "saltenv_blacklist": [],
+ "saltenv_whitelist": [],
+ "ssl_verify": True,
+ "update_interval": 60,
+ "user": "",
+ }
+ per_remote_only = ("all_saltenvs", "name", "saltenv")
+ override_params = tuple(per_remote_defaults)
+ cache_root = cache / "gitfs"
+ role = "gitfs"
+ provider = salt.utils.gitfs.Pygit2(
+ minion_opts,
+ _prepare_remote_repository_pygit2,
+ per_remote_defaults,
+ per_remote_only,
+ override_params,
+ str(cache_root),
+ role,
+ )
+ return provider
+
+
+@pytest.mark.skipif(not HAS_PYGIT2, reason="This host lacks proper pygit2 support")
+@pytest.mark.skip_on_windows(
+ reason="Skip Pygit2 on windows, due to pygit2 access error on windows"
+)
+def test_checkout_pygit2(_prepare_provider):
+ provider = _prepare_provider
+ provider.remotecallbacks = None
+ provider.credentials = None
+ provider.init_remote()
+ provider.fetch()
+ provider.branch = "master"
+ assert provider.cachedir in provider.checkout()
+ provider.branch = "simple_tag"
+ assert provider.cachedir in provider.checkout()
+ provider.branch = "annotated_tag"
+ assert provider.cachedir in provider.checkout()
+ provider.branch = "does_not_exist"
+ assert provider.checkout() is None
+
+
+@pytest.mark.skipif(not HAS_PYGIT2, reason="This host lacks proper pygit2 support")
+@pytest.mark.skip_on_windows(
+ reason="Skip Pygit2 on windows, due to pygit2 access error on windows"
+)
+def test_checkout_pygit2_with_home_env_unset(_prepare_provider):
+ provider = _prepare_provider
+ provider.remotecallbacks = None
+ provider.credentials = None
+ with patched_environ(__cleanup__=["HOME"]):
+ assert "HOME" not in os.environ
+ provider.init_remote()
+ provider.fetch()
+ assert "HOME" in os.environ
+
+
+def test_full_id_pygit2(_prepare_provider):
+ assert _prepare_provider.full_id().startswith("-")
+ assert _prepare_provider.full_id().endswith("/pygit2-repo---gitfs-master--")
+
+
+@pytest.mark.skipif(not HAS_PYGIT2, reason="This host lacks proper pygit2 support")
+@pytest.mark.skip_on_windows(
+ reason="Skip Pygit2 on windows, due to pygit2 access error on windows"
+)
+def test_get_cachedir_basename_pygit2(_prepare_provider):
+ basename = _prepare_provider.get_cachedir_basename()
+ assert len(basename) == 45
+ assert basename[0] == "-"
+ # check that a valid base64 is given '/' -> '_'
+ assert all(c in string.ascii_letters + string.digits + "+_=" for c in basename[1:])
diff --git a/tests/unit/utils/test_gitfs.py b/tests/unit/utils/test_gitfs.py
index 7c400b69af..6d8e97a239 100644
--- a/tests/unit/utils/test_gitfs.py
+++ b/tests/unit/utils/test_gitfs.py
@@ -2,37 +2,20 @@
These only test the provider selection and verification logic, they do not init
any remotes.
"""
-import os
-import shutil
-from time import time
+
+import tempfile
import pytest
+import salt.ext.tornado.ioloop
import salt.fileserver.gitfs
import salt.utils.files
import salt.utils.gitfs
+import salt.utils.path
import salt.utils.platform
-import tests.support.paths
-from salt.exceptions import FileserverConfigError
-from tests.support.helpers import patched_environ
from tests.support.mixins import AdaptedConfigurationTestCaseMixin
-from tests.support.mock import MagicMock, patch
from tests.support.unit import TestCase
-try:
- HAS_PYGIT2 = (
- salt.utils.gitfs.PYGIT2_VERSION
- and salt.utils.gitfs.PYGIT2_VERSION >= salt.utils.gitfs.PYGIT2_MINVER
- and salt.utils.gitfs.LIBGIT2_VERSION
- and salt.utils.gitfs.LIBGIT2_VERSION >= salt.utils.gitfs.LIBGIT2_MINVER
- )
-except AttributeError:
- HAS_PYGIT2 = False
-
-
-if HAS_PYGIT2:
- import pygit2
-
def _clear_instance_map():
try:
@@ -45,6 +28,9 @@ def _clear_instance_map():
class TestGitBase(TestCase, AdaptedConfigurationTestCaseMixin):
def setUp(self):
+ self._tmp_dir = tempfile.TemporaryDirectory()
+ tmp_name = self._tmp_dir.name
+
class MockedProvider(
salt.utils.gitfs.GitProvider
): # pylint: disable=abstract-method
@@ -71,6 +57,7 @@ class TestGitBase(TestCase, AdaptedConfigurationTestCaseMixin):
)
def init_remote(self):
+ self.gitdir = salt.utils.path.join(tmp_name, ".git")
self.repo = True
new = False
return new
@@ -107,6 +94,7 @@ class TestGitBase(TestCase, AdaptedConfigurationTestCaseMixin):
for remote in self.main_class.remotes:
remote.fetched = False
del self.main_class
+ self._tmp_dir.cleanup()
def test_update_all(self):
self.main_class.update()
@@ -126,226 +114,73 @@ class TestGitBase(TestCase, AdaptedConfigurationTestCaseMixin):
self.assertTrue(self.main_class.remotes[0].fetched)
self.assertFalse(self.main_class.remotes[1].fetched)
-
-class TestGitFSProvider(TestCase):
- def setUp(self):
- self.opts = {"cachedir": "/tmp/gitfs-test-cache"}
-
- def tearDown(self):
- self.opts = None
-
- def test_provider_case_insensitive(self):
- """
- Ensure that both lowercase and non-lowercase values are supported
- """
- provider = "GitPython"
- for role_name, role_class in (
- ("gitfs", salt.utils.gitfs.GitFS),
- ("git_pillar", salt.utils.gitfs.GitPillar),
- ("winrepo", salt.utils.gitfs.WinRepo),
- ):
-
- key = "{}_provider".format(role_name)
- with patch.object(
- role_class, "verify_gitpython", MagicMock(return_value=True)
- ):
- with patch.object(
- role_class, "verify_pygit2", MagicMock(return_value=False)
- ):
- args = [self.opts, {}]
- kwargs = {"init_remotes": False}
- if role_name == "winrepo":
- kwargs["cache_root"] = "/tmp/winrepo-dir"
- with patch.dict(self.opts, {key: provider}):
- # Try to create an instance with uppercase letters in
- # provider name. If it fails then a
- # FileserverConfigError will be raised, so no assert is
- # necessary.
- role_class(*args, **kwargs)
- # Now try to instantiate an instance with all lowercase
- # letters. Again, no need for an assert here.
- role_class(*args, **kwargs)
-
- def test_valid_provider(self):
- """
- Ensure that an invalid provider is not accepted, raising a
- FileserverConfigError.
- """
-
- def _get_mock(verify, provider):
- """
- Return a MagicMock with the desired return value
- """
- return MagicMock(return_value=verify.endswith(provider))
-
- for role_name, role_class in (
- ("gitfs", salt.utils.gitfs.GitFS),
- ("git_pillar", salt.utils.gitfs.GitPillar),
- ("winrepo", salt.utils.gitfs.WinRepo),
- ):
- key = "{}_provider".format(role_name)
- for provider in salt.utils.gitfs.GIT_PROVIDERS:
- verify = "verify_gitpython"
- mock1 = _get_mock(verify, provider)
- with patch.object(role_class, verify, mock1):
- verify = "verify_pygit2"
- mock2 = _get_mock(verify, provider)
- with patch.object(role_class, verify, mock2):
- args = [self.opts, {}]
- kwargs = {"init_remotes": False}
- if role_name == "winrepo":
- kwargs["cache_root"] = "/tmp/winrepo-dir"
-
- with patch.dict(self.opts, {key: provider}):
- role_class(*args, **kwargs)
-
- with patch.dict(self.opts, {key: "foo"}):
- # Set the provider name to a known invalid provider
- # and make sure it raises an exception.
- self.assertRaises(
- FileserverConfigError, role_class, *args, **kwargs
- )
-
-
-@pytest.mark.skipif(not HAS_PYGIT2, reason="This host lacks proper pygit2 support")
-@pytest.mark.skip_on_windows(
- reason="Skip Pygit2 on windows, due to pygit2 access error on windows"
-)
-class TestPygit2(TestCase):
- def _prepare_remote_repository(self, path):
- shutil.rmtree(path, ignore_errors=True)
-
- filecontent = "This is an empty README file"
- filename = "README"
-
- signature = pygit2.Signature(
- "Dummy Commiter", "dummy@dummy.com", int(time()), 0
+ def test_full_id(self):
+ self.assertEqual(
+ self.main_class.remotes[0].full_id(), "-file://repo1.git---gitfs-master--"
)
- repository = pygit2.init_repository(path, False)
- builder = repository.TreeBuilder()
- tree = builder.write()
- commit = repository.create_commit(
- "HEAD", signature, signature, "Create master branch", tree, []
+ def test_full_id_with_name(self):
+ self.assertEqual(
+ self.main_class.remotes[1].full_id(),
+ "repo2-file://repo2.git---gitfs-master--",
)
- repository.create_reference("refs/tags/simple_tag", commit)
- with salt.utils.files.fopen(
- os.path.join(repository.workdir, filename), "w"
- ) as file:
- file.write(filecontent)
-
- blob = repository.create_blob_fromworkdir(filename)
- builder = repository.TreeBuilder()
- builder.insert(filename, blob, pygit2.GIT_FILEMODE_BLOB)
- tree = builder.write()
-
- repository.index.read()
- repository.index.add(filename)
- repository.index.write()
-
- commit = repository.create_commit(
- "HEAD",
- signature,
- signature,
- "Added a README",
- tree,
- [repository.head.target],
- )
- repository.create_tag(
- "annotated_tag", commit, pygit2.GIT_OBJ_COMMIT, signature, "some message"
+ def test_get_cachedir_basename(self):
+ self.assertEqual(
+ self.main_class.remotes[0].get_cachedir_basename(),
+ "-jXhnbGDemchtZwTwaD2s6VOaVvs98a7w+AtiYlmOVb0=",
)
- def _prepare_cache_repository(self, remote, cache):
- opts = {
- "cachedir": cache,
- "__role": "minion",
- "gitfs_disable_saltenv_mapping": False,
- "gitfs_base": "master",
- "gitfs_insecure_auth": False,
- "gitfs_mountpoint": "",
- "gitfs_passphrase": "",
- "gitfs_password": "",
- "gitfs_privkey": "",
- "gitfs_provider": "pygit2",
- "gitfs_pubkey": "",
- "gitfs_ref_types": ["branch", "tag", "sha"],
- "gitfs_refspecs": [
- "+refs/heads/*:refs/remotes/origin/*",
- "+refs/tags/*:refs/tags/*",
- ],
- "gitfs_root": "",
- "gitfs_saltenv_blacklist": [],
- "gitfs_saltenv_whitelist": [],
- "gitfs_ssl_verify": True,
- "gitfs_update_interval": 3,
- "gitfs_user": "",
- "verified_gitfs_provider": "pygit2",
- }
- per_remote_defaults = {
- "base": "master",
- "disable_saltenv_mapping": False,
- "insecure_auth": False,
- "ref_types": ["branch", "tag", "sha"],
- "passphrase": "",
- "mountpoint": "",
- "password": "",
- "privkey": "",
- "pubkey": "",
- "refspecs": [
- "+refs/heads/*:refs/remotes/origin/*",
- "+refs/tags/*:refs/tags/*",
- ],
- "root": "",
- "saltenv_blacklist": [],
- "saltenv_whitelist": [],
- "ssl_verify": True,
- "update_interval": 60,
- "user": "",
- }
- per_remote_only = ("all_saltenvs", "name", "saltenv")
- override_params = tuple(per_remote_defaults.keys())
- cache_root = os.path.join(cache, "gitfs")
- role = "gitfs"
- shutil.rmtree(cache_root, ignore_errors=True)
- provider = salt.utils.gitfs.Pygit2(
- opts,
- remote,
- per_remote_defaults,
- per_remote_only,
- override_params,
- cache_root,
- role,
+ def test_get_cachedir_base_with_name(self):
+ self.assertEqual(
+ self.main_class.remotes[1].get_cachedir_basename(),
+ "repo2-nuezpiDtjQRFC0ZJDByvi+F6Vb8ZhfoH41n_KFxTGsU=",
)
- return provider
- def test_checkout(self):
- remote = os.path.join(tests.support.paths.TMP, "pygit2-repo")
- cache = os.path.join(tests.support.paths.TMP, "pygit2-repo-cache")
- self._prepare_remote_repository(remote)
- provider = self._prepare_cache_repository(remote, cache)
- provider.remotecallbacks = None
- provider.credentials = None
- provider.init_remote()
- provider.fetch()
- provider.branch = "master"
- self.assertIn(provider.cachedir, provider.checkout())
- provider.branch = "simple_tag"
- self.assertIn(provider.cachedir, provider.checkout())
- provider.branch = "annotated_tag"
- self.assertIn(provider.cachedir, provider.checkout())
- provider.branch = "does_not_exist"
- self.assertIsNone(provider.checkout())
+ def test_git_provider_mp_lock(self):
+ """
+ Check that lock is released after provider.lock()
+ """
+ provider = self.main_class.remotes[0]
+ provider.lock()
+ # check that lock has been released
+ self.assertTrue(provider._master_lock.acquire(timeout=5))
+ provider._master_lock.release()
- def test_checkout_with_home_env_unset(self):
- remote = os.path.join(tests.support.paths.TMP, "pygit2-repo")
- cache = os.path.join(tests.support.paths.TMP, "pygit2-repo-cache")
- self._prepare_remote_repository(remote)
- provider = self._prepare_cache_repository(remote, cache)
- provider.remotecallbacks = None
- provider.credentials = None
- with patched_environ(__cleanup__=["HOME"]):
- self.assertTrue("HOME" not in os.environ)
- provider.init_remote()
- provider.fetch()
- self.assertTrue("HOME" in os.environ)
+ def test_git_provider_mp_clear_lock(self):
+ """
+ Check that lock is released after provider.clear_lock()
+ """
+ provider = self.main_class.remotes[0]
+ provider.clear_lock()
+ # check that lock has been released
+ self.assertTrue(provider._master_lock.acquire(timeout=5))
+ provider._master_lock.release()
+
+ @pytest.mark.slow_test
+ def test_git_provider_mp_lock_timeout(self):
+ """
+ Check that lock will time out if master lock is locked.
+ """
+ provider = self.main_class.remotes[0]
+        # Hijack the lock so git provider is fooled into thinking another instance is doing something.
+ self.assertTrue(provider._master_lock.acquire(timeout=5))
+ try:
+ # git provider should raise timeout error to avoid lock race conditions
+ self.assertRaises(TimeoutError, provider.lock)
+ finally:
+ provider._master_lock.release()
+
+ @pytest.mark.slow_test
+ def test_git_provider_mp_clear_lock_timeout(self):
+ """
+ Check that clear lock will time out if master lock is locked.
+ """
+ provider = self.main_class.remotes[0]
+        # Hijack the lock so git provider is fooled into thinking another instance is doing something.
+ self.assertTrue(provider._master_lock.acquire(timeout=5))
+ try:
+ # git provider should raise timeout error to avoid lock race conditions
+ self.assertRaises(TimeoutError, provider.clear_lock)
+ finally:
+ provider._master_lock.release()
--
2.41.0

View File

@ -0,0 +1,44 @@
From 4996f423f14369fad14a9e6d2d3b8bd750c77fc7 Mon Sep 17 00:00:00 2001
From: Victor Zhestkov <vzhestkov@suse.com>
Date: Tue, 5 Apr 2022 12:04:46 +0300
Subject: [PATCH] Fixes for Python 3.10 (#502)
* Use collections.abc.Mapping instead collections.Mapping in state
---
salt/state.py | 5 +++--
1 file changed, 3 insertions(+), 2 deletions(-)
diff --git a/salt/state.py b/salt/state.py
index ab84cb8b4d..489424a083 100644
--- a/salt/state.py
+++ b/salt/state.py
@@ -12,7 +12,6 @@ The data sent to the state calls is as follows:
"""
-import collections
import copy
import datetime
import fnmatch
@@ -27,6 +26,8 @@ import sys
import time
import traceback
+from collections.abc import Mapping
+
import salt.channel.client
import salt.fileclient
import salt.loader
@@ -3513,7 +3514,7 @@ class State:
"""
for chunk in high:
state = high[chunk]
- if not isinstance(state, collections.Mapping):
+ if not isinstance(state, Mapping):
continue
for state_ref in state:
needs_default = True
--
2.39.2

BIN
html.tar.bz2 (Stored with Git LFS) Normal file

Binary file not shown.

View File

@ -0,0 +1,138 @@
From 4f459d670886a8f4a410fdbd1ec595477d45e4e2 Mon Sep 17 00:00:00 2001
From: Alexander Graul <agraul@suse.com>
Date: Tue, 18 Jan 2022 17:10:37 +0100
Subject: [PATCH] Include aliases in the fqdns grains
Add UT for "is_fqdn"
Add "is_fqdn" check to the network utils
Bugfix: include FQDNs aliases
Deprecate UnitTest assertion in favour of built-in assert keyword
Add UT for fqdns aliases
Leverage cached interfaces, if any.
Implement network.fqdns module function (bsc#1134860) (#172)
* Duplicate fqdns logic in module.network
* Move _get_interfaces to utils.network
* Reuse network.fqdns in grains.core.fqdns
* Return empty list when fqdns grains is disabled
Co-authored-by: Eric Siebigteroth <eric.siebigteroth@suse.de>
---
salt/modules/network.py | 5 +++-
salt/utils/network.py | 16 +++++++++++
tests/pytests/unit/modules/test_network.py | 4 +--
tests/unit/utils/test_network.py | 32 ++++++++++++++++++++++
4 files changed, 54 insertions(+), 3 deletions(-)
diff --git a/salt/modules/network.py b/salt/modules/network.py
index 524b1b74fa..f959dbf97b 100644
--- a/salt/modules/network.py
+++ b/salt/modules/network.py
@@ -2096,7 +2096,10 @@ def fqdns():
# https://sourceware.org/bugzilla/show_bug.cgi?id=19329
time.sleep(random.randint(5, 25) / 1000)
try:
- return [socket.getfqdn(socket.gethostbyaddr(ip)[0])]
+ name, aliaslist, addresslist = socket.gethostbyaddr(ip)
+ return [socket.getfqdn(name)] + [
+ als for als in aliaslist if salt.utils.network.is_fqdn(als)
+ ]
except socket.herror as err:
if err.errno in (0, HOST_NOT_FOUND, NO_DATA):
# No FQDN for this IP address, so we don't need to know this all the time.
diff --git a/salt/utils/network.py b/salt/utils/network.py
index 2bea2cf129..6ec993a678 100644
--- a/salt/utils/network.py
+++ b/salt/utils/network.py
@@ -2372,3 +2372,19 @@ def ip_bracket(addr, strip=False):
addr = addr.rstrip("]")
addr = ipaddress.ip_address(addr)
return ("[{}]" if addr.version == 6 and not strip else "{}").format(addr)
+
+
+def is_fqdn(hostname):
+ """
+ Verify if hostname conforms to be a FQDN.
+
+ :param hostname: text string with the name of the host
+ :return: bool, True if hostname is correct FQDN, False otherwise
+ """
+
+ compliant = re.compile(r"(?!-)[A-Z\d\-\_]{1,63}(?<!-)$", re.IGNORECASE)
+ return (
+ "." in hostname
+ and len(hostname) < 0xFF
+ and all(compliant.match(x) for x in hostname.rstrip(".").split("."))
+ )
diff --git a/tests/pytests/unit/modules/test_network.py b/tests/pytests/unit/modules/test_network.py
index 81035434b6..3f31391f44 100644
--- a/tests/pytests/unit/modules/test_network.py
+++ b/tests/pytests/unit/modules/test_network.py
@@ -29,7 +29,7 @@ def fake_fqdn():
with patch("socket.getfqdn", autospec=True, return_value=fqdn), patch(
"socket.gethostbyaddr",
autospec=True,
- return_value=("fnord", "fnord fnord"),
+ return_value=("fnord", ["fnord fnord"], []),
):
yield fqdn
@@ -89,7 +89,7 @@ def test_fqdns_should_return_sorted_unique_domains(fake_ips):
with patch("socket.getfqdn", autospec=True, side_effect=fake_domains), patch(
"socket.gethostbyaddr",
autospec=True,
- return_value=("fnord", "fnord fnord"),
+ return_value=("fnord", ["fnord fnord"], []),
):
actual_fqdns = networkmod.fqdns()
assert actual_fqdns == {
diff --git a/tests/unit/utils/test_network.py b/tests/unit/utils/test_network.py
index f7d3972930..cdb1ca19ca 100644
--- a/tests/unit/utils/test_network.py
+++ b/tests/unit/utils/test_network.py
@@ -1311,3 +1311,35 @@ class NetworkTestCase(TestCase):
ip_addr_obj = ipaddress.ip_address(test_ipv4)
self.assertEqual(test_ipv4, network.ip_bracket(ip_addr_obj))
+
+ def test_is_fqdn(self):
+ """
+ Test is_fqdn function passes possible FQDN names.
+
+ :return: None
+ """
+ for fqdn in [
+ "host.domain.com",
+ "something.with.the.dots.still.ok",
+ "UPPERCASE.ALSO.SHOULD.WORK",
+ "MiXeD.CaSe.AcCePtAbLe",
+ "123.host.com",
+ "host123.com",
+ "some_underscore.com",
+ "host-here.com",
+ ]:
+ assert network.is_fqdn(fqdn)
+
+ def test_is_not_fqdn(self):
+ """
+ Test is_fqdn function rejects FQDN names.
+
+ :return: None
+ """
+ for fqdn in [
+ "hostname",
+ "/some/path",
+ "$variable.here",
+ "verylonghostname.{}".format("domain" * 45),
+ ]:
+ assert not network.is_fqdn(fqdn)
--
2.39.2

View File

@ -0,0 +1,66 @@
From 01a670dad69e03bd8bf2da76a6a81e847af20aab Mon Sep 17 00:00:00 2001
From: Alexander Graul <agraul@suse.com>
Date: Tue, 25 Jan 2022 17:12:47 +0100
Subject: [PATCH] info_installed works without status attr now
If 'status' was excluded via attr, info_installed was no longer able to
detect if a package was installed or not. Now info_installed adds the
'status' for the 'lowpkg.info' request again.
---
salt/modules/aptpkg.py | 9 +++++++++
tests/pytests/unit/modules/test_aptpkg.py | 18 ++++++++++++++++++
2 files changed, 27 insertions(+)
diff --git a/salt/modules/aptpkg.py b/salt/modules/aptpkg.py
index 938e37cc9e..3289f6604d 100644
--- a/salt/modules/aptpkg.py
+++ b/salt/modules/aptpkg.py
@@ -3461,6 +3461,15 @@ def info_installed(*names, **kwargs):
failhard = kwargs.pop("failhard", True)
kwargs.pop("errors", None) # Only for compatibility with RPM
attr = kwargs.pop("attr", None) # Package attributes to return
+
+ # status is needed to see if a package is installed. So we have to add it,
+ # even if it's excluded via attr parameter. Otherwise all packages are
+ # returned.
+ if attr:
+ attr_list = set(attr.split(","))
+ attr_list.add("status")
+ attr = ",".join(attr_list)
+
all_versions = kwargs.pop(
"all_versions", False
) # This is for backward compatible structure only
diff --git a/tests/pytests/unit/modules/test_aptpkg.py b/tests/pytests/unit/modules/test_aptpkg.py
index 4226957eeb..eb72447c3a 100644
--- a/tests/pytests/unit/modules/test_aptpkg.py
+++ b/tests/pytests/unit/modules/test_aptpkg.py
@@ -385,6 +385,24 @@ def test_info_installed_attr(lowpkg_info_var):
assert ret["wget"] == expected_pkg
+def test_info_installed_attr_without_status(lowpkg_info_var):
+ """
+ Test info_installed 'attr' for inclusion of 'status' attribute.
+
+ Since info_installed should only return installed packages, we need to
+ call __salt__['lowpkg.info'] with the 'status' attribute even if the user
+ is not asking for it in 'attr'. Otherwise info_installed would not be able
+ to check if the package is installed and would return everything.
+
+ :return:
+ """
+ mock = MagicMock(return_value=lowpkg_info_var)
+ with patch.dict(aptpkg.__salt__, {"lowpkg.info": mock}):
+ aptpkg.info_installed("wget", attr="version")
+ assert "status" in mock.call_args.kwargs["attr"]
+ assert "version" in mock.call_args.kwargs["attr"]
+
+
def test_info_installed_all_versions(lowpkg_info_var):
"""
Test info_installed 'all_versions'.
--
2.39.2

View File

@ -0,0 +1,32 @@
From 1de8313e55317a62c36a1a6262e7b9463544d69c Mon Sep 17 00:00:00 2001
From: Can Bulut Bayburt <1103552+cbbayburt@users.noreply.github.com>
Date: Wed, 4 Dec 2019 15:59:46 +0100
Subject: [PATCH] Let salt-ssh use 'platform-python' binary in RHEL8
(#191)
RHEL/CentOS 8 has an internal Python interpreter called 'platform-python'
included in the base setup.
Add this binary to the list of Python executables to look for when
creating the sh shim.
---
salt/client/ssh/__init__.py | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/salt/client/ssh/__init__.py b/salt/client/ssh/__init__.py
index 88365a6099..049baff51a 100644
--- a/salt/client/ssh/__init__.py
+++ b/salt/client/ssh/__init__.py
@@ -146,7 +146,7 @@ if [ "$SUDO" ] && [ "$SUDO_USER" ]
then SUDO="$SUDO -u $SUDO_USER"
fi
EX_PYTHON_INVALID={EX_THIN_PYTHON_INVALID}
-PYTHON_CMDS="python3 python27 python2.7 python26 python2.6 python2 python /usr/libexec/platform-python"
+PYTHON_CMDS="python3 /usr/libexec/platform-python python27 python2.7 python26 python2.6 python2 python"
for py_cmd in $PYTHON_CMDS
do
if command -v "$py_cmd" >/dev/null 2>&1 && "$py_cmd" -c "import sys; sys.exit(not (sys.version_info >= (2, 6)));"
--
2.39.2

View File

@ -0,0 +1,28 @@
From f9731227e7af0b1bf0a54993e0cac890225517f6 Mon Sep 17 00:00:00 2001
From: Bo Maryniuk <bo@suse.de>
Date: Fri, 16 Nov 2018 10:54:12 +0100
Subject: [PATCH] Make aptpkg.list_repos compatible on enabled/disabled
output
---
salt/modules/aptpkg.py | 3 +++
1 file changed, 3 insertions(+)
diff --git a/salt/modules/aptpkg.py b/salt/modules/aptpkg.py
index f68b1907e8..8e89744b5e 100644
--- a/salt/modules/aptpkg.py
+++ b/salt/modules/aptpkg.py
@@ -1919,6 +1919,9 @@ def list_repos(**kwargs):
repo["file"] = source.file
repo["comps"] = getattr(source, "comps", [])
repo["disabled"] = source.disabled
+ repo["enabled"] = not repo[
+ "disabled"
+ ] # This is for compatibility with the other modules
repo["dist"] = source.dist
repo["type"] = source.type
repo["uri"] = source.uri
--
2.39.2

View File

@ -0,0 +1,37 @@
From 53a5a62191b81c6838c3041cf95ffeb12fbab5b5 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Pablo=20Su=C3=A1rez=20Hern=C3=A1ndez?=
<psuarezhernandez@suse.com>
Date: Mon, 19 Jun 2023 15:35:41 +0100
Subject: [PATCH] Make master_tops compatible with Salt 3000 and older
minions (bsc#1212516) (bsc#1212517) (#587)
---
salt/master.py | 4 ++++
1 file changed, 4 insertions(+)
diff --git a/salt/master.py b/salt/master.py
index da1eb8cef5..fc243ef674 100644
--- a/salt/master.py
+++ b/salt/master.py
@@ -1213,6 +1213,7 @@ class AESFuncs(TransportMethods):
"_dir_list",
"_symlink_list",
"_file_envs",
+ "_ext_nodes", # To keep compatibility with old Salt minion versions
)
def __init__(self, opts, context=None):
@@ -1412,6 +1413,9 @@ class AESFuncs(TransportMethods):
return {}
return self.masterapi._master_tops(load, skip_verify=True)
+ # Needed so older minions can request master_tops
+ _ext_nodes = _master_tops
+
def _master_opts(self, load):
"""
Return the master options to the minion
--
2.41.0

View File

@ -0,0 +1,33 @@
From d2b4c8170d7ff30bf33623fcbbb6ebb6d7af934e Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Pablo=20Su=C3=A1rez=20Hern=C3=A1ndez?=
<psuarezhernandez@suse.com>
Date: Wed, 25 Mar 2020 13:09:52 +0000
Subject: [PATCH] Make setup.py script to not require setuptools > 9.1
---
setup.py | 8 --------
1 file changed, 8 deletions(-)
diff --git a/setup.py b/setup.py
index e60f1b7085..8ca8a66d45 100755
--- a/setup.py
+++ b/setup.py
@@ -632,14 +632,6 @@ class Install(install):
install.finalize_options(self)
def run(self):
- if LooseVersion(setuptools.__version__) < LooseVersion("9.1"):
- sys.stderr.write(
- "\n\nInstalling Salt requires setuptools >= 9.1\n"
- "Available setuptools version is {}\n\n".format(setuptools.__version__)
- )
- sys.stderr.flush()
- sys.exit(1)
-
# Let's set the running_salt_install attribute so we can add
# _version.txt in the build command
self.distribution.running_salt_install = True
--
2.39.2

View File

@ -0,0 +1,204 @@
From 5ea4add5c8e2bed50b9825edfff7565e5f6124f3 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Pablo=20Su=C3=A1rez=20Hern=C3=A1ndez?=
<psuarezhernandez@suse.com>
Date: Tue, 22 Aug 2023 12:57:44 +0100
Subject: [PATCH] Make sure configured user is properly set by Salt
(bsc#1210994) (#596)
* Make sure Salt user and env is validated before daemon init
* Ensure HOME is always present in env and set according to pwuser
* Set User to salt in salt-master.service files
* Return proper exitcode if user is not valid
* Fix environment also for salt-ssh command
* Increase start_timeout to avoid test to be flaky
---
pkg/common/salt-master.service | 1 +
pkg/old/deb/salt-master.service | 1 +
pkg/old/suse/salt-master.service | 1 +
salt/cli/daemons.py | 27 +++++++++++++++++++
salt/cli/ssh.py | 8 ++++++
salt/utils/verify.py | 4 +--
.../integration/cli/test_salt_minion.py | 4 +--
7 files changed, 42 insertions(+), 4 deletions(-)
diff --git a/pkg/common/salt-master.service b/pkg/common/salt-master.service
index 377c87afeb..257ecc283f 100644
--- a/pkg/common/salt-master.service
+++ b/pkg/common/salt-master.service
@@ -8,6 +8,7 @@ LimitNOFILE=100000
Type=notify
NotifyAccess=all
ExecStart=/usr/bin/salt-master
+User=salt
[Install]
WantedBy=multi-user.target
diff --git a/pkg/old/deb/salt-master.service b/pkg/old/deb/salt-master.service
index b5d0cdd22c..f9dca296b4 100644
--- a/pkg/old/deb/salt-master.service
+++ b/pkg/old/deb/salt-master.service
@@ -7,6 +7,7 @@ LimitNOFILE=16384
Type=notify
NotifyAccess=all
ExecStart=/usr/bin/salt-master
+User=salt
[Install]
WantedBy=multi-user.target
diff --git a/pkg/old/suse/salt-master.service b/pkg/old/suse/salt-master.service
index 9e002d16ca..caabca511c 100644
--- a/pkg/old/suse/salt-master.service
+++ b/pkg/old/suse/salt-master.service
@@ -8,6 +8,7 @@ LimitNOFILE=100000
Type=simple
ExecStart=/usr/bin/salt-master
TasksMax=infinity
+User=salt
[Install]
WantedBy=multi-user.target
diff --git a/salt/cli/daemons.py b/salt/cli/daemons.py
index ecc05c919e..c9ee9ced91 100644
--- a/salt/cli/daemons.py
+++ b/salt/cli/daemons.py
@@ -7,6 +7,7 @@ import logging
import os
import warnings
+import salt.defaults.exitcodes
import salt.utils.kinds as kinds
from salt.exceptions import SaltClientError, SaltSystemExit, get_error_message
from salt.utils import migrations
@@ -73,6 +74,16 @@ class DaemonsMixin: # pylint: disable=no-init
self.__class__.__name__,
)
+ def verify_user(self):
+ """
+ Verify Salt configured user for Salt and shutdown daemon if not valid.
+
+ :return:
+ """
+ if not check_user(self.config["user"]):
+ self.action_log_info("Cannot switch to configured user for Salt. Exiting")
+ self.shutdown(salt.defaults.exitcodes.EX_NOUSER)
+
def action_log_info(self, action):
"""
Say daemon starting.
@@ -178,6 +189,10 @@ class Master(
self.config["interface"] = ip_bracket(self.config["interface"])
migrations.migrate_paths(self.config)
+ # Ensure configured user is valid and environment is properly set
+ # before initializing the rest of the stack.
+ self.verify_user()
+
# Late import so logging works correctly
import salt.master
@@ -290,6 +305,10 @@ class Minion(
transport = self.config.get("transport").lower()
+ # Ensure configured user is valid and environment is properly set
+ # before initializing the rest of the stack.
+ self.verify_user()
+
try:
# Late import so logging works correctly
import salt.minion
@@ -478,6 +497,10 @@ class ProxyMinion(
self.action_log_info("An instance is already running. Exiting")
self.shutdown(1)
+ # Ensure configured user is valid and environment is properly set
+ # before initializing the rest of the stack.
+ self.verify_user()
+
# TODO: AIO core is separate from transport
# Late import so logging works correctly
import salt.minion
@@ -576,6 +599,10 @@ class Syndic(
self.action_log_info('Setting up "{}"'.format(self.config["id"]))
+ # Ensure configured user is valid and environment is properly set
+ # before initializing the rest of the stack.
+ self.verify_user()
+
# Late import so logging works correctly
import salt.minion
diff --git a/salt/cli/ssh.py b/salt/cli/ssh.py
index 6048cb5f58..672f32b8c0 100644
--- a/salt/cli/ssh.py
+++ b/salt/cli/ssh.py
@@ -1,7 +1,9 @@
import sys
import salt.client.ssh
+import salt.defaults.exitcodes
import salt.utils.parsers
+from salt.utils.verify import check_user
class SaltSSH(salt.utils.parsers.SaltSSHOptionParser):
@@ -15,5 +17,11 @@ class SaltSSH(salt.utils.parsers.SaltSSHOptionParser):
# that won't be used anyways with -H or --hosts
self.parse_args()
+ if not check_user(self.config["user"]):
+ self.exit(
+ salt.defaults.exitcodes.EX_NOUSER,
+ "Cannot switch to configured user for Salt. Exiting",
+ )
+
ssh = salt.client.ssh.SSH(self.config)
ssh.run()
diff --git a/salt/utils/verify.py b/salt/utils/verify.py
index 879128f231..7899fbe538 100644
--- a/salt/utils/verify.py
+++ b/salt/utils/verify.py
@@ -335,8 +335,8 @@ def check_user(user):
# We could just reset the whole environment but let's just override
# the variables we can get from pwuser
- if "HOME" in os.environ:
- os.environ["HOME"] = pwuser.pw_dir
+ # We ensure HOME is always present and set according to pwuser
+ os.environ["HOME"] = pwuser.pw_dir
if "SHELL" in os.environ:
os.environ["SHELL"] = pwuser.pw_shell
diff --git a/tests/pytests/integration/cli/test_salt_minion.py b/tests/pytests/integration/cli/test_salt_minion.py
index c0d6013474..bde2dd51d7 100644
--- a/tests/pytests/integration/cli/test_salt_minion.py
+++ b/tests/pytests/integration/cli/test_salt_minion.py
@@ -41,7 +41,7 @@ def test_exit_status_unknown_user(salt_master, minion_id):
factory = salt_master.salt_minion_daemon(
minion_id, overrides={"user": "unknown-user"}
)
- factory.start(start_timeout=10, max_start_attempts=1)
+ factory.start(start_timeout=30, max_start_attempts=1)
assert exc.value.process_result.returncode == salt.defaults.exitcodes.EX_NOUSER
assert "The user is not available." in exc.value.process_result.stderr
@@ -53,7 +53,7 @@ def test_exit_status_unknown_argument(salt_master, minion_id):
"""
with pytest.raises(FactoryNotStarted) as exc:
factory = salt_master.salt_minion_daemon(minion_id)
- factory.start("--unknown-argument", start_timeout=10, max_start_attempts=1)
+ factory.start("--unknown-argument", start_timeout=30, max_start_attempts=1)
assert exc.value.process_result.returncode == salt.defaults.exitcodes.EX_USAGE
assert "Usage" in exc.value.process_result.stderr
--
2.41.0

View File

@ -0,0 +1,850 @@
From a1fc5287d501a1ecdbd259e5bbdd4f7d5d06dd13 Mon Sep 17 00:00:00 2001
From: Alexander Graul <agraul@suse.com>
Date: Fri, 28 Apr 2023 09:41:28 +0200
Subject: [PATCH] Make sure the file client is destroyed upon use
Backport of https://github.com/saltstack/salt/pull/64113
---
salt/client/ssh/wrapper/saltcheck.py | 108 +++----
salt/fileclient.py | 11 -
salt/modules/dockermod.py | 17 +-
salt/pillar/__init__.py | 6 +-
salt/states/ansiblegate.py | 11 +-
salt/utils/asynchronous.py | 2 +-
salt/utils/jinja.py | 53 ++-
salt/utils/mako.py | 7 +
salt/utils/templates.py | 303 +++++++++---------
.../integration/states/test_include.py | 40 +++
.../utils/jinja/test_salt_cache_loader.py | 47 ++-
11 files changed, 330 insertions(+), 275 deletions(-)
create mode 100644 tests/pytests/integration/states/test_include.py
diff --git a/salt/client/ssh/wrapper/saltcheck.py b/salt/client/ssh/wrapper/saltcheck.py
index d47b5cf6883..b0b94593809 100644
--- a/salt/client/ssh/wrapper/saltcheck.py
+++ b/salt/client/ssh/wrapper/saltcheck.py
@@ -9,6 +9,7 @@ import tarfile
import tempfile
from contextlib import closing
+import salt.fileclient
import salt.utils.files
import salt.utils.json
import salt.utils.url
@@ -28,65 +29,62 @@ def update_master_cache(states, saltenv="base"):
# Setup for copying states to gendir
gendir = tempfile.mkdtemp()
trans_tar = salt.utils.files.mkstemp()
- if "cp.fileclient_{}".format(id(__opts__)) not in __context__:
- __context__[
- "cp.fileclient_{}".format(id(__opts__))
- ] = salt.fileclient.get_file_client(__opts__)
-
- # generate cp.list_states output and save to gendir
- cp_output = salt.utils.json.dumps(__salt__["cp.list_states"]())
- cp_output_file = os.path.join(gendir, "cp_output.txt")
- with salt.utils.files.fopen(cp_output_file, "w") as fp:
- fp.write(cp_output)
-
- # cp state directories to gendir
- already_processed = []
- sls_list = salt.utils.args.split_input(states)
- for state_name in sls_list:
- # generate low data for each state and save to gendir
- state_low_file = os.path.join(gendir, state_name + ".low")
- state_low_output = salt.utils.json.dumps(
- __salt__["state.show_low_sls"](state_name)
- )
- with salt.utils.files.fopen(state_low_file, "w") as fp:
- fp.write(state_low_output)
-
- state_name = state_name.replace(".", os.sep)
- if state_name in already_processed:
- log.debug("Already cached state for %s", state_name)
- else:
- file_copy_file = os.path.join(gendir, state_name + ".copy")
- log.debug("copying %s to %s", state_name, gendir)
- qualified_name = salt.utils.url.create(state_name, saltenv)
- # Duplicate cp.get_dir to gendir
- copy_result = __context__["cp.fileclient_{}".format(id(__opts__))].get_dir(
- qualified_name, gendir, saltenv
+ with salt.fileclient.get_file_client(__opts__) as cp_fileclient:
+
+ # generate cp.list_states output and save to gendir
+ cp_output = salt.utils.json.dumps(__salt__["cp.list_states"]())
+ cp_output_file = os.path.join(gendir, "cp_output.txt")
+ with salt.utils.files.fopen(cp_output_file, "w") as fp:
+ fp.write(cp_output)
+
+ # cp state directories to gendir
+ already_processed = []
+ sls_list = salt.utils.args.split_input(states)
+ for state_name in sls_list:
+ # generate low data for each state and save to gendir
+ state_low_file = os.path.join(gendir, state_name + ".low")
+ state_low_output = salt.utils.json.dumps(
+ __salt__["state.show_low_sls"](state_name)
)
- if copy_result:
- copy_result = [dir.replace(gendir, state_cache) for dir in copy_result]
- copy_result_output = salt.utils.json.dumps(copy_result)
- with salt.utils.files.fopen(file_copy_file, "w") as fp:
- fp.write(copy_result_output)
- already_processed.append(state_name)
+ with salt.utils.files.fopen(state_low_file, "w") as fp:
+ fp.write(state_low_output)
+
+ state_name = state_name.replace(".", os.sep)
+ if state_name in already_processed:
+ log.debug("Already cached state for %s", state_name)
else:
- # If files were not copied, assume state.file.sls was given and just copy state
- state_name = os.path.dirname(state_name)
file_copy_file = os.path.join(gendir, state_name + ".copy")
- if state_name in already_processed:
- log.debug("Already cached state for %s", state_name)
+ log.debug("copying %s to %s", state_name, gendir)
+ qualified_name = salt.utils.url.create(state_name, saltenv)
+ # Duplicate cp.get_dir to gendir
+ copy_result = cp_fileclient.get_dir(qualified_name, gendir, saltenv)
+ if copy_result:
+ copy_result = [
+ dir.replace(gendir, state_cache) for dir in copy_result
+ ]
+ copy_result_output = salt.utils.json.dumps(copy_result)
+ with salt.utils.files.fopen(file_copy_file, "w") as fp:
+ fp.write(copy_result_output)
+ already_processed.append(state_name)
else:
- qualified_name = salt.utils.url.create(state_name, saltenv)
- copy_result = __context__[
- "cp.fileclient_{}".format(id(__opts__))
- ].get_dir(qualified_name, gendir, saltenv)
- if copy_result:
- copy_result = [
- dir.replace(gendir, state_cache) for dir in copy_result
- ]
- copy_result_output = salt.utils.json.dumps(copy_result)
- with salt.utils.files.fopen(file_copy_file, "w") as fp:
- fp.write(copy_result_output)
- already_processed.append(state_name)
+ # If files were not copied, assume state.file.sls was given and just copy state
+ state_name = os.path.dirname(state_name)
+ file_copy_file = os.path.join(gendir, state_name + ".copy")
+ if state_name in already_processed:
+ log.debug("Already cached state for %s", state_name)
+ else:
+ qualified_name = salt.utils.url.create(state_name, saltenv)
+ copy_result = cp_fileclient.get_dir(
+ qualified_name, gendir, saltenv
+ )
+ if copy_result:
+ copy_result = [
+ dir.replace(gendir, state_cache) for dir in copy_result
+ ]
+ copy_result_output = salt.utils.json.dumps(copy_result)
+ with salt.utils.files.fopen(file_copy_file, "w") as fp:
+ fp.write(copy_result_output)
+ already_processed.append(state_name)
# turn gendir into tarball and remove gendir
try:
diff --git a/salt/fileclient.py b/salt/fileclient.py
index fef5154a0be..f01a86dd0d4 100644
--- a/salt/fileclient.py
+++ b/salt/fileclient.py
@@ -849,7 +849,6 @@ class Client:
kwargs.pop("env")
kwargs["saltenv"] = saltenv
- url_data = urllib.parse.urlparse(url)
sfn = self.cache_file(url, saltenv, cachedir=cachedir)
if not sfn or not os.path.exists(sfn):
return ""
@@ -1165,13 +1164,8 @@ class RemoteClient(Client):
if not salt.utils.platform.is_windows():
hash_server, stat_server = self.hash_and_stat_file(path, saltenv)
- try:
- mode_server = stat_server[0]
- except (IndexError, TypeError):
- mode_server = None
else:
hash_server = self.hash_file(path, saltenv)
- mode_server = None
# Check if file exists on server, before creating files and
# directories
@@ -1214,13 +1208,8 @@ class RemoteClient(Client):
if dest2check and os.path.isfile(dest2check):
if not salt.utils.platform.is_windows():
hash_local, stat_local = self.hash_and_stat_file(dest2check, saltenv)
- try:
- mode_local = stat_local[0]
- except (IndexError, TypeError):
- mode_local = None
else:
hash_local = self.hash_file(dest2check, saltenv)
- mode_local = None
if hash_local == hash_server:
return dest2check
diff --git a/salt/modules/dockermod.py b/salt/modules/dockermod.py
index f7344b66ac6..69b722f0c95 100644
--- a/salt/modules/dockermod.py
+++ b/salt/modules/dockermod.py
@@ -6667,14 +6667,6 @@ def script_retcode(
)["retcode"]
-def _mk_fileclient():
- """
- Create a file client and add it to the context.
- """
- if "cp.fileclient" not in __context__:
- __context__["cp.fileclient"] = salt.fileclient.get_file_client(__opts__)
-
-
def _generate_tmp_path():
return os.path.join("/tmp", "salt.docker.{}".format(uuid.uuid4().hex[:6]))
@@ -6688,11 +6680,10 @@ def _prepare_trans_tar(name, sls_opts, mods=None, pillar=None, extra_filerefs=""
# reuse it from salt.ssh, however this function should
# be somewhere else
refs = salt.client.ssh.state.lowstate_file_refs(chunks, extra_filerefs)
- _mk_fileclient()
- trans_tar = salt.client.ssh.state.prep_trans_tar(
- __context__["cp.fileclient"], chunks, refs, pillar, name
- )
- return trans_tar
+ with salt.fileclient.get_file_client(__opts__) as fileclient:
+ return salt.client.ssh.state.prep_trans_tar(
+ fileclient, chunks, refs, pillar, name
+ )
def _compile_state(sls_opts, mods=None):
diff --git a/salt/pillar/__init__.py b/salt/pillar/__init__.py
index 0dfab4cc579..26312b3bd53 100644
--- a/salt/pillar/__init__.py
+++ b/salt/pillar/__init__.py
@@ -9,7 +9,6 @@ import logging
import os
import sys
import traceback
-import uuid
import salt.channel.client
import salt.ext.tornado.gen
@@ -1351,6 +1350,11 @@ class Pillar:
if hasattr(self, "_closing") and self._closing:
return
self._closing = True
+ if self.client:
+ try:
+ self.client.destroy()
+ except AttributeError:
+ pass
# pylint: disable=W1701
def __del__(self):
diff --git a/salt/states/ansiblegate.py b/salt/states/ansiblegate.py
index 7fd4deb6c2a..9abd418c42c 100644
--- a/salt/states/ansiblegate.py
+++ b/salt/states/ansiblegate.py
@@ -32,12 +32,10 @@ state:
- state: installed
"""
-
import logging
import os
import sys
-# Import salt modules
import salt.fileclient
import salt.utils.decorators.path
from salt.utils.decorators import depends
@@ -108,13 +106,6 @@ def __virtual__():
return __virtualname__
-def _client():
- """
- Get a fileclient
- """
- return salt.fileclient.get_file_client(__opts__)
-
-
def _changes(plays):
"""
Find changes in ansible return data
@@ -171,7 +162,7 @@ def playbooks(name, rundir=None, git_repo=None, git_kwargs=None, ansible_kwargs=
}
if git_repo:
if not isinstance(rundir, str) or not os.path.isdir(rundir):
- with _client() as client:
+ with salt.fileclient.get_file_client(__opts__) as client:
rundir = client._extrn_path(git_repo, "base")
log.trace("rundir set to %s", rundir)
if not isinstance(git_kwargs, dict):
diff --git a/salt/utils/asynchronous.py b/salt/utils/asynchronous.py
index 2a858feee98..0c645bbc3bb 100644
--- a/salt/utils/asynchronous.py
+++ b/salt/utils/asynchronous.py
@@ -131,7 +131,7 @@ class SyncWrapper:
result = io_loop.run_sync(lambda: getattr(self.obj, key)(*args, **kwargs))
results.append(True)
results.append(result)
- except Exception as exc: # pylint: disable=broad-except
+ except Exception: # pylint: disable=broad-except
results.append(False)
results.append(sys.exc_info())
diff --git a/salt/utils/jinja.py b/salt/utils/jinja.py
index fcc5aec497e..a6a8a279605 100644
--- a/salt/utils/jinja.py
+++ b/salt/utils/jinja.py
@@ -58,19 +58,6 @@ class SaltCacheLoader(BaseLoader):
and only loaded once per loader instance.
"""
- _cached_pillar_client = None
- _cached_client = None
-
- @classmethod
- def shutdown(cls):
- for attr in ("_cached_client", "_cached_pillar_client"):
- client = getattr(cls, attr, None)
- if client is not None:
- # PillarClient and LocalClient objects do not have a destroy method
- if hasattr(client, "destroy"):
- client.destroy()
- setattr(cls, attr, None)
-
def __init__(
self,
opts,
@@ -93,8 +80,7 @@ class SaltCacheLoader(BaseLoader):
log.debug("Jinja search path: %s", self.searchpath)
self.cached = []
self._file_client = _file_client
- # Instantiate the fileclient
- self.file_client()
+ self._close_file_client = _file_client is None
def file_client(self):
"""
@@ -108,18 +94,10 @@ class SaltCacheLoader(BaseLoader):
or not hasattr(self._file_client, "opts")
or self._file_client.opts["file_roots"] != self.opts["file_roots"]
):
- attr = "_cached_pillar_client" if self.pillar_rend else "_cached_client"
- cached_client = getattr(self, attr, None)
- if (
- cached_client is None
- or not hasattr(cached_client, "opts")
- or cached_client.opts["file_roots"] != self.opts["file_roots"]
- ):
- cached_client = salt.fileclient.get_file_client(
- self.opts, self.pillar_rend
- )
- setattr(SaltCacheLoader, attr, cached_client)
- self._file_client = cached_client
+ self._file_client = salt.fileclient.get_file_client(
+ self.opts, self.pillar_rend
+ )
+ self._close_file_client = True
return self._file_client
def cache_file(self, template):
@@ -221,6 +199,27 @@ class SaltCacheLoader(BaseLoader):
# there is no template file within searchpaths
raise TemplateNotFound(template)
+ def destroy(self):
+ if self._close_file_client is False:
+ return
+ if self._file_client is None:
+ return
+ file_client = self._file_client
+ self._file_client = None
+
+ try:
+ file_client.destroy()
+ except AttributeError:
+ # PillarClient and LocalClient objects do not have a destroy method
+ pass
+
+ def __enter__(self):
+ self.file_client()
+ return self
+
+ def __exit__(self, *args):
+ self.destroy()
+
class PrintableDict(OrderedDict):
"""
diff --git a/salt/utils/mako.py b/salt/utils/mako.py
index 69618de9837..037d5d86deb 100644
--- a/salt/utils/mako.py
+++ b/salt/utils/mako.py
@@ -97,3 +97,10 @@ if HAS_MAKO:
self.cache[fpath] = self.file_client().get_file(
fpath, "", True, self.saltenv
)
+
+ def destroy(self):
+ if self.client:
+ try:
+ self.client.destroy()
+ except AttributeError:
+ pass
diff --git a/salt/utils/templates.py b/salt/utils/templates.py
index 4947b820a36..4a8adf2a14f 100644
--- a/salt/utils/templates.py
+++ b/salt/utils/templates.py
@@ -362,163 +362,169 @@ def render_jinja_tmpl(tmplstr, context, tmplpath=None):
elif tmplstr.endswith("\n"):
newline = "\n"
- if not saltenv:
- if tmplpath:
- loader = jinja2.FileSystemLoader(os.path.dirname(tmplpath))
- else:
- loader = salt.utils.jinja.SaltCacheLoader(
- opts,
- saltenv,
- pillar_rend=context.get("_pillar_rend", False),
- _file_client=file_client,
- )
+ try:
+ if not saltenv:
+ if tmplpath:
+ loader = jinja2.FileSystemLoader(os.path.dirname(tmplpath))
+ else:
+ loader = salt.utils.jinja.SaltCacheLoader(
+ opts,
+ saltenv,
+ pillar_rend=context.get("_pillar_rend", False),
+ _file_client=file_client,
+ )
- env_args = {"extensions": [], "loader": loader}
-
- if hasattr(jinja2.ext, "with_"):
- env_args["extensions"].append("jinja2.ext.with_")
- if hasattr(jinja2.ext, "do"):
- env_args["extensions"].append("jinja2.ext.do")
- if hasattr(jinja2.ext, "loopcontrols"):
- env_args["extensions"].append("jinja2.ext.loopcontrols")
- env_args["extensions"].append(salt.utils.jinja.SerializerExtension)
-
- opt_jinja_env = opts.get("jinja_env", {})
- opt_jinja_sls_env = opts.get("jinja_sls_env", {})
-
- opt_jinja_env = opt_jinja_env if isinstance(opt_jinja_env, dict) else {}
- opt_jinja_sls_env = opt_jinja_sls_env if isinstance(opt_jinja_sls_env, dict) else {}
-
- # Pass through trim_blocks and lstrip_blocks Jinja parameters
- # trim_blocks removes newlines around Jinja blocks
- # lstrip_blocks strips tabs and spaces from the beginning of
- # line to the start of a block.
- if opts.get("jinja_trim_blocks", False):
- log.debug("Jinja2 trim_blocks is enabled")
- log.warning(
- "jinja_trim_blocks is deprecated and will be removed in a future release,"
- " please use jinja_env and/or jinja_sls_env instead"
- )
- opt_jinja_env["trim_blocks"] = True
- opt_jinja_sls_env["trim_blocks"] = True
- if opts.get("jinja_lstrip_blocks", False):
- log.debug("Jinja2 lstrip_blocks is enabled")
- log.warning(
- "jinja_lstrip_blocks is deprecated and will be removed in a future release,"
- " please use jinja_env and/or jinja_sls_env instead"
- )
- opt_jinja_env["lstrip_blocks"] = True
- opt_jinja_sls_env["lstrip_blocks"] = True
-
- def opt_jinja_env_helper(opts, optname):
- for k, v in opts.items():
- k = k.lower()
- if hasattr(jinja2.defaults, k.upper()):
- log.debug("Jinja2 environment %s was set to %s by %s", k, v, optname)
- env_args[k] = v
- else:
- log.warning("Jinja2 environment %s is not recognized", k)
+ env_args = {"extensions": [], "loader": loader}
- if "sls" in context and context["sls"] != "":
- opt_jinja_env_helper(opt_jinja_sls_env, "jinja_sls_env")
- else:
- opt_jinja_env_helper(opt_jinja_env, "jinja_env")
+ if hasattr(jinja2.ext, "with_"):
+ env_args["extensions"].append("jinja2.ext.with_")
+ if hasattr(jinja2.ext, "do"):
+ env_args["extensions"].append("jinja2.ext.do")
+ if hasattr(jinja2.ext, "loopcontrols"):
+ env_args["extensions"].append("jinja2.ext.loopcontrols")
+ env_args["extensions"].append(salt.utils.jinja.SerializerExtension)
- if opts.get("allow_undefined", False):
- jinja_env = jinja2.sandbox.SandboxedEnvironment(**env_args)
- else:
- jinja_env = jinja2.sandbox.SandboxedEnvironment(
- undefined=jinja2.StrictUndefined, **env_args
- )
+ opt_jinja_env = opts.get("jinja_env", {})
+ opt_jinja_sls_env = opts.get("jinja_sls_env", {})
- indent_filter = jinja_env.filters.get("indent")
- jinja_env.tests.update(JinjaTest.salt_jinja_tests)
- jinja_env.filters.update(JinjaFilter.salt_jinja_filters)
- if salt.utils.jinja.JINJA_VERSION >= Version("2.11"):
- # Use the existing indent filter on Jinja versions where it's not broken
- jinja_env.filters["indent"] = indent_filter
- jinja_env.globals.update(JinjaGlobal.salt_jinja_globals)
-
- # globals
- jinja_env.globals["odict"] = OrderedDict
- jinja_env.globals["show_full_context"] = salt.utils.jinja.show_full_context
-
- jinja_env.tests["list"] = salt.utils.data.is_list
-
- decoded_context = {}
- for key, value in context.items():
- if not isinstance(value, str):
- if isinstance(value, NamedLoaderContext):
- decoded_context[key] = value.value()
- else:
- decoded_context[key] = value
- continue
+ opt_jinja_env = opt_jinja_env if isinstance(opt_jinja_env, dict) else {}
+ opt_jinja_sls_env = (
+ opt_jinja_sls_env if isinstance(opt_jinja_sls_env, dict) else {}
+ )
- try:
- decoded_context[key] = salt.utils.stringutils.to_unicode(
- value, encoding=SLS_ENCODING
+ # Pass through trim_blocks and lstrip_blocks Jinja parameters
+ # trim_blocks removes newlines around Jinja blocks
+ # lstrip_blocks strips tabs and spaces from the beginning of
+ # line to the start of a block.
+ if opts.get("jinja_trim_blocks", False):
+ log.debug("Jinja2 trim_blocks is enabled")
+ log.warning(
+ "jinja_trim_blocks is deprecated and will be removed in a future release,"
+ " please use jinja_env and/or jinja_sls_env instead"
)
- except UnicodeDecodeError as ex:
- log.debug(
- "Failed to decode using default encoding (%s), trying system encoding",
- SLS_ENCODING,
+ opt_jinja_env["trim_blocks"] = True
+ opt_jinja_sls_env["trim_blocks"] = True
+ if opts.get("jinja_lstrip_blocks", False):
+ log.debug("Jinja2 lstrip_blocks is enabled")
+ log.warning(
+ "jinja_lstrip_blocks is deprecated and will be removed in a future release,"
+ " please use jinja_env and/or jinja_sls_env instead"
)
- decoded_context[key] = salt.utils.data.decode(value)
+ opt_jinja_env["lstrip_blocks"] = True
+ opt_jinja_sls_env["lstrip_blocks"] = True
+
+ def opt_jinja_env_helper(opts, optname):
+ for k, v in opts.items():
+ k = k.lower()
+ if hasattr(jinja2.defaults, k.upper()):
+ log.debug(
+ "Jinja2 environment %s was set to %s by %s", k, v, optname
+ )
+ env_args[k] = v
+ else:
+ log.warning("Jinja2 environment %s is not recognized", k)
- jinja_env.globals.update(decoded_context)
- try:
- template = jinja_env.from_string(tmplstr)
- output = template.render(**decoded_context)
- except jinja2.exceptions.UndefinedError as exc:
- trace = traceback.extract_tb(sys.exc_info()[2])
- line, out = _get_jinja_error(trace, context=decoded_context)
- if not line:
- tmplstr = ""
- raise SaltRenderError("Jinja variable {}{}".format(exc, out), line, tmplstr)
- except (
- jinja2.exceptions.TemplateRuntimeError,
- jinja2.exceptions.TemplateSyntaxError,
- jinja2.exceptions.SecurityError,
- ) as exc:
- trace = traceback.extract_tb(sys.exc_info()[2])
- line, out = _get_jinja_error(trace, context=decoded_context)
- if not line:
- tmplstr = ""
- raise SaltRenderError(
- "Jinja syntax error: {}{}".format(exc, out), line, tmplstr
- )
- except (SaltInvocationError, CommandExecutionError) as exc:
- trace = traceback.extract_tb(sys.exc_info()[2])
- line, out = _get_jinja_error(trace, context=decoded_context)
- if not line:
- tmplstr = ""
- raise SaltRenderError(
- "Problem running salt function in Jinja template: {}{}".format(exc, out),
- line,
- tmplstr,
- )
- except Exception as exc: # pylint: disable=broad-except
- tracestr = traceback.format_exc()
- trace = traceback.extract_tb(sys.exc_info()[2])
- line, out = _get_jinja_error(trace, context=decoded_context)
- if not line:
- tmplstr = ""
+ if "sls" in context and context["sls"] != "":
+ opt_jinja_env_helper(opt_jinja_sls_env, "jinja_sls_env")
else:
- tmplstr += "\n{}".format(tracestr)
- log.debug("Jinja Error")
- log.debug("Exception:", exc_info=True)
- log.debug("Out: %s", out)
- log.debug("Line: %s", line)
- log.debug("TmplStr: %s", tmplstr)
- log.debug("TraceStr: %s", tracestr)
+ opt_jinja_env_helper(opt_jinja_env, "jinja_env")
- raise SaltRenderError(
- "Jinja error: {}{}".format(exc, out), line, tmplstr, trace=tracestr
- )
+ if opts.get("allow_undefined", False):
+ jinja_env = jinja2.sandbox.SandboxedEnvironment(**env_args)
+ else:
+ jinja_env = jinja2.sandbox.SandboxedEnvironment(
+ undefined=jinja2.StrictUndefined, **env_args
+ )
+
+ indent_filter = jinja_env.filters.get("indent")
+ jinja_env.tests.update(JinjaTest.salt_jinja_tests)
+ jinja_env.filters.update(JinjaFilter.salt_jinja_filters)
+ if salt.utils.jinja.JINJA_VERSION >= Version("2.11"):
+ # Use the existing indent filter on Jinja versions where it's not broken
+ jinja_env.filters["indent"] = indent_filter
+ jinja_env.globals.update(JinjaGlobal.salt_jinja_globals)
+
+ # globals
+ jinja_env.globals["odict"] = OrderedDict
+ jinja_env.globals["show_full_context"] = salt.utils.jinja.show_full_context
+
+ jinja_env.tests["list"] = salt.utils.data.is_list
+
+ decoded_context = {}
+ for key, value in context.items():
+ if not isinstance(value, str):
+ if isinstance(value, NamedLoaderContext):
+ decoded_context[key] = value.value()
+ else:
+ decoded_context[key] = value
+ continue
+
+ try:
+ decoded_context[key] = salt.utils.stringutils.to_unicode(
+ value, encoding=SLS_ENCODING
+ )
+ except UnicodeDecodeError:
+ log.debug(
+ "Failed to decode using default encoding (%s), trying system encoding",
+ SLS_ENCODING,
+ )
+ decoded_context[key] = salt.utils.data.decode(value)
+
+ jinja_env.globals.update(decoded_context)
+ try:
+ template = jinja_env.from_string(tmplstr)
+ output = template.render(**decoded_context)
+ except jinja2.exceptions.UndefinedError as exc:
+ trace = traceback.extract_tb(sys.exc_info()[2])
+ line, out = _get_jinja_error(trace, context=decoded_context)
+ if not line:
+ tmplstr = ""
+ raise SaltRenderError("Jinja variable {}{}".format(exc, out), line, tmplstr)
+ except (
+ jinja2.exceptions.TemplateRuntimeError,
+ jinja2.exceptions.TemplateSyntaxError,
+ jinja2.exceptions.SecurityError,
+ ) as exc:
+ trace = traceback.extract_tb(sys.exc_info()[2])
+ line, out = _get_jinja_error(trace, context=decoded_context)
+ if not line:
+ tmplstr = ""
+ raise SaltRenderError(
+ "Jinja syntax error: {}{}".format(exc, out), line, tmplstr
+ )
+ except (SaltInvocationError, CommandExecutionError) as exc:
+ trace = traceback.extract_tb(sys.exc_info()[2])
+ line, out = _get_jinja_error(trace, context=decoded_context)
+ if not line:
+ tmplstr = ""
+ raise SaltRenderError(
+ "Problem running salt function in Jinja template: {}{}".format(
+ exc, out
+ ),
+ line,
+ tmplstr,
+ )
+ except Exception as exc: # pylint: disable=broad-except
+ tracestr = traceback.format_exc()
+ trace = traceback.extract_tb(sys.exc_info()[2])
+ line, out = _get_jinja_error(trace, context=decoded_context)
+ if not line:
+ tmplstr = ""
+ else:
+ tmplstr += "\n{}".format(tracestr)
+ log.debug("Jinja Error")
+ log.debug("Exception:", exc_info=True)
+ log.debug("Out: %s", out)
+ log.debug("Line: %s", line)
+ log.debug("TmplStr: %s", tmplstr)
+ log.debug("TraceStr: %s", tracestr)
+
+ raise SaltRenderError(
+ "Jinja error: {}{}".format(exc, out), line, tmplstr, trace=tracestr
+ )
finally:
- if loader and hasattr(loader, "_file_client"):
- if hasattr(loader._file_client, "destroy"):
- loader._file_client.destroy()
+ if loader and isinstance(loader, salt.utils.jinja.SaltCacheLoader):
+ loader.destroy()
# Workaround a bug in Jinja that removes the final newline
# (https://github.com/mitsuhiko/jinja2/issues/75)
@@ -569,9 +575,8 @@ def render_mako_tmpl(tmplstr, context, tmplpath=None):
except Exception: # pylint: disable=broad-except
raise SaltRenderError(mako.exceptions.text_error_template().render())
finally:
- if lookup and hasattr(lookup, "_file_client"):
- if hasattr(lookup._file_client, "destroy"):
- lookup._file_client.destroy()
+ if lookup and isinstance(lookup, SaltMakoTemplateLookup):
+ lookup.destroy()
def render_wempy_tmpl(tmplstr, context, tmplpath=None):
diff --git a/tests/pytests/integration/states/test_include.py b/tests/pytests/integration/states/test_include.py
new file mode 100644
index 00000000000..f814328c5e4
--- /dev/null
+++ b/tests/pytests/integration/states/test_include.py
@@ -0,0 +1,40 @@
+"""
+Integration tests for the jinja includes in states
+"""
+import logging
+
+import pytest
+
+log = logging.getLogger(__name__)
+
+
+@pytest.mark.slow_test
+def test_issue_64111(salt_master, salt_minion, salt_call_cli):
+ # This needs to be an integration test. A functional test does not trigger
+ # the issue fixed.
+
+ macros_jinja = """
+ {% macro a_jinja_macro(arg) -%}
+ {{ arg }}
+ {%- endmacro %}
+ """
+
+ init_sls = """
+ include:
+ - common.file1
+ """
+
+ file1_sls = """
+ {% from 'common/macros.jinja' import a_jinja_macro with context %}
+
+ a state id:
+ cmd.run:
+ - name: echo {{ a_jinja_macro("hello world") }}
+ """
+ tf = salt_master.state_tree.base.temp_file
+
+ with tf("common/macros.jinja", macros_jinja):
+ with tf("common/init.sls", init_sls):
+ with tf("common/file1.sls", file1_sls):
+ ret = salt_call_cli.run("state.apply", "common")
+ assert ret.returncode == 0
diff --git a/tests/pytests/unit/utils/jinja/test_salt_cache_loader.py b/tests/pytests/unit/utils/jinja/test_salt_cache_loader.py
index 38c5ce5b724..e0f5fa158ff 100644
--- a/tests/pytests/unit/utils/jinja/test_salt_cache_loader.py
+++ b/tests/pytests/unit/utils/jinja/test_salt_cache_loader.py
@@ -15,7 +15,7 @@ import salt.utils.json # pylint: disable=unused-import
import salt.utils.stringutils # pylint: disable=unused-import
import salt.utils.yaml # pylint: disable=unused-import
from salt.utils.jinja import SaltCacheLoader
-from tests.support.mock import Mock, patch
+from tests.support.mock import Mock, call, patch
@pytest.fixture
@@ -224,14 +224,45 @@ def test_file_client_kwarg(minion_opts, mock_file_client):
assert loader._file_client is mock_file_client
-def test_cache_loader_shutdown(minion_opts, mock_file_client):
+def test_cache_loader_passed_file_client(minion_opts, mock_file_client):
"""
The shudown method can be called without raising an exception when the
file_client does not have a destroy method
"""
- assert not hasattr(mock_file_client, "destroy")
- mock_file_client.opts = minion_opts
- loader = SaltCacheLoader(minion_opts, _file_client=mock_file_client)
- assert loader._file_client is mock_file_client
- # Shutdown method should not raise any exceptions
- loader.shutdown()
+ # Test SaltCacheLoader creating and destroying the file client created
+ file_client = Mock()
+ with patch("salt.fileclient.get_file_client", return_value=file_client):
+ loader = SaltCacheLoader(minion_opts)
+ assert loader._file_client is None
+ with loader:
+ assert loader._file_client is file_client
+ assert loader._file_client is None
+ assert file_client.mock_calls == [call.destroy()]
+
+ # Test SaltCacheLoader reusing the file client passed
+ file_client = Mock()
+ file_client.opts = {"file_roots": minion_opts["file_roots"]}
+ with patch("salt.fileclient.get_file_client", return_value=Mock()):
+ loader = SaltCacheLoader(minion_opts, _file_client=file_client)
+ assert loader._file_client is file_client
+ with loader:
+ assert loader._file_client is file_client
+ assert loader._file_client is file_client
+ assert file_client.mock_calls == []
+
+ # Test SaltCacheLoader creating a client even though a file client was
+ # passed because the "file_roots" option is different, and, as such,
+ # the destroy method on the new file client is called, but not on the
+ # file client passed in.
+ file_client = Mock()
+ file_client.opts = {"file_roots": ""}
+ new_file_client = Mock()
+ with patch("salt.fileclient.get_file_client", return_value=new_file_client):
+ loader = SaltCacheLoader(minion_opts, _file_client=file_client)
+ assert loader._file_client is file_client
+ with loader:
+ assert loader._file_client is not file_client
+ assert loader._file_client is new_file_client
+ assert loader._file_client is None
+ assert file_client.mock_calls == []
+ assert new_file_client.mock_calls == [call.destroy()]
--
2.40.0

View File

@ -0,0 +1,480 @@
From c1408333364ac25ff5d316afa9674f7687217b0c Mon Sep 17 00:00:00 2001
From: Dominik Gedon <dgedon@suse.de>
Date: Thu, 3 Aug 2023 11:08:21 +0200
Subject: [PATCH] Mark Salt 3006 as released (#586)
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit
* Mark Salt 3006 as released
Without this, commands like
```
salt '*' salt_version.equal 'Sulfur'
```
will not work properly and return False although Salt 3006 is used.
Signed-off-by: Dominik Gedon <dominik.gedon@suse.com>
* Fix detection of Salt codename by salt_version module
* Fix mess with version detection bad version definition
* Add some new and fix unit tests
* Fix SaltStackVersion string for new versions format
* Do not crash when passing numbers to 'salt_version.get_release_number'
* Fix salt_version execution module documentation
---------
Signed-off-by: Dominik Gedon <dominik.gedon@suse.com>
Co-authored-by: Pablo Suárez Hernández <psuarezhernandez@suse.com>
---
salt/modules/salt_version.py | 8 +-
salt/version.py | 218 +++++++++---------
.../pytests/unit/modules/test_salt_version.py | 55 ++++-
tests/pytests/unit/test_version.py | 10 +-
4 files changed, 176 insertions(+), 115 deletions(-)
diff --git a/salt/modules/salt_version.py b/salt/modules/salt_version.py
index 1b5421fee4..99dae5f61a 100644
--- a/salt/modules/salt_version.py
+++ b/salt/modules/salt_version.py
@@ -20,7 +20,7 @@ A simple example might be something like the following:
.. code-block:: jinja
{# a boolean check #}
- {% set option_deprecated = salt['salt_version.less_than']("3001") %}
+ {% set option_deprecated = salt['salt_version.less_than']("Sodium") %}
{% if option_deprecated %}
<use old syntax>
@@ -35,6 +35,7 @@ import logging
import salt.utils.versions
import salt.version
+from salt.exceptions import CommandExecutionError
log = logging.getLogger(__name__)
@@ -51,7 +52,7 @@ def __virtual__():
def get_release_number(name):
"""
Returns the release number of a given release code name in a
- ``MAJOR.PATCH`` format.
+ ``MAJOR.PATCH`` format (for Salt versions < 3000) or ``MAJOR`` for newer Salt versions.
If the release name has not been given an assigned release number, the
function returns a string. If the release cannot be found, it returns
@@ -66,6 +67,9 @@ def get_release_number(name):
salt '*' salt_version.get_release_number 'Oxygen'
"""
+ if not isinstance(name, str):
+ raise CommandExecutionError("'name' argument must be a string")
+
name = name.lower()
version_map = salt.version.SaltStackVersion.LNAMES
version = version_map.get(name)
diff --git a/salt/version.py b/salt/version.py
index 67719bd020..44372830b2 100644
--- a/salt/version.py
+++ b/salt/version.py
@@ -77,109 +77,109 @@ class SaltVersionsInfo(type):
ALUMINIUM = SaltVersion("Aluminium" , info=3003, released=True)
SILICON = SaltVersion("Silicon" , info=3004, released=True)
PHOSPHORUS = SaltVersion("Phosphorus" , info=3005, released=True)
- SULFUR = SaltVersion("Sulfur" , info=(3006, 0))
- CHLORINE = SaltVersion("Chlorine" , info=(3007, 0))
- ARGON = SaltVersion("Argon" , info=(3008, 0))
- POTASSIUM = SaltVersion("Potassium" , info=(3009, 0))
- CALCIUM = SaltVersion("Calcium" , info=(3010, 0))
- SCANDIUM = SaltVersion("Scandium" , info=(3011, 0))
- TITANIUM = SaltVersion("Titanium" , info=(3012, 0))
- VANADIUM = SaltVersion("Vanadium" , info=(3013, 0))
- CHROMIUM = SaltVersion("Chromium" , info=(3014, 0))
- MANGANESE = SaltVersion("Manganese" , info=(3015, 0))
- IRON = SaltVersion("Iron" , info=(3016, 0))
- COBALT = SaltVersion("Cobalt" , info=(3017, 0))
- NICKEL = SaltVersion("Nickel" , info=(3018, 0))
- COPPER = SaltVersion("Copper" , info=(3019, 0))
- ZINC = SaltVersion("Zinc" , info=(3020, 0))
- GALLIUM = SaltVersion("Gallium" , info=(3021, 0))
- GERMANIUM = SaltVersion("Germanium" , info=(3022, 0))
- ARSENIC = SaltVersion("Arsenic" , info=(3023, 0))
- SELENIUM = SaltVersion("Selenium" , info=(3024, 0))
- BROMINE = SaltVersion("Bromine" , info=(3025, 0))
- KRYPTON = SaltVersion("Krypton" , info=(3026, 0))
- RUBIDIUM = SaltVersion("Rubidium" , info=(3027, 0))
- STRONTIUM = SaltVersion("Strontium" , info=(3028, 0))
- YTTRIUM = SaltVersion("Yttrium" , info=(3029, 0))
- ZIRCONIUM = SaltVersion("Zirconium" , info=(3030, 0))
- NIOBIUM = SaltVersion("Niobium" , info=(3031, 0))
- MOLYBDENUM = SaltVersion("Molybdenum" , info=(3032, 0))
- TECHNETIUM = SaltVersion("Technetium" , info=(3033, 0))
- RUTHENIUM = SaltVersion("Ruthenium" , info=(3034, 0))
- RHODIUM = SaltVersion("Rhodium" , info=(3035, 0))
- PALLADIUM = SaltVersion("Palladium" , info=(3036, 0))
- SILVER = SaltVersion("Silver" , info=(3037, 0))
- CADMIUM = SaltVersion("Cadmium" , info=(3038, 0))
- INDIUM = SaltVersion("Indium" , info=(3039, 0))
- TIN = SaltVersion("Tin" , info=(3040, 0))
- ANTIMONY = SaltVersion("Antimony" , info=(3041, 0))
- TELLURIUM = SaltVersion("Tellurium" , info=(3042, 0))
- IODINE = SaltVersion("Iodine" , info=(3043, 0))
- XENON = SaltVersion("Xenon" , info=(3044, 0))
- CESIUM = SaltVersion("Cesium" , info=(3045, 0))
- BARIUM = SaltVersion("Barium" , info=(3046, 0))
- LANTHANUM = SaltVersion("Lanthanum" , info=(3047, 0))
- CERIUM = SaltVersion("Cerium" , info=(3048, 0))
- PRASEODYMIUM = SaltVersion("Praseodymium" , info=(3049, 0))
- NEODYMIUM = SaltVersion("Neodymium" , info=(3050, 0))
- PROMETHIUM = SaltVersion("Promethium" , info=(3051, 0))
- SAMARIUM = SaltVersion("Samarium" , info=(3052, 0))
- EUROPIUM = SaltVersion("Europium" , info=(3053, 0))
- GADOLINIUM = SaltVersion("Gadolinium" , info=(3054, 0))
- TERBIUM = SaltVersion("Terbium" , info=(3055, 0))
- DYSPROSIUM = SaltVersion("Dysprosium" , info=(3056, 0))
- HOLMIUM = SaltVersion("Holmium" , info=(3057, 0))
- ERBIUM = SaltVersion("Erbium" , info=(3058, 0))
- THULIUM = SaltVersion("Thulium" , info=(3059, 0))
- YTTERBIUM = SaltVersion("Ytterbium" , info=(3060, 0))
- LUTETIUM = SaltVersion("Lutetium" , info=(3061, 0))
- HAFNIUM = SaltVersion("Hafnium" , info=(3062, 0))
- TANTALUM = SaltVersion("Tantalum" , info=(3063, 0))
- TUNGSTEN = SaltVersion("Tungsten" , info=(3064, 0))
- RHENIUM = SaltVersion("Rhenium" , info=(3065, 0))
- OSMIUM = SaltVersion("Osmium" , info=(3066, 0))
- IRIDIUM = SaltVersion("Iridium" , info=(3067, 0))
- PLATINUM = SaltVersion("Platinum" , info=(3068, 0))
- GOLD = SaltVersion("Gold" , info=(3069, 0))
- MERCURY = SaltVersion("Mercury" , info=(3070, 0))
- THALLIUM = SaltVersion("Thallium" , info=(3071, 0))
- LEAD = SaltVersion("Lead" , info=(3072, 0))
- BISMUTH = SaltVersion("Bismuth" , info=(3073, 0))
- POLONIUM = SaltVersion("Polonium" , info=(3074, 0))
- ASTATINE = SaltVersion("Astatine" , info=(3075, 0))
- RADON = SaltVersion("Radon" , info=(3076, 0))
- FRANCIUM = SaltVersion("Francium" , info=(3077, 0))
- RADIUM = SaltVersion("Radium" , info=(3078, 0))
- ACTINIUM = SaltVersion("Actinium" , info=(3079, 0))
- THORIUM = SaltVersion("Thorium" , info=(3080, 0))
- PROTACTINIUM = SaltVersion("Protactinium" , info=(3081, 0))
- URANIUM = SaltVersion("Uranium" , info=(3082, 0))
- NEPTUNIUM = SaltVersion("Neptunium" , info=(3083, 0))
- PLUTONIUM = SaltVersion("Plutonium" , info=(3084, 0))
- AMERICIUM = SaltVersion("Americium" , info=(3085, 0))
- CURIUM = SaltVersion("Curium" , info=(3086, 0))
- BERKELIUM = SaltVersion("Berkelium" , info=(3087, 0))
- CALIFORNIUM = SaltVersion("Californium" , info=(3088, 0))
- EINSTEINIUM = SaltVersion("Einsteinium" , info=(3089, 0))
- FERMIUM = SaltVersion("Fermium" , info=(3090, 0))
- MENDELEVIUM = SaltVersion("Mendelevium" , info=(3091, 0))
- NOBELIUM = SaltVersion("Nobelium" , info=(3092, 0))
- LAWRENCIUM = SaltVersion("Lawrencium" , info=(3093, 0))
- RUTHERFORDIUM = SaltVersion("Rutherfordium", info=(3094, 0))
- DUBNIUM = SaltVersion("Dubnium" , info=(3095, 0))
- SEABORGIUM = SaltVersion("Seaborgium" , info=(3096, 0))
- BOHRIUM = SaltVersion("Bohrium" , info=(3097, 0))
- HASSIUM = SaltVersion("Hassium" , info=(3098, 0))
- MEITNERIUM = SaltVersion("Meitnerium" , info=(3099, 0))
- DARMSTADTIUM = SaltVersion("Darmstadtium" , info=(3100, 0))
- ROENTGENIUM = SaltVersion("Roentgenium" , info=(3101, 0))
- COPERNICIUM = SaltVersion("Copernicium" , info=(3102, 0))
- NIHONIUM = SaltVersion("Nihonium" , info=(3103, 0))
- FLEROVIUM = SaltVersion("Flerovium" , info=(3104, 0))
- MOSCOVIUM = SaltVersion("Moscovium" , info=(3105, 0))
- LIVERMORIUM = SaltVersion("Livermorium" , info=(3106, 0))
- TENNESSINE = SaltVersion("Tennessine" , info=(3107, 0))
- OGANESSON = SaltVersion("Oganesson" , info=(3108, 0))
+ SULFUR = SaltVersion("Sulfur" , info=3006, released=True)
+ CHLORINE = SaltVersion("Chlorine" , info=3007)
+ ARGON = SaltVersion("Argon" , info=3008)
+ POTASSIUM = SaltVersion("Potassium" , info=3009)
+ CALCIUM = SaltVersion("Calcium" , info=3010)
+ SCANDIUM = SaltVersion("Scandium" , info=3011)
+ TITANIUM = SaltVersion("Titanium" , info=3012)
+ VANADIUM = SaltVersion("Vanadium" , info=3013)
+ CHROMIUM = SaltVersion("Chromium" , info=3014)
+ MANGANESE = SaltVersion("Manganese" , info=3015)
+ IRON = SaltVersion("Iron" , info=3016)
+ COBALT = SaltVersion("Cobalt" , info=3017)
+ NICKEL = SaltVersion("Nickel" , info=3018)
+ COPPER = SaltVersion("Copper" , info=3019)
+ ZINC = SaltVersion("Zinc" , info=3020)
+ GALLIUM = SaltVersion("Gallium" , info=3021)
+ GERMANIUM = SaltVersion("Germanium" , info=3022)
+ ARSENIC = SaltVersion("Arsenic" , info=3023)
+ SELENIUM = SaltVersion("Selenium" , info=3024)
+ BROMINE = SaltVersion("Bromine" , info=3025)
+ KRYPTON = SaltVersion("Krypton" , info=3026)
+ RUBIDIUM = SaltVersion("Rubidium" , info=3027)
+ STRONTIUM = SaltVersion("Strontium" , info=3028)
+ YTTRIUM = SaltVersion("Yttrium" , info=3029)
+ ZIRCONIUM = SaltVersion("Zirconium" , info=3030)
+ NIOBIUM = SaltVersion("Niobium" , info=3031)
+ MOLYBDENUM = SaltVersion("Molybdenum" , info=3032)
+ TECHNETIUM = SaltVersion("Technetium" , info=3033)
+ RUTHENIUM = SaltVersion("Ruthenium" , info=3034)
+ RHODIUM = SaltVersion("Rhodium" , info=3035)
+ PALLADIUM = SaltVersion("Palladium" , info=3036)
+ SILVER = SaltVersion("Silver" , info=3037)
+ CADMIUM = SaltVersion("Cadmium" , info=3038)
+ INDIUM = SaltVersion("Indium" , info=3039)
+ TIN = SaltVersion("Tin" , info=3040)
+ ANTIMONY = SaltVersion("Antimony" , info=3041)
+ TELLURIUM = SaltVersion("Tellurium" , info=3042)
+ IODINE = SaltVersion("Iodine" , info=3043)
+ XENON = SaltVersion("Xenon" , info=3044)
+ CESIUM = SaltVersion("Cesium" , info=3045)
+ BARIUM = SaltVersion("Barium" , info=3046)
+ LANTHANUM = SaltVersion("Lanthanum" , info=3047)
+ CERIUM = SaltVersion("Cerium" , info=3048)
+ PRASEODYMIUM = SaltVersion("Praseodymium" , info=3049)
+ NEODYMIUM = SaltVersion("Neodymium" , info=3050)
+ PROMETHIUM = SaltVersion("Promethium" , info=3051)
+ SAMARIUM = SaltVersion("Samarium" , info=3052)
+ EUROPIUM = SaltVersion("Europium" , info=3053)
+ GADOLINIUM = SaltVersion("Gadolinium" , info=3054)
+ TERBIUM = SaltVersion("Terbium" , info=3055)
+ DYSPROSIUM = SaltVersion("Dysprosium" , info=3056)
+ HOLMIUM = SaltVersion("Holmium" , info=3057)
+ ERBIUM = SaltVersion("Erbium" , info=3058)
+ THULIUM = SaltVersion("Thulium" , info=3059)
+ YTTERBIUM = SaltVersion("Ytterbium" , info=3060)
+ LUTETIUM = SaltVersion("Lutetium" , info=3061)
+ HAFNIUM = SaltVersion("Hafnium" , info=3062)
+ TANTALUM = SaltVersion("Tantalum" , info=3063)
+ TUNGSTEN = SaltVersion("Tungsten" , info=3064)
+ RHENIUM = SaltVersion("Rhenium" , info=3065)
+ OSMIUM = SaltVersion("Osmium" , info=3066)
+ IRIDIUM = SaltVersion("Iridium" , info=3067)
+ PLATINUM = SaltVersion("Platinum" , info=3068)
+ GOLD = SaltVersion("Gold" , info=3069)
+ MERCURY = SaltVersion("Mercury" , info=3070)
+ THALLIUM = SaltVersion("Thallium" , info=3071)
+ LEAD = SaltVersion("Lead" , info=3072)
+ BISMUTH = SaltVersion("Bismuth" , info=3073)
+ POLONIUM = SaltVersion("Polonium" , info=3074)
+ ASTATINE = SaltVersion("Astatine" , info=3075)
+ RADON = SaltVersion("Radon" , info=3076)
+ FRANCIUM = SaltVersion("Francium" , info=3077)
+ RADIUM = SaltVersion("Radium" , info=3078)
+ ACTINIUM = SaltVersion("Actinium" , info=3079)
+ THORIUM = SaltVersion("Thorium" , info=3080)
+ PROTACTINIUM = SaltVersion("Protactinium" , info=3081)
+ URANIUM = SaltVersion("Uranium" , info=3082)
+ NEPTUNIUM = SaltVersion("Neptunium" , info=3083)
+ PLUTONIUM = SaltVersion("Plutonium" , info=3084)
+ AMERICIUM = SaltVersion("Americium" , info=3085)
+ CURIUM = SaltVersion("Curium" , info=3086)
+ BERKELIUM = SaltVersion("Berkelium" , info=3087)
+ CALIFORNIUM = SaltVersion("Californium" , info=3088)
+ EINSTEINIUM = SaltVersion("Einsteinium" , info=3089)
+ FERMIUM = SaltVersion("Fermium" , info=3090)
+ MENDELEVIUM = SaltVersion("Mendelevium" , info=3091)
+ NOBELIUM = SaltVersion("Nobelium" , info=3092)
+ LAWRENCIUM = SaltVersion("Lawrencium" , info=3093)
+ RUTHERFORDIUM = SaltVersion("Rutherfordium", info=3094)
+ DUBNIUM = SaltVersion("Dubnium" , info=3095)
+ SEABORGIUM = SaltVersion("Seaborgium" , info=3096)
+ BOHRIUM = SaltVersion("Bohrium" , info=3097)
+ HASSIUM = SaltVersion("Hassium" , info=3098)
+ MEITNERIUM = SaltVersion("Meitnerium" , info=3099)
+ DARMSTADTIUM = SaltVersion("Darmstadtium" , info=3100)
+ ROENTGENIUM = SaltVersion("Roentgenium" , info=3101)
+ COPERNICIUM = SaltVersion("Copernicium" , info=3102)
+ NIHONIUM = SaltVersion("Nihonium" , info=3103)
+ FLEROVIUM = SaltVersion("Flerovium" , info=3104)
+ MOSCOVIUM = SaltVersion("Moscovium" , info=3105)
+ LIVERMORIUM = SaltVersion("Livermorium" , info=3106)
+ TENNESSINE = SaltVersion("Tennessine" , info=3107)
+ OGANESSON = SaltVersion("Oganesson" , info=3108)
# <---- Please refrain from fixing whitespace -----------------------------------
# The idea is to keep this readable.
# -------------------------------------------------------------------------------
@@ -323,9 +323,7 @@ class SaltStackVersion:
self.mbugfix = mbugfix
self.pre_type = pre_type
self.pre_num = pre_num
- if self.can_have_dot_zero(major):
- vnames_key = (major, 0)
- elif self.new_version(major):
+ if self.new_version(major):
vnames_key = (major,)
else:
vnames_key = (major, minor)
@@ -476,8 +474,12 @@ class SaltStackVersion:
version_string = self.string
if self.sse:
version_string += " Enterprise"
- if (self.major, self.minor) in self.RMATCH:
- version_string += " ({})".format(self.RMATCH[(self.major, self.minor)])
+ if self.new_version(self.major):
+ rmatch_key = (self.major,)
+ else:
+ rmatch_key = (self.major, self.minor)
+ if rmatch_key in self.RMATCH:
+ version_string += " ({})".format(self.RMATCH[rmatch_key])
return version_string
@property
diff --git a/tests/pytests/unit/modules/test_salt_version.py b/tests/pytests/unit/modules/test_salt_version.py
index 6d734f6a76..4b7a7cd073 100644
--- a/tests/pytests/unit/modules/test_salt_version.py
+++ b/tests/pytests/unit/modules/test_salt_version.py
@@ -2,8 +2,11 @@
Unit tests for salt/modules/salt_version.py
"""
+import pytest
+
import salt.modules.salt_version as salt_version
import salt.version
+from salt.exceptions import CommandExecutionError
from tests.support.mock import MagicMock, patch
@@ -21,7 +24,7 @@ def test_mocked_objects():
for k, v in salt.version.SaltStackVersion.LNAMES.items():
assert k == k.lower()
assert isinstance(v, tuple)
- if sv.new_version(major=v[0]) and not sv.can_have_dot_zero(major=v[0]):
+ if sv.new_version(major=v[0]):
assert len(v) == 1
else:
assert len(v) == 2
@@ -64,6 +67,13 @@ def test_get_release_number_success_new_version():
assert salt_version.get_release_number("Neon") == "3000"
+def test_get_release_number_success_new_version_with_dot():
+ """
+ Test that a version is returned for new versioning (3006)
+ """
+ assert salt_version.get_release_number("Sulfur") == "3006"
+
+
def test_equal_success():
"""
Test that the current version is equal to the codename
@@ -83,6 +93,16 @@ def test_equal_success_new_version():
assert salt_version.equal("foo") is True
+def test_equal_success_new_version_with_dot():
+ """
+ Test that the current version is equal to the codename
+ while using the new versioning
+ """
+ with patch("salt.version.SaltStackVersion", MagicMock(return_value="3006.1")):
+ with patch("salt.version.SaltStackVersion.LNAMES", {"foo": (3006,)}):
+ assert salt_version.equal("foo") is True
+
+
def test_equal_older_codename():
"""
Test that when an older codename is passed in, the function returns False.
@@ -142,6 +162,17 @@ def test_greater_than_success_new_version():
assert salt_version.greater_than("Nitrogen") is True
+def test_greater_than_success_new_version_with_dot():
+ """
+ Test that the current version is newer than the codename
+ """
+ with patch(
+ "salt.modules.salt_version.get_release_number", MagicMock(return_value="3000")
+ ):
+ with patch("salt.version.SaltStackVersion", MagicMock(return_value="3006.0")):
+ assert salt_version.greater_than("Neon") is True
+
+
def test_greater_than_with_equal_codename():
"""
Test that when an equal codename is passed in, the function returns False.
@@ -200,6 +231,28 @@ def test_less_than_success_new_version():
assert salt_version.less_than("Fluorine") is True
+def test_less_than_success_new_version_with_dot():
+ """
+ Test that when a newer codename is passed in, the function returns True
+ using new version
+ """
+ with patch("salt.version.SaltStackVersion", MagicMock(return_value="2018.3.2")):
+ with patch(
+ "salt.modules.salt_version.get_release_number",
+ MagicMock(return_value="3006"),
+ ):
+ assert salt_version.less_than("Fluorine") is True
+
+
+def test_less_than_do_not_crash_when_input_is_a_number():
+ """
+ Test that less_than do not crash when unexpected inputs
+ """
+ with patch("salt.version.SaltStackVersion", MagicMock(return_value="2018.3.2")):
+ with pytest.raises(CommandExecutionError):
+ salt_version.less_than(1234)
+
+
def test_less_than_with_equal_codename():
"""
Test that when an equal codename is passed in, the function returns False.
diff --git a/tests/pytests/unit/test_version.py b/tests/pytests/unit/test_version.py
index 73befea4cf..1cb94c619c 100644
--- a/tests/pytests/unit/test_version.py
+++ b/tests/pytests/unit/test_version.py
@@ -187,7 +187,7 @@ def test_string_new_version_minor():
ver = SaltStackVersion(major=maj_ver, minor=min_ver)
assert ver.minor == min_ver
assert not ver.bugfix
- assert ver.string == "{}.{}".format(maj_ver, min_ver)
+ assert ver.string == f"{maj_ver}.{min_ver}"
def test_string_new_version_minor_as_string():
@@ -201,13 +201,13 @@ def test_string_new_version_minor_as_string():
ver = SaltStackVersion(major=maj_ver, minor=min_ver)
assert ver.minor == int(min_ver)
assert not ver.bugfix
- assert ver.string == "{}.{}".format(maj_ver, min_ver)
+ assert ver.string == f"{maj_ver}.{min_ver}"
# This only seems to happen on a cloned repo without its tags
maj_ver = "3000"
min_ver = ""
ver = SaltStackVersion(major=maj_ver, minor=min_ver)
- assert ver.minor is None, "{!r} is not {!r}".format(ver.minor, min_ver)
+ assert ver.minor is None, f"{ver.minor!r} is not {min_ver!r}"
assert not ver.bugfix
assert ver.string == maj_ver
@@ -222,7 +222,7 @@ def test_string_old_version():
min_ver = "2"
ver = SaltStackVersion(major=maj_ver, minor=min_ver)
assert ver.bugfix == 0
- assert ver.string == "{}.{}.0".format(maj_ver, min_ver)
+ assert ver.string == f"{maj_ver}.{min_ver}.0"
@pytest.mark.parametrize(
@@ -537,6 +537,8 @@ def test_versions_report_no_extensions_available():
("3000.1", "3000.1", "Neon"),
("3005", "3005", "Phosphorus"),
("3006", "3006.0", "Sulfur"),
+ ("3006.0", "3006.0", "Sulfur"),
+ ("3006.1", "3006.1", "Sulfur"),
("3015.1", "3015.1", "Manganese"),
("3109.3", "3109.3", None),
],
--
2.41.0

View File

@ -0,0 +1,276 @@
From bd671b53de8933732e2108624d7dfb6f9b183f38 Mon Sep 17 00:00:00 2001
From: Victor Zhestkov <Victor.Zhestkov@suse.com>
Date: Fri, 28 Oct 2022 13:20:13 +0300
Subject: [PATCH] Pass the context to pillar ext modules
* Pass __context__ to ext pillar
* Add test for passing the context to pillar ext module
* Align the test and pillar to prevent failing test
---
salt/master.py | 7 ++-
salt/pillar/__init__.py | 16 +++++-
tests/pytests/unit/test_master.py | 91 ++++++++++++++++++++++++++++++-
3 files changed, 108 insertions(+), 6 deletions(-)
diff --git a/salt/master.py b/salt/master.py
index a0552fa232..da1eb8cef5 100644
--- a/salt/master.py
+++ b/salt/master.py
@@ -964,6 +964,7 @@ class MWorker(salt.utils.process.SignalHandlingProcess):
self.k_mtime = 0
self.stats = collections.defaultdict(lambda: {"mean": 0, "runs": 0})
self.stat_clock = time.time()
+ self.context = {}
# We need __setstate__ and __getstate__ to also pickle 'SMaster.secrets'.
# Otherwise, 'SMaster.secrets' won't be copied over to the spawned process
@@ -1151,7 +1152,7 @@ class MWorker(salt.utils.process.SignalHandlingProcess):
self.key,
)
self.clear_funcs.connect()
- self.aes_funcs = AESFuncs(self.opts)
+ self.aes_funcs = AESFuncs(self.opts, context=self.context)
salt.utils.crypt.reinit_crypto()
self.__bind()
@@ -1214,7 +1215,7 @@ class AESFuncs(TransportMethods):
"_file_envs",
)
- def __init__(self, opts):
+ def __init__(self, opts, context=None):
"""
Create a new AESFuncs
@@ -1224,6 +1225,7 @@ class AESFuncs(TransportMethods):
:returns: Instance for handling AES operations
"""
self.opts = opts
+ self.context = context
self.event = salt.utils.event.get_master_event(
self.opts, self.opts["sock_dir"], listen=False
)
@@ -1611,6 +1613,7 @@ class AESFuncs(TransportMethods):
pillarenv=load.get("pillarenv"),
extra_minion_data=load.get("extra_minion_data"),
clean_cache=load.get("clean_cache"),
+ context=self.context,
)
data = pillar.compile_pillar()
self.fs_.update_opts()
diff --git a/salt/pillar/__init__.py b/salt/pillar/__init__.py
index 5a3f5388b4..0dfab4cc57 100644
--- a/salt/pillar/__init__.py
+++ b/salt/pillar/__init__.py
@@ -46,6 +46,7 @@ def get_pillar(
pillarenv=None,
extra_minion_data=None,
clean_cache=False,
+ context=None,
):
"""
Return the correct pillar driver based on the file_client option
@@ -82,6 +83,7 @@ def get_pillar(
pillarenv=pillarenv,
clean_cache=clean_cache,
extra_minion_data=extra_minion_data,
+ context=context,
)
return ptype(
opts,
@@ -93,6 +95,7 @@ def get_pillar(
pillar_override=pillar_override,
pillarenv=pillarenv,
extra_minion_data=extra_minion_data,
+ context=context,
)
@@ -281,7 +284,7 @@ class AsyncRemotePillar(RemotePillarMixin):
raise salt.ext.tornado.gen.Return(ret_pillar)
def destroy(self):
- if self._closing:
+ if hasattr(self, "_closing") and self._closing:
return
self._closing = True
@@ -310,6 +313,7 @@ class RemotePillar(RemotePillarMixin):
pillar_override=None,
pillarenv=None,
extra_minion_data=None,
+ context=None,
):
self.opts = opts
self.opts["saltenv"] = saltenv
@@ -334,6 +338,7 @@ class RemotePillar(RemotePillarMixin):
merge_lists=True,
)
self._closing = False
+ self.context = context
def compile_pillar(self):
"""
@@ -407,6 +412,7 @@ class PillarCache:
pillarenv=None,
extra_minion_data=None,
clean_cache=False,
+ context=None,
):
# Yes, we need all of these because we need to route to the Pillar object
# if we have no cache. This is another refactor target.
@@ -434,6 +440,8 @@ class PillarCache:
minion_cache_path=self._minion_cache_path(minion_id),
)
+ self.context = context
+
def _minion_cache_path(self, minion_id):
"""
Return the path to the cache file for the minion.
@@ -458,6 +466,7 @@ class PillarCache:
pillar_override=self.pillar_override,
pillarenv=self.pillarenv,
extra_minion_data=self.extra_minion_data,
+ context=self.context,
)
return fresh_pillar.compile_pillar()
@@ -533,6 +542,7 @@ class Pillar:
pillar_override=None,
pillarenv=None,
extra_minion_data=None,
+ context=None,
):
self.minion_id = minion_id
self.ext = ext
@@ -571,7 +581,7 @@ class Pillar:
if opts.get("pillar_source_merging_strategy"):
self.merge_strategy = opts["pillar_source_merging_strategy"]
- self.ext_pillars = salt.loader.pillars(ext_pillar_opts, self.functions)
+ self.ext_pillars = salt.loader.pillars(ext_pillar_opts, self.functions, context=context)
self.ignored_pillars = {}
self.pillar_override = pillar_override or {}
if not isinstance(self.pillar_override, dict):
@@ -1338,7 +1348,7 @@ class Pillar:
"""
This method exist in order to be API compatible with RemotePillar
"""
- if self._closing:
+ if hasattr(self, "_closing") and self._closing:
return
self._closing = True
diff --git a/tests/pytests/unit/test_master.py b/tests/pytests/unit/test_master.py
index cd11d217c7..98c796912a 100644
--- a/tests/pytests/unit/test_master.py
+++ b/tests/pytests/unit/test_master.py
@@ -4,7 +4,7 @@ import pytest
import salt.master
import salt.utils.platform
-from tests.support.mock import patch
+from tests.support.mock import MagicMock, patch
@pytest.fixture
@@ -160,3 +160,92 @@ def test_when_syndic_return_processes_load_then_correct_values_should_be_returne
with patch.object(encrypted_requests, "_return", autospec=True) as fake_return:
encrypted_requests._syndic_return(payload)
fake_return.assert_called_with(expected_return)
+
+
+def test_mworker_pass_context():
+ """
+ Test of passing the __context__ to pillar ext module loader
+ """
+ req_channel_mock = MagicMock()
+ local_client_mock = MagicMock()
+
+ opts = {
+ "req_server_niceness": None,
+ "mworker_niceness": None,
+ "sock_dir": "/tmp",
+ "conf_file": "/tmp/fake_conf",
+ "transport": "zeromq",
+ "fileserver_backend": ["roots"],
+ "file_client": "local",
+ "pillar_cache": False,
+ "state_top": "top.sls",
+ "pillar_roots": {},
+ }
+
+ data = {
+ "id": "MINION_ID",
+ "grains": {},
+ "saltenv": None,
+ "pillarenv": None,
+ "pillar_override": {},
+ "extra_minion_data": {},
+ "ver": "2",
+ "cmd": "_pillar",
+ }
+
+ test_context = {"testing": 123}
+
+ def mworker_bind_mock():
+ mworker.aes_funcs.run_func(data["cmd"], data)
+
+ with patch("salt.client.get_local_client", local_client_mock), patch(
+ "salt.master.ClearFuncs", MagicMock()
+ ), patch("salt.minion.MasterMinion", MagicMock()), patch(
+ "salt.utils.verify.valid_id", return_value=True
+ ), patch(
+ "salt.loader.matchers", MagicMock()
+ ), patch(
+ "salt.loader.render", MagicMock()
+ ), patch(
+ "salt.loader.utils", MagicMock()
+ ), patch(
+ "salt.loader.fileserver", MagicMock()
+ ), patch(
+ "salt.loader.minion_mods", MagicMock()
+ ), patch(
+ "salt.loader._module_dirs", MagicMock()
+ ), patch(
+ "salt.loader.LazyLoader", MagicMock()
+ ) as loadler_pillars_mock:
+ mworker = salt.master.MWorker(opts, {}, {}, [req_channel_mock])
+
+ with patch.object(mworker, "_MWorker__bind", mworker_bind_mock), patch.dict(
+ mworker.context, test_context
+ ):
+ mworker.run()
+ assert (
+ loadler_pillars_mock.call_args_list[0][1].get("pack").get("__context__")
+ == test_context
+ )
+
+ loadler_pillars_mock.reset_mock()
+
+ opts.update(
+ {
+ "pillar_cache": True,
+ "pillar_cache_backend": "file",
+ "pillar_cache_ttl": 1000,
+ "cachedir": "/tmp",
+ }
+ )
+
+ mworker = salt.master.MWorker(opts, {}, {}, [req_channel_mock])
+
+ with patch.object(mworker, "_MWorker__bind", mworker_bind_mock), patch.dict(
+ mworker.context, test_context
+ ), patch("salt.utils.cache.CacheFactory.factory", MagicMock()):
+ mworker.run()
+ assert (
+ loadler_pillars_mock.call_args_list[0][1].get("pack").get("__context__")
+ == test_context
+ )
--
2.39.2

View File

@ -0,0 +1,240 @@
From 90236c844cfce7da8beb7a570be19a8677c60820 Mon Sep 17 00:00:00 2001
From: Victor Zhestkov <vzhestkov@suse.com>
Date: Tue, 12 Apr 2022 10:06:43 +0300
Subject: [PATCH] Prevent affection of SSH.opts with LazyLoader
(bsc#1197637)
* Prevent affection of SSH.opts with LazyLoader
* Restore parsed targets
* Fix test_ssh unit tests
Adjust unit tests
---
salt/client/ssh/__init__.py | 19 +++++++++-------
.../pytests/unit/client/ssh/test_password.py | 4 +++-
.../unit/client/ssh/test_return_events.py | 2 +-
tests/pytests/unit/client/ssh/test_ssh.py | 22 +++++++++----------
4 files changed, 26 insertions(+), 21 deletions(-)
diff --git a/salt/client/ssh/__init__.py b/salt/client/ssh/__init__.py
index a527c03de6..d5a679821e 100644
--- a/salt/client/ssh/__init__.py
+++ b/salt/client/ssh/__init__.py
@@ -224,15 +224,16 @@ class SSH(MultiprocessingStateMixin):
ROSTER_UPDATE_FLAG = "#__needs_update"
def __init__(self, opts, context=None):
+ self.opts = copy.deepcopy(opts)
+ self.sopts = copy.deepcopy(self.opts)
self.__parsed_rosters = {SSH.ROSTER_UPDATE_FLAG: True}
- pull_sock = os.path.join(opts["sock_dir"], "master_event_pull.ipc")
+ pull_sock = os.path.join(self.opts["sock_dir"], "master_event_pull.ipc")
if os.path.exists(pull_sock) and zmq:
self.event = salt.utils.event.get_event(
- "master", opts["sock_dir"], opts=opts, listen=False
+ "master", self.opts["sock_dir"], opts=self.opts, listen=False
)
else:
self.event = None
- self.opts = opts
if self.opts["regen_thin"]:
self.opts["ssh_wipe"] = True
if not salt.utils.path.which("ssh"):
@@ -243,7 +244,7 @@ class SSH(MultiprocessingStateMixin):
" to run. Exiting."
),
)
- self.opts["_ssh_version"] = ssh_version()
+ self.sopts["_ssh_version"] = ssh_version()
self.tgt_type = (
self.opts["selected_target_option"]
if self.opts["selected_target_option"]
@@ -339,6 +340,9 @@ class SSH(MultiprocessingStateMixin):
self.opts["cachedir"], "salt-ssh.session.lock"
)
self.ssh_session_grace_time = int(self.opts.get("ssh_session_grace_time", 1))
+ self.sopts["tgt"] = copy.deepcopy(self.opts["tgt"])
+ self.sopts["ssh_cli_tgt"] = copy.deepcopy(self.opts["ssh_cli_tgt"])
+ self.opts = self.sopts
# __setstate__ and __getstate__ are only used on spawning platforms.
def __setstate__(self, state):
@@ -607,7 +611,6 @@ class SSH(MultiprocessingStateMixin):
Spin up the needed threads or processes and execute the subsequent
routines
"""
- opts = copy.deepcopy(self.opts)
que = multiprocessing.Queue()
running = {}
targets_queue = deque(self.targets.keys())
@@ -618,7 +621,7 @@ class SSH(MultiprocessingStateMixin):
if not self.targets:
log.error("No matching targets found in roster.")
break
- if len(running) < opts.get("ssh_max_procs", 25) and not init:
+ if len(running) < self.opts.get("ssh_max_procs", 25) and not init:
if targets_queue:
host = targets_queue.popleft()
else:
@@ -682,7 +685,7 @@ class SSH(MultiprocessingStateMixin):
continue
args = (
que,
- opts,
+ self.opts,
host,
self.targets[host],
mine,
@@ -776,7 +779,7 @@ class SSH(MultiprocessingStateMixin):
if len(rets) >= len(self.targets):
break
# Sleep when limit or all threads started
- if len(running) >= opts.get("ssh_max_procs", 25) or len(
+ if len(running) >= self.opts.get("ssh_max_procs", 25) or len(
self.targets
) >= len(running):
time.sleep(0.1)
diff --git a/tests/pytests/unit/client/ssh/test_password.py b/tests/pytests/unit/client/ssh/test_password.py
index 8a7794d2f4..0ca28d022e 100644
--- a/tests/pytests/unit/client/ssh/test_password.py
+++ b/tests/pytests/unit/client/ssh/test_password.py
@@ -27,6 +27,8 @@ def test_password_failure(temp_salt_master, tmp_path):
opts["argv"] = ["test.ping"]
opts["selected_target_option"] = "glob"
opts["tgt"] = "localhost"
+ opts["ssh_cli_tgt"] = "localhost"
+ opts["_ssh_version"] = "foobar"
opts["arg"] = []
roster = str(tmp_path / "roster")
handle_ssh_ret = [
@@ -44,7 +46,7 @@ def test_password_failure(temp_salt_master, tmp_path):
"salt.client.ssh.SSH.handle_ssh", MagicMock(return_value=handle_ssh_ret)
), patch("salt.client.ssh.SSH.key_deploy", MagicMock(return_value=expected)), patch(
"salt.output.display_output", display_output
- ):
+ ), patch("salt.client.ssh.ssh_version", MagicMock(return_value="foobar")):
client = ssh.SSH(opts)
ret = next(client.run_iter())
with pytest.raises(SystemExit):
diff --git a/tests/pytests/unit/client/ssh/test_return_events.py b/tests/pytests/unit/client/ssh/test_return_events.py
index 1f0b0dbf33..18714741b9 100644
--- a/tests/pytests/unit/client/ssh/test_return_events.py
+++ b/tests/pytests/unit/client/ssh/test_return_events.py
@@ -43,7 +43,7 @@ def test_not_missing_fun_calling_wfuncs(temp_salt_master, tmp_path):
assert "localhost" in ret
assert "fun" in ret["localhost"]
client.run()
- display_output.assert_called_once_with(expected, "nested", opts)
+ display_output.assert_called_once_with(expected, "nested", client.opts)
assert ret is handle_ssh_ret[0]
assert len(client.event.fire_event.call_args_list) == 2
assert "fun" in client.event.fire_event.call_args_list[0][0][0]
diff --git a/tests/pytests/unit/client/ssh/test_ssh.py b/tests/pytests/unit/client/ssh/test_ssh.py
index 2be96ab195..377aad9998 100644
--- a/tests/pytests/unit/client/ssh/test_ssh.py
+++ b/tests/pytests/unit/client/ssh/test_ssh.py
@@ -148,7 +148,7 @@ def test_expand_target_ip_address(opts, roster):
MagicMock(return_value=salt.utils.yaml.safe_load(roster)),
):
client._expand_target()
- assert opts["tgt"] == host
+ assert client.opts["tgt"] == host
def test_expand_target_no_host(opts, tmp_path):
@@ -171,7 +171,7 @@ def test_expand_target_no_host(opts, tmp_path):
assert opts["tgt"] == user + host
with patch("salt.roster.get_roster_file", MagicMock(return_value=roster_file)):
client._expand_target()
- assert opts["tgt"] == host
+ assert client.opts["tgt"] == host
def test_expand_target_dns(opts, roster):
@@ -192,7 +192,7 @@ def test_expand_target_dns(opts, roster):
MagicMock(return_value=salt.utils.yaml.safe_load(roster)),
):
client._expand_target()
- assert opts["tgt"] == host
+ assert client.opts["tgt"] == host
def test_expand_target_no_user(opts, roster):
@@ -204,7 +204,7 @@ def test_expand_target_no_user(opts, roster):
with patch("salt.utils.network.is_reachable_host", MagicMock(return_value=False)):
client = ssh.SSH(opts)
- assert opts["tgt"] == host
+ assert client.opts["tgt"] == host
with patch(
"salt.roster.get_roster_file", MagicMock(return_value="/etc/salt/roster")
@@ -213,7 +213,7 @@ def test_expand_target_no_user(opts, roster):
MagicMock(return_value=salt.utils.yaml.safe_load(roster)),
):
client._expand_target()
- assert opts["tgt"] == host
+ assert client.opts["tgt"] == host
def test_update_targets_ip_address(opts):
@@ -228,7 +228,7 @@ def test_update_targets_ip_address(opts):
client = ssh.SSH(opts)
assert opts["tgt"] == user + host
client._update_targets()
- assert opts["tgt"] == host
+ assert client.opts["tgt"] == host
assert client.targets[host]["user"] == user.split("@")[0]
@@ -244,7 +244,7 @@ def test_update_targets_dns(opts):
client = ssh.SSH(opts)
assert opts["tgt"] == user + host
client._update_targets()
- assert opts["tgt"] == host
+ assert client.opts["tgt"] == host
assert client.targets[host]["user"] == user.split("@")[0]
@@ -259,7 +259,7 @@ def test_update_targets_no_user(opts):
client = ssh.SSH(opts)
assert opts["tgt"] == host
client._update_targets()
- assert opts["tgt"] == host
+ assert client.opts["tgt"] == host
def test_update_expand_target_dns(opts, roster):
@@ -281,7 +281,7 @@ def test_update_expand_target_dns(opts, roster):
):
client._expand_target()
client._update_targets()
- assert opts["tgt"] == host
+ assert client.opts["tgt"] == host
assert client.targets[host]["user"] == user.split("@")[0]
@@ -299,7 +299,7 @@ def test_parse_tgt(opts):
client = ssh.SSH(opts)
assert client.parse_tgt["hostname"] == host
assert client.parse_tgt["user"] == user.split("@")[0]
- assert opts.get("ssh_cli_tgt") == user + host
+ assert client.opts.get("ssh_cli_tgt") == user + host
def test_parse_tgt_no_user(opts):
@@ -316,7 +316,7 @@ def test_parse_tgt_no_user(opts):
client = ssh.SSH(opts)
assert client.parse_tgt["hostname"] == host
assert client.parse_tgt["user"] == opts["ssh_user"]
- assert opts.get("ssh_cli_tgt") == host
+ assert client.opts.get("ssh_cli_tgt") == host
def test_extra_filerefs(tmp_path, opts):
--
2.39.2

View File

@ -0,0 +1,102 @@
From 4240f0d5ffbc46c557885c5a28d1f2fd0b4c5e48 Mon Sep 17 00:00:00 2001
From: Victor Zhestkov <35733135+vzhestkov@users.noreply.github.com>
Date: Mon, 8 Nov 2021 17:42:36 +0300
Subject: [PATCH] Prevent pkg plugins errors on missing cookie path
(bsc#1186738) - 3002.2 (#415)
* Prevent pkg plugins errors on missing cookie path (bsc#1186738)
* Narrowing down exception handling
* Modify for Python 3 only
* Fix yumnotify
---
scripts/suse/yum/plugins/README.md | 2 +-
scripts/suse/yum/plugins/yumnotify.py | 17 +++++++++++++----
scripts/suse/zypper/plugins/commit/zyppnotify | 18 ++++++++++++------
3 files changed, 26 insertions(+), 11 deletions(-)
diff --git a/scripts/suse/yum/plugins/README.md b/scripts/suse/yum/plugins/README.md
index cb3abd2260..3515845b31 100644
--- a/scripts/suse/yum/plugins/README.md
+++ b/scripts/suse/yum/plugins/README.md
@@ -11,7 +11,7 @@ Configuration files are going to:
Plugin itself goes to:
- `/usr/share/yum-plugins/[name].conf`
+ `/usr/share/yum-plugins/[name].py`
## Permissions
diff --git a/scripts/suse/yum/plugins/yumnotify.py b/scripts/suse/yum/plugins/yumnotify.py
index 4e137191a0..0d117e8946 100644
--- a/scripts/suse/yum/plugins/yumnotify.py
+++ b/scripts/suse/yum/plugins/yumnotify.py
@@ -5,6 +5,7 @@
import hashlib
import os
+import sys
from yum.plugins import TYPE_CORE
@@ -51,7 +52,15 @@ def posttrans_hook(conduit):
"""
# Integrate Yum with Salt
if "SALT_RUNNING" not in os.environ:
- with open(CK_PATH, "w") as ck_fh:
- ck_fh.write(
- "{chksum} {mtime}\n".format(chksum=_get_checksum(), mtime=_get_mtime())
- )
+ try:
+ ck_dir = os.path.dirname(CK_PATH)
+ if not os.path.exists(ck_dir):
+ os.makedirs(ck_dir)
+ with open(CK_PATH, "w") as ck_fh:
+ ck_fh.write(
+ "{chksum} {mtime}\n".format(
+ chksum=_get_checksum(), mtime=_get_mtime()
+ )
+ )
+ except OSError as e:
+ print("Unable to save the cookie file: %s" % (e), file=sys.stderr)
diff --git a/scripts/suse/zypper/plugins/commit/zyppnotify b/scripts/suse/zypper/plugins/commit/zyppnotify
index bacbc8b97e..e3528e87a9 100755
--- a/scripts/suse/zypper/plugins/commit/zyppnotify
+++ b/scripts/suse/zypper/plugins/commit/zyppnotify
@@ -1,4 +1,4 @@
-#!/usr/bin/python
+#!/usr/bin/python3
#
# Copyright (c) 2016 SUSE Linux LLC
# All Rights Reserved.
@@ -55,12 +55,18 @@ class DriftDetector(Plugin):
Hook when plugin closes Zypper's transaction.
"""
if "SALT_RUNNING" not in os.environ:
- with open(self.ck_path, "w") as ck_fh:
- ck_fh.write(
- "{chksum} {mtime}\n".format(
- chksum=self._get_checksum(), mtime=self._get_mtime()
+ try:
+ ck_dir = os.path.dirname(self.ck_path)
+ if not os.path.exists(ck_dir):
+ os.makedirs(ck_dir)
+ with open(self.ck_path, "w") as ck_fh:
+ ck_fh.write(
+ "{chksum} {mtime}\n".format(
+ chksum=self._get_checksum(), mtime=self._get_mtime()
+ )
)
- )
+ except OSError as e:
+ print("Unable to save the cookie file: %s" % (e), file=sys.stderr)
self.ack()
--
2.39.2

View File

@ -0,0 +1,68 @@
From 4ea91a61abbb6ef001f057685370454c85b72c3a Mon Sep 17 00:00:00 2001
From: Victor Zhestkov <vzhestkov@suse.com>
Date: Mon, 21 Aug 2023 13:04:32 +0200
Subject: [PATCH] Prevent possible exceptions on
salt.utils.user.get_group_dict (bsc#1212794)
* Prevent KeyError on calling grp.getgrnam in case of improper group
* Add test of calling salt.utils.user.get_group_dict
for the user having improper duplicate group
* Update tests/pytests/functional/utils/user/test_get_group_dict.py
Co-authored-by: Pedro Algarvio <pedro@algarvio.me>
---------
Co-authored-by: Pedro Algarvio <pedro@algarvio.me>
---
salt/utils/user.py | 6 +++++-
.../utils/user/test_get_group_dict.py | 17 +++++++++++++++++
2 files changed, 22 insertions(+), 1 deletion(-)
create mode 100644 tests/pytests/functional/utils/user/test_get_group_dict.py
diff --git a/salt/utils/user.py b/salt/utils/user.py
index 9763667443..2f1ca65cf9 100644
--- a/salt/utils/user.py
+++ b/salt/utils/user.py
@@ -352,7 +352,11 @@ def get_group_dict(user=None, include_default=True):
group_dict = {}
group_names = get_group_list(user, include_default=include_default)
for group in group_names:
- group_dict.update({group: grp.getgrnam(group).gr_gid})
+ try:
+ group_dict.update({group: grp.getgrnam(group).gr_gid})
+ except KeyError:
+ # In case if imporer duplicate group was returned by get_group_list
+ pass
return group_dict
diff --git a/tests/pytests/functional/utils/user/test_get_group_dict.py b/tests/pytests/functional/utils/user/test_get_group_dict.py
new file mode 100644
index 0000000000..653d664607
--- /dev/null
+++ b/tests/pytests/functional/utils/user/test_get_group_dict.py
@@ -0,0 +1,17 @@
+import logging
+
+import pytest
+
+import salt.utils.platform
+import salt.utils.user
+from tests.support.mock import patch
+
+log = logging.getLogger(__name__)
+
+pytestmark = [
+ pytest.mark.skip_unless_on_linux(reason="Should only run in platforms which have duplicate GID support"),
+]
+def test_get_group_dict_with_improper_duplicate_root_group():
+ with patch("salt.utils.user.get_group_list", return_value=["+", "root"]):
+ group_list = salt.utils.user.get_group_dict("root")
+ assert group_list == {"root": 0}
--
2.41.0

View File

@ -0,0 +1,33 @@
From 1b4e382856e1d5d8ef95890aec5a8e5e07254708 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Pablo=20Su=C3=A1rez=20Hern=C3=A1ndez?=
<psuarezhernandez@suse.com>
Date: Mon, 28 Feb 2022 14:25:43 +0000
Subject: [PATCH] Prevent shell injection via pre_flight_script_args
(#497)
Add tests around preflight script args
Readjust logic to validate script args
Use RLock to prevent issues in single threads
---
salt/_logging/impl.py | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/salt/_logging/impl.py b/salt/_logging/impl.py
index e050f43caf..2d1a276cb8 100644
--- a/salt/_logging/impl.py
+++ b/salt/_logging/impl.py
@@ -107,7 +107,7 @@ DFLT_LOG_FMT_LOGFILE = "%(asctime)s,%(msecs)03d [%(name)-17s:%(lineno)-4d][%(lev
# LOG_LOCK is used to prevent deadlocks on using logging
# in combination with multiprocessing with salt-api
-LOG_LOCK = threading.Lock()
+LOG_LOCK = threading.RLock()
class SaltLogRecord(logging.LogRecord):
--
2.39.2

View File

@ -0,0 +1,29 @@
From ce0fedf25dea7eb63ccff8f9b90a9a35571a5f9d Mon Sep 17 00:00:00 2001
From: Mihai Dinca <mdinca@suse.de>
Date: Thu, 7 Nov 2019 15:11:49 +0100
Subject: [PATCH] Read repo info without using interpolation
(bsc#1135656)
---
salt/modules/zypperpkg.py | 4 +++-
1 file changed, 3 insertions(+), 1 deletion(-)
diff --git a/salt/modules/zypperpkg.py b/salt/modules/zypperpkg.py
index 6adf5f9aa3..d8220a1fdd 100644
--- a/salt/modules/zypperpkg.py
+++ b/salt/modules/zypperpkg.py
@@ -1155,7 +1155,9 @@ def _get_repo_info(alias, repos_cfg=None, root=None):
Get one repo meta-data.
"""
try:
- meta = dict((repos_cfg or _get_configured_repos(root=root)).items(alias))
+ meta = dict(
+ (repos_cfg or _get_configured_repos(root=root)).items(alias, raw=True)
+ )
meta["alias"] = alias
for key, val in meta.items():
if val in ["0", "1"]:
--
2.39.2

View File

@ -0,0 +1,137 @@
From a1a8b5a886705e5f005cb7ab067e42095066ef80 Mon Sep 17 00:00:00 2001
From: Jochen Breuer <jbreuer@suse.de>
Date: Fri, 30 Aug 2019 14:20:06 +0200
Subject: [PATCH] Restore default behaviour of pkg list return
The default behaviour for pkg list return was to not include patches,
even when installing patches. Only the packages were returned. There
is now a parameter to also return patches if that is needed.
Co-authored-by: Mihai Dinca <mdinca@suse.de>
---
salt/modules/zypperpkg.py | 34 +++++++++++++++++++++++++---------
1 file changed, 25 insertions(+), 9 deletions(-)
diff --git a/salt/modules/zypperpkg.py b/salt/modules/zypperpkg.py
index 9d16fcb0b1..6adf5f9aa3 100644
--- a/salt/modules/zypperpkg.py
+++ b/salt/modules/zypperpkg.py
@@ -1456,8 +1456,10 @@ def refresh_db(force=None, root=None):
return ret
-def _find_types(pkgs):
+def _detect_includes(pkgs, inclusion_detection):
"""Form a package names list, find prefixes of packages types."""
+ if not inclusion_detection:
+ return None
return sorted({pkg.split(":", 1)[0] for pkg in pkgs if len(pkg.split(":", 1)) == 2})
@@ -1473,6 +1475,7 @@ def install(
ignore_repo_failure=False,
no_recommends=False,
root=None,
+ inclusion_detection=False,
**kwargs
):
"""
@@ -1588,6 +1591,9 @@ def install(
.. versionadded:: 2018.3.0
+ inclusion_detection:
+ Detect ``includes`` based on ``sources``
+ By default packages are always included
Returns a dict containing the new package names and versions::
@@ -1663,7 +1669,8 @@ def install(
diff_attr = kwargs.get("diff_attr")
- includes = _find_types(targets)
+ includes = _detect_includes(targets, inclusion_detection)
+
old = (
list_pkgs(attr=diff_attr, root=root, includes=includes)
if not downloadonly
@@ -1964,7 +1971,7 @@ def upgrade(
return ret
-def _uninstall(name=None, pkgs=None, root=None):
+def _uninstall(inclusion_detection, name=None, pkgs=None, root=None):
"""
Remove and purge do identical things but with different Zypper commands,
this function performs the common logic.
@@ -1974,7 +1981,7 @@ def _uninstall(name=None, pkgs=None, root=None):
except MinionError as exc:
raise CommandExecutionError(exc)
- includes = _find_types(pkg_params.keys())
+ includes = _detect_includes(pkg_params.keys(), inclusion_detection)
old = list_pkgs(root=root, includes=includes)
targets = []
for target in pkg_params:
@@ -2037,7 +2044,7 @@ def normalize_name(name):
def remove(
- name=None, pkgs=None, root=None, **kwargs
+ name=None, pkgs=None, root=None, inclusion_detection=False, **kwargs
): # pylint: disable=unused-argument
"""
.. versionchanged:: 2015.8.12,2016.3.3,2016.11.0
@@ -2069,8 +2076,11 @@ def remove(
root
Operate on a different root directory.
- .. versionadded:: 0.16.0
+ inclusion_detection:
+ Detect ``includes`` based on ``pkgs``
+ By default packages are always included
+ .. versionadded:: 0.16.0
Returns a dict containing the changes.
@@ -2082,10 +2092,12 @@ def remove(
salt '*' pkg.remove <package1>,<package2>,<package3>
salt '*' pkg.remove pkgs='["foo", "bar"]'
"""
- return _uninstall(name=name, pkgs=pkgs, root=root)
+ return _uninstall(inclusion_detection, name=name, pkgs=pkgs, root=root)
-def purge(name=None, pkgs=None, root=None, **kwargs): # pylint: disable=unused-argument
+def purge(
+ name=None, pkgs=None, root=None, inclusion_detection=False, **kwargs
+): # pylint: disable=unused-argument
"""
.. versionchanged:: 2015.8.12,2016.3.3,2016.11.0
On minions running systemd>=205, `systemd-run(1)`_ is now used to
@@ -2117,6 +2129,10 @@ def purge(name=None, pkgs=None, root=None, **kwargs): # pylint: disable=unused-
root
Operate on a different root directory.
+ inclusion_detection:
+ Detect ``includes`` based on ``pkgs``
+ By default packages are always included
+
.. versionadded:: 0.16.0
@@ -2130,7 +2146,7 @@ def purge(name=None, pkgs=None, root=None, **kwargs): # pylint: disable=unused-
salt '*' pkg.purge <package1>,<package2>,<package3>
salt '*' pkg.purge pkgs='["foo", "bar"]'
"""
- return _uninstall(name=name, pkgs=pkgs, root=root)
+ return _uninstall(inclusion_detection, name=name, pkgs=pkgs, root=root)
def list_holds(pattern=None, full=True, root=None, **kwargs):
--
2.39.2

View File

@ -0,0 +1,31 @@
From ceaf42a67d21cb6fa723339559c85be969e67308 Mon Sep 17 00:00:00 2001
From: Mihai Dinca <mdinca@suse.de>
Date: Thu, 13 Dec 2018 12:17:35 +0100
Subject: [PATCH] Return the expected powerpc os arch (bsc#1117995)
---
salt/utils/pkg/rpm.py | 7 ++++---
1 file changed, 4 insertions(+), 3 deletions(-)
diff --git a/salt/utils/pkg/rpm.py b/salt/utils/pkg/rpm.py
index f9975f8dff..147447ba75 100644
--- a/salt/utils/pkg/rpm.py
+++ b/salt/utils/pkg/rpm.py
@@ -69,9 +69,10 @@ def get_osarch():
stderr=subprocess.PIPE,
).communicate()[0]
else:
- ret = "".join([x for x in platform.uname()[-2:] if x][-1:])
-
- return salt.utils.stringutils.to_str(ret).strip() or "unknown"
+ ret = "".join(list(filter(None, platform.uname()[-2:]))[-1:])
+ ret = salt.utils.stringutils.to_str(ret).strip() or "unknown"
+ ARCH_FIXES_MAPPING = {"powerpc64le": "ppc64le"}
+ return ARCH_FIXES_MAPPING.get(ret, ret)
def check_32(arch, osarch=None):
--
2.39.2

View File

@ -0,0 +1,216 @@
From 76f2b98a3a9b9a49903a4d3b47dca0f2311bd7af Mon Sep 17 00:00:00 2001
From: Alexander Graul <agraul@suse.com>
Date: Tue, 18 Jan 2022 19:07:34 +0100
Subject: [PATCH] Revert "Fixing a use case when multiple inotify beacons
are defined but when notifications are fired the configuration fron the first
beacon are used." Revert "Adding a util function to remove hidden (options
starting with underscore) from the beacon configuration. This is used when
the beacons loop through the configuration, eg. status beacon, and expect
certain options."
This reverts commit 68a891ab2fe53ebf329b9c83b875f3575e87e266.
This reverts commit 66c58dedf8c364eaeb35c5adce8bcc8fe5c1219a.
---
salt/beacons/__init__.py | 1 -
salt/beacons/diskusage.py | 3 ---
salt/beacons/inotify.py | 24 ++++++++--------------
salt/beacons/napalm_beacon.py | 6 ++----
salt/beacons/status.py | 4 ----
tests/pytests/unit/beacons/test_inotify.py | 5 +----
tests/pytests/unit/test_beacons.py | 17 ---------------
7 files changed, 11 insertions(+), 49 deletions(-)
diff --git a/salt/beacons/__init__.py b/salt/beacons/__init__.py
index b346c2a648..90918cba5b 100644
--- a/salt/beacons/__init__.py
+++ b/salt/beacons/__init__.py
@@ -94,7 +94,6 @@ class Beacon:
log.error("Configuration for beacon must be a list.")
continue
- b_config[mod].append({"_beacon_name": mod})
fun_str = "{}.beacon".format(beacon_name)
if fun_str in self.beacons:
runonce = self._determine_beacon_config(
diff --git a/salt/beacons/diskusage.py b/salt/beacons/diskusage.py
index 5be33ff975..0b8d7c53e1 100644
--- a/salt/beacons/diskusage.py
+++ b/salt/beacons/diskusage.py
@@ -8,7 +8,6 @@ Beacon to monitor disk usage.
import logging
import re
-import salt.utils.beacons
import salt.utils.platform
try:
@@ -83,8 +82,6 @@ def beacon(config):
it will override the previously defined threshold.
"""
- whitelist = []
- config = salt.utils.beacons.remove_hidden_options(config, whitelist)
parts = psutil.disk_partitions(all=True)
ret = []
for mounts in config:
diff --git a/salt/beacons/inotify.py b/salt/beacons/inotify.py
index 283b84fdc7..0dc60662a6 100644
--- a/salt/beacons/inotify.py
+++ b/salt/beacons/inotify.py
@@ -67,19 +67,17 @@ def _get_notifier(config):
"""
Check the context for the notifier and construct it if not present
"""
- beacon_name = config.get("_beacon_name", "inotify")
- notifier = "{}.notifier".format(beacon_name)
- if notifier not in __context__:
+ if "inotify.notifier" not in __context__:
__context__["inotify.queue"] = collections.deque()
wm = pyinotify.WatchManager()
- __context__[notifier] = pyinotify.Notifier(wm, _enqueue)
+ __context__["inotify.notifier"] = pyinotify.Notifier(wm, _enqueue)
if (
"coalesce" in config
and isinstance(config["coalesce"], bool)
and config["coalesce"]
):
- __context__[notifier].coalesce_events()
- return __context__[notifier]
+ __context__["inotify.notifier"].coalesce_events()
+ return __context__["inotify.notifier"]
def validate(config):
@@ -239,9 +237,6 @@ def beacon(config):
being at the Notifier level in pyinotify.
"""
- whitelist = ["_beacon_name"]
- config = salt.utils.beacons.remove_hidden_options(config, whitelist)
-
config = salt.utils.beacons.list_to_dict(config)
ret = []
@@ -264,7 +259,7 @@ def beacon(config):
break
path = os.path.dirname(path)
- excludes = config["files"].get(path, {}).get("exclude", "")
+ excludes = config["files"][path].get("exclude", "")
if excludes and isinstance(excludes, list):
for exclude in excludes:
@@ -351,9 +346,6 @@ def beacon(config):
def close(config):
- config = salt.utils.beacons.list_to_dict(config)
- beacon_name = config.get("_beacon_name", "inotify")
- notifier = "{}.notifier".format(beacon_name)
- if notifier in __context__:
- __context__[notifier].stop()
- del __context__[notifier]
+ if "inotify.notifier" in __context__:
+ __context__["inotify.notifier"].stop()
+ del __context__["inotify.notifier"]
diff --git a/salt/beacons/napalm_beacon.py b/salt/beacons/napalm_beacon.py
index 122d56edb7..692fbe07aa 100644
--- a/salt/beacons/napalm_beacon.py
+++ b/salt/beacons/napalm_beacon.py
@@ -168,9 +168,10 @@ with a NTP server at a stratum level greater than 5.
"""
import logging
+
+# Import Python std lib
import re
-import salt.utils.beacons
import salt.utils.napalm
log = logging.getLogger(__name__)
@@ -306,9 +307,6 @@ def beacon(config):
"""
Watch napalm function and fire events.
"""
- whitelist = []
- config = salt.utils.beacons.remove_hidden_options(config, whitelist)
-
log.debug("Executing napalm beacon with config:")
log.debug(config)
ret = []
diff --git a/salt/beacons/status.py b/salt/beacons/status.py
index aa5aa13b47..e2c3177ea8 100644
--- a/salt/beacons/status.py
+++ b/salt/beacons/status.py
@@ -91,7 +91,6 @@ import datetime
import logging
import salt.exceptions
-import salt.utils.beacons
import salt.utils.platform
log = logging.getLogger(__name__)
@@ -119,9 +118,6 @@ def beacon(config):
log.debug(config)
ctime = datetime.datetime.utcnow().isoformat()
- whitelist = []
- config = salt.utils.beacons.remove_hidden_options(config, whitelist)
-
if not config:
config = [
{
diff --git a/tests/pytests/unit/beacons/test_inotify.py b/tests/pytests/unit/beacons/test_inotify.py
index 30a9a91db4..678a528529 100644
--- a/tests/pytests/unit/beacons/test_inotify.py
+++ b/tests/pytests/unit/beacons/test_inotify.py
@@ -263,7 +263,6 @@ def test_multi_files_exclude(tmp_path):
# Check __get_notifier and ensure that the right bits are in __context__
-# including a beacon_name specific notifier is found.
def test__get_notifier():
config = {
"files": {
@@ -293,10 +292,8 @@ def test__get_notifier():
},
},
"coalesce": True,
- "beacon_module": "inotify",
- "_beacon_name": "httpd.inotify",
}
ret = inotify._get_notifier(config)
assert "inotify.queue" in inotify.__context__
- assert "httpd.inotify.notifier" in inotify.__context__
+ assert "inotify.notifier" in inotify.__context__
diff --git a/tests/pytests/unit/test_beacons.py b/tests/pytests/unit/test_beacons.py
index 217cd5c6a4..855e271d7d 100644
--- a/tests/pytests/unit/test_beacons.py
+++ b/tests/pytests/unit/test_beacons.py
@@ -104,20 +104,3 @@ def test_beacon_module(minion_opts):
}
]
assert ret == _expected
-
- # Ensure that "beacon_name" is available in the call to the beacon function
- name = "ps.beacon"
- mocked = {name: MagicMock(return_value=_expected)}
- mocked[name].__globals__ = {}
- calls = [
- call(
- [
- {"processes": {"apache2": "stopped"}},
- {"beacon_module": "ps"},
- {"_beacon_name": "watch_apache"},
- ]
- )
- ]
- with patch.object(beacon, "beacons", mocked) as patched:
- beacon.process(minion_opts["beacons"], minion_opts["grains"])
- patched[name].assert_has_calls(calls)
--
2.39.2

View File

@ -0,0 +1,418 @@
From 3cc2aee2290bd9a4fba9e0cebe3b65370aa76af6 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Pablo=20Su=C3=A1rez=20Hern=C3=A1ndez?=
<psuarezhernandez@suse.com>
Date: Fri, 1 Sep 2023 08:22:44 +0100
Subject: [PATCH] Revert usage of long running REQ channel (bsc#1213960,
bsc#1213630, bsc#1213257)
* Revert usage of long running REQ channel (bsc#1213960, bsc#1213630, bsc#1213257)
This reverts commits:
https://github.com/saltstack/salt/commit/a99ffb557b8a1142225d4925aba4a5e493923d2f
https://github.com/saltstack/salt/commit/80ae5188807550e7592fa12d8661ee83c4313ec8
https://github.com/saltstack/salt/commit/3c7e1ec1f08abd7cd1ba78ad7880acb6ba6fdce7
https://github.com/saltstack/salt/commit/171926cc57618b51bf3fdc042b62212e681180fc
From this PR: https://github.com/saltstack/salt/pull/61468
See: https://github.com/saltstack/salt/issues/62959#issuecomment-1658335432
* Revert "Fix linter"
This reverts commit d09d2d3f31e06c554b4ed869b7dc4f8b8bce5dc9.
* Revert "add a regression test"
This reverts commit b2c32be0a80c92585a9063409c42895357bb0dbe.
* Fix failing tests after reverting commits
---
doc/topics/development/architecture.rst | 8 +-
salt/channel/client.py | 9 +--
salt/minion.py | 47 +++--------
salt/transport/ipc.py | 5 +-
salt/utils/asynchronous.py | 2 +-
.../transport/server/test_req_channel.py | 16 ++--
tests/pytests/unit/test_minion.py | 79 ++++---------------
7 files changed, 39 insertions(+), 127 deletions(-)
diff --git a/doc/topics/development/architecture.rst b/doc/topics/development/architecture.rst
index 17400db001..1c717092f8 100644
--- a/doc/topics/development/architecture.rst
+++ b/doc/topics/development/architecture.rst
@@ -220,15 +220,11 @@ the received message.
4) The new minion thread is created. The _thread_return() function starts up
and actually calls out to the requested function contained in the job.
5) The requested function runs and returns a result. [Still in thread.]
-6) The result of the function that's run is published on the minion's local event bus with event
-tag "__master_req_channel_payload" [Still in thread.]
+6) The result of the function that's run is encrypted and returned to the
+master's ReqServer (TCP 4506 on master). [Still in thread.]
7) Thread exits. Because the main thread was only blocked for the time that it
took to initialize the worker thread, many other requests could have been
received and processed during this time.
-8) Minion event handler gets the event with tag "__master_req_channel_payload"
-and sends the payload to master's ReqServer (TCP 4506 on master), via the long-running async request channel
-that was opened when minion first started up.
-
A Note on ClearFuncs vs. AESFuncs
diff --git a/salt/channel/client.py b/salt/channel/client.py
index e5b073ccdb..76d7a8e5b9 100644
--- a/salt/channel/client.py
+++ b/salt/channel/client.py
@@ -98,7 +98,6 @@ class AsyncReqChannel:
"_crypted_transfer",
"_uncrypted_transfer",
"send",
- "connect",
]
close_methods = [
"close",
@@ -128,7 +127,7 @@ class AsyncReqChannel:
else:
auth = None
- transport = salt.transport.request_client(opts, io_loop=io_loop)
+ transport = salt.transport.request_client(opts, io_loop)
return cls(opts, transport, auth)
def __init__(self, opts, transport, auth, **kwargs):
@@ -271,10 +270,6 @@ class AsyncReqChannel:
raise salt.ext.tornado.gen.Return(ret)
- @salt.ext.tornado.gen.coroutine
- def connect(self):
- yield self.transport.connect()
-
@salt.ext.tornado.gen.coroutine
def send(self, load, tries=3, timeout=60, raw=False):
"""
@@ -295,7 +290,7 @@ class AsyncReqChannel:
ret = yield self._crypted_transfer(load, timeout=timeout, raw=raw)
break
except Exception as exc: # pylint: disable=broad-except
- log.trace("Failed to send msg %r", exc)
+ log.error("Failed to send msg %r", exc)
if _try >= tries:
raise
else:
diff --git a/salt/minion.py b/salt/minion.py
index c3b65f16c3..9597d6e63a 100644
--- a/salt/minion.py
+++ b/salt/minion.py
@@ -1361,30 +1361,11 @@ class Minion(MinionBase):
"""
Return a future which will complete when you are connected to a master
"""
- # Consider refactoring so that eval_master does not have a subtle side-effect on the contents of the opts array
master, self.pub_channel = yield self.eval_master(
self.opts, self.timeout, self.safe, failed
)
-
- # a long-running req channel
- self.req_channel = salt.transport.client.AsyncReqChannel.factory(
- self.opts, io_loop=self.io_loop
- )
-
- if hasattr(
- self.req_channel, "connect"
- ): # TODO: consider generalizing this for all channels
- log.debug("Connecting minion's long-running req channel")
- yield self.req_channel.connect()
-
yield self._post_master_init(master)
- @salt.ext.tornado.gen.coroutine
- def handle_payload(self, payload, reply_func):
- self.payloads.append(payload)
- yield reply_func(payload)
- self.payload_ack.notify()
-
# TODO: better name...
@salt.ext.tornado.gen.coroutine
def _post_master_init(self, master):
@@ -1599,6 +1580,7 @@ class Minion(MinionBase):
return functions, returners, errors, executors
def _send_req_sync(self, load, timeout):
+
if self.opts["minion_sign_messages"]:
log.trace("Signing event to be published onto the bus.")
minion_privkey_path = os.path.join(self.opts["pki_dir"], "minion.pem")
@@ -1607,11 +1589,9 @@ class Minion(MinionBase):
)
load["sig"] = sig
- with salt.utils.event.get_event(
- "minion", opts=self.opts, listen=False
- ) as event:
- return event.fire_event(
- load, "__master_req_channel_payload", timeout=timeout
+ with salt.channel.client.ReqChannel.factory(self.opts) as channel:
+ return channel.send(
+ load, timeout=timeout, tries=self.opts["return_retry_tries"]
)
@salt.ext.tornado.gen.coroutine
@@ -1624,11 +1604,9 @@ class Minion(MinionBase):
)
load["sig"] = sig
- with salt.utils.event.get_event(
- "minion", opts=self.opts, listen=False
- ) as event:
- ret = yield event.fire_event_async(
- load, "__master_req_channel_payload", timeout=timeout
+ with salt.channel.client.AsyncReqChannel.factory(self.opts) as channel:
+ ret = yield channel.send(
+ load, timeout=timeout, tries=self.opts["return_retry_tries"]
)
raise salt.ext.tornado.gen.Return(ret)
@@ -2055,7 +2033,7 @@ class Minion(MinionBase):
minion_instance._return_pub(ret)
# Add default returners from minion config
- # Should have been converted to comma-delimited string already
+ # Should have been coverted to comma-delimited string already
if isinstance(opts.get("return"), str):
if data["ret"]:
data["ret"] = ",".join((data["ret"], opts["return"]))
@@ -2662,7 +2640,6 @@ class Minion(MinionBase):
"""
Send mine data to the master
"""
- # Consider using a long-running req channel to send mine data
with salt.channel.client.ReqChannel.factory(self.opts) as channel:
data["tok"] = self.tok
try:
@@ -2699,12 +2676,6 @@ class Minion(MinionBase):
force_refresh=data.get("force_refresh", False),
notify=data.get("notify", False),
)
- elif tag.startswith("__master_req_channel_payload"):
- yield _minion.req_channel.send(
- data,
- timeout=_minion._return_retry_timer(),
- tries=_minion.opts["return_retry_tries"],
- )
elif tag.startswith("pillar_refresh"):
yield _minion.pillar_refresh(
force_refresh=data.get("force_refresh", False),
@@ -3175,7 +3146,7 @@ class Minion(MinionBase):
if self._target_load(payload["load"]):
self._handle_decoded_payload(payload["load"])
elif self.opts["zmq_filtering"]:
- # In the filtering enabled case, we'd like to know when minion sees something it shouldn't
+ # In the filtering enabled case, we'd like to know when minion sees something it shouldnt
log.trace(
"Broadcast message received not for this minion, Load: %s",
payload["load"],
diff --git a/salt/transport/ipc.py b/salt/transport/ipc.py
index 3a3f0c7a5f..cee100b086 100644
--- a/salt/transport/ipc.py
+++ b/salt/transport/ipc.py
@@ -208,10 +208,7 @@ class IPCServer:
log.error("Exception occurred while handling stream: %s", exc)
def handle_connection(self, connection, address):
- log.trace(
- "IPCServer: Handling connection to address: %s",
- address if address else connection,
- )
+ log.trace("IPCServer: Handling connection to address: %s", address)
try:
with salt.utils.asynchronous.current_ioloop(self.io_loop):
stream = IOStream(
diff --git a/salt/utils/asynchronous.py b/salt/utils/asynchronous.py
index 0c645bbc3b..88596a4a20 100644
--- a/salt/utils/asynchronous.py
+++ b/salt/utils/asynchronous.py
@@ -34,7 +34,7 @@ class SyncWrapper:
This is uses as a simple wrapper, for example:
asynchronous = AsyncClass()
- # this method would regularly return a future
+ # this method would reguarly return a future
future = asynchronous.async_method()
sync = SyncWrapper(async_factory_method, (arg1, arg2), {'kwarg1': 'val'})
diff --git a/tests/pytests/functional/transport/server/test_req_channel.py b/tests/pytests/functional/transport/server/test_req_channel.py
index 4a74802a0d..555c040c1c 100644
--- a/tests/pytests/functional/transport/server/test_req_channel.py
+++ b/tests/pytests/functional/transport/server/test_req_channel.py
@@ -124,7 +124,7 @@ def req_channel_crypt(request):
@pytest.fixture
-def push_channel(req_server_channel, salt_minion, req_channel_crypt):
+def req_channel(req_server_channel, salt_minion, req_channel_crypt):
with salt.channel.client.ReqChannel.factory(
salt_minion.config, crypt=req_channel_crypt
) as _req_channel:
@@ -135,7 +135,7 @@ def push_channel(req_server_channel, salt_minion, req_channel_crypt):
_req_channel.obj._refcount = 0
-def test_basic(push_channel):
+def test_basic(req_channel):
"""
Test a variety of messages, make sure we get the expected responses
"""
@@ -145,11 +145,11 @@ def test_basic(push_channel):
{"baz": "qux", "list": [1, 2, 3]},
]
for msg in msgs:
- ret = push_channel.send(dict(msg), timeout=5, tries=1)
+ ret = req_channel.send(dict(msg), timeout=5, tries=1)
assert ret["load"] == msg
-def test_normalization(push_channel):
+def test_normalization(req_channel):
"""
Since we use msgpack, we need to test that list types are converted to lists
"""
@@ -160,21 +160,21 @@ def test_normalization(push_channel):
{"list": tuple([1, 2, 3])},
]
for msg in msgs:
- ret = push_channel.send(msg, timeout=5, tries=1)
+ ret = req_channel.send(msg, timeout=5, tries=1)
for key, value in ret["load"].items():
assert types[key] == type(value)
-def test_badload(push_channel, req_channel_crypt):
+def test_badload(req_channel, req_channel_crypt):
"""
Test a variety of bad requests, make sure that we get some sort of error
"""
msgs = ["", [], tuple()]
if req_channel_crypt == "clear":
for msg in msgs:
- ret = push_channel.send(msg, timeout=5, tries=1)
+ ret = req_channel.send(msg, timeout=5, tries=1)
assert ret == "payload and load must be a dict"
else:
for msg in msgs:
with pytest.raises(salt.exceptions.AuthenticationError):
- push_channel.send(msg, timeout=5, tries=1)
+ req_channel.send(msg, timeout=5, tries=1)
diff --git a/tests/pytests/unit/test_minion.py b/tests/pytests/unit/test_minion.py
index 1cee025a48..4508eaee95 100644
--- a/tests/pytests/unit/test_minion.py
+++ b/tests/pytests/unit/test_minion.py
@@ -55,27 +55,26 @@ def test_minion_load_grains_default():
@pytest.mark.parametrize(
- "event",
+ "req_channel",
[
(
- "fire_event",
- lambda data, tag, cb=None, timeout=60: True,
+ "salt.channel.client.AsyncReqChannel.factory",
+ lambda load, timeout, tries: salt.ext.tornado.gen.maybe_future(tries),
),
(
- "fire_event_async",
- lambda data, tag, cb=None, timeout=60: salt.ext.tornado.gen.maybe_future(
- True
- ),
+ "salt.channel.client.ReqChannel.factory",
+ lambda load, timeout, tries: tries,
),
],
)
-def test_send_req_fires_completion_event(event, minion_opts):
- event_enter = MagicMock()
- event_enter.send.side_effect = event[1]
- event = MagicMock()
- event.__enter__.return_value = event_enter
+def test_send_req_tries(req_channel, minion_opts):
+ channel_enter = MagicMock()
+ channel_enter.send.side_effect = req_channel[1]
+ channel = MagicMock()
+ channel.__enter__.return_value = channel_enter
- with patch("salt.utils.event.get_event", return_value=event):
+ with patch(req_channel[0], return_value=channel):
+ minion_opts = salt.config.DEFAULT_MINION_OPTS.copy()
minion_opts["random_startup_delay"] = 0
minion_opts["return_retry_tries"] = 30
minion_opts["grains"] = {}
@@ -85,62 +84,16 @@ def test_send_req_fires_completion_event(event, minion_opts):
load = {"load": "value"}
timeout = 60
- # XXX This is buggy because "async" in event[0] will never evaluate
- # to True and if it *did* evaluate to true the test would fail
- # because you Mock isn't a co-routine.
- if "async" in event[0]:
+ if "Async" in req_channel[0]:
rtn = minion._send_req_async(load, timeout).result()
else:
rtn = minion._send_req_sync(load, timeout)
- # get the
- for idx, call in enumerate(event.mock_calls, 1):
- if "fire_event" in call[0]:
- condition_event_tag = (
- len(call.args) > 1
- and call.args[1] == "__master_req_channel_payload"
- )
- condition_event_tag_error = "{} != {}; Call(number={}): {}".format(
- idx, call, call.args[1], "__master_req_channel_payload"
- )
- condition_timeout = (
- len(call.kwargs) == 1 and call.kwargs["timeout"] == timeout
- )
- condition_timeout_error = "{} != {}; Call(number={}): {}".format(
- idx, call, call.kwargs["timeout"], timeout
- )
-
- fire_event_called = True
- assert condition_event_tag, condition_event_tag_error
- assert condition_timeout, condition_timeout_error
-
- assert fire_event_called
- assert rtn
-
-
-async def test_send_req_async_regression_62453(minion_opts):
- event_enter = MagicMock()
- event_enter.send.side_effect = (
- lambda data, tag, cb=None, timeout=60: salt.ext.tornado.gen.maybe_future(True)
- )
- event = MagicMock()
- event.__enter__.return_value = event_enter
-
- minion_opts["random_startup_delay"] = 0
- minion_opts["return_retry_tries"] = 30
- minion_opts["grains"] = {}
- with patch("salt.loader.grains"):
- minion = salt.minion.Minion(minion_opts)
-
- load = {"load": "value"}
- timeout = 60
-
- # We are just validating no exception is raised
- rtn = await minion._send_req_async(load, timeout)
- assert rtn is False
+ assert rtn == 30
-def test_mine_send_tries():
+@patch("salt.channel.client.ReqChannel.factory")
+def test_mine_send_tries(req_channel_factory):
channel_enter = MagicMock()
channel_enter.send.side_effect = lambda load, timeout, tries: tries
channel = MagicMock()
--
2.41.0

View File

@ -0,0 +1,25 @@
From a94cfd5dea05c2c4a9d6b8b243048a2ceeb3f208 Mon Sep 17 00:00:00 2001
From: Christian Lanig <clanig@suse.com>
Date: Mon, 27 Nov 2017 13:10:26 +0100
Subject: [PATCH] Run salt-api as user salt (bsc#1064520)
---
pkg/common/salt-api.service | 1 +
1 file changed, 1 insertion(+)
diff --git a/pkg/common/salt-api.service b/pkg/common/salt-api.service
index d0b6d74120..9cdc9c582b 100644
--- a/pkg/common/salt-api.service
+++ b/pkg/common/salt-api.service
@@ -6,6 +6,7 @@ After=network.target
[Service]
Type=notify
NotifyAccess=all
+User=salt
LimitNOFILE=8192
ExecStart=/usr/bin/salt-api
TimeoutStopSec=3
--
2.39.2

View File

@ -0,0 +1,47 @@
From 6ffbf7fcc178f32c670b177b25ed64658c59f1bf Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Klaus=20K=C3=A4mpf?= <kkaempf@suse.de>
Date: Wed, 20 Jan 2016 11:01:06 +0100
Subject: [PATCH] Run salt master as dedicated salt user
* Minion runs always as a root
---
conf/master | 3 ++-
pkg/common/salt-common.logrotate | 2 ++
2 files changed, 4 insertions(+), 1 deletion(-)
diff --git a/conf/master b/conf/master
index f542051d76..acff94abec 100644
--- a/conf/master
+++ b/conf/master
@@ -25,7 +25,8 @@
# permissions to allow the specified user to run the master. The exception is
# the job cache, which must be deleted if this user is changed. If the
# modified files cause conflicts, set verify_env to False.
-#user: root
+user: salt
+syndic_user: salt
# Tell the master to also use salt-ssh when running commands against minions.
#enable_ssh_minions: False
diff --git a/pkg/common/salt-common.logrotate b/pkg/common/salt-common.logrotate
index a0306ff370..97d158db18 100644
--- a/pkg/common/salt-common.logrotate
+++ b/pkg/common/salt-common.logrotate
@@ -1,4 +1,5 @@
/var/log/salt/master {
+ su salt salt
weekly
missingok
rotate 7
@@ -15,6 +16,7 @@
}
/var/log/salt/key {
+ su salt salt
weekly
missingok
rotate 7
--
2.39.2

5
salt-tmpfiles.d Normal file
View File

@ -0,0 +1,5 @@
# systemd-tmpfiles(8) configuration: create Salt runtime directories at boot.
# "d" = create a directory if it does not exist (ownership/mode applied).
# Type Path Mode UID GID Age Argument
# Parent runtime dir: root-owned, group salt may traverse/read (0750).
d /run/salt 0750 root salt
# Master runs as the dedicated "salt" user (see conf/master: user: salt).
d /run/salt/master 0750 salt salt
# Minion always runs as root.
d /run/salt/minion 0750 root root

6579
salt.changes Normal file

File diff suppressed because it is too large Load Diff

1448
salt.spec Normal file

File diff suppressed because it is too large Load Diff

View File

@ -0,0 +1,56 @@
From 88adb2f59137213119f1da2b6dbf6fce859fc12f Mon Sep 17 00:00:00 2001
From: Vladimir Nadvornik <nadvornik@suse.cz>
Date: Mon, 27 Jun 2022 17:00:58 +0200
Subject: [PATCH] Save log to logfile with docker.build
---
salt/modules/dockermod.py | 18 ++++++++++++++++++
1 file changed, 18 insertions(+)
diff --git a/salt/modules/dockermod.py b/salt/modules/dockermod.py
index 8b6ab8058e..f7344b66ac 100644
--- a/salt/modules/dockermod.py
+++ b/salt/modules/dockermod.py
@@ -4006,6 +4006,7 @@ def build(
fileobj=None,
dockerfile=None,
buildargs=None,
+ logfile=None,
):
"""
.. versionchanged:: 2018.3.0
@@ -4059,6 +4060,9 @@ def build(
buildargs
A dictionary of build arguments provided to the docker build process.
+ logfile
+ Path to log file. Output from build is written to this file if not None.
+
**RETURN DATA**
@@ -4133,6 +4137,20 @@ def build(
stream_data = []
for line in response:
stream_data.extend(salt.utils.json.loads(line, cls=DockerJSONDecoder))
+
+ if logfile:
+ try:
+ with salt.utils.files.fopen(logfile, "a") as f:
+ for item in stream_data:
+ try:
+ item_type = next(iter(item))
+ except StopIteration:
+ continue
+ if item_type == "stream":
+ f.write(item[item_type])
+ except OSError:
+ log.error("Unable to write logfile '%s'", logfile)
+
errors = []
# Iterate through API response and collect information
for item in stream_data:
--
2.39.2

View File

@ -0,0 +1,27 @@
From c61da0bef8d4d8394592db2f9995cdf4820c02af Mon Sep 17 00:00:00 2001
From: Alexander Graul <agraul@suse.com>
Date: Mon, 27 Feb 2023 11:35:41 +0100
Subject: [PATCH] Skip package names without colon (bsc#1208691) (#578)
Fixes a problem in `_find_ptf_packages()` when passing multiple packages to `zypperpkg.remove` / `zypperpkg.purge`. The problem occurs when a passed package is not installed, in that case the output of the `rpm` subprocess is not parsed correctly.
---
salt/modules/zypperpkg.py | 2 ++
1 file changed, 2 insertions(+)
diff --git a/salt/modules/zypperpkg.py b/salt/modules/zypperpkg.py
index 44f2cdbd3a..cdec397d69 100644
--- a/salt/modules/zypperpkg.py
+++ b/salt/modules/zypperpkg.py
@@ -2688,6 +2688,8 @@ def _find_ptf_packages(pkgs, root=None):
for line in output.splitlines():
if not line.strip():
continue
+ if ":" not in line:
+ continue
pkg, provides = line.split(":", 1)
if "ptf()" in provides:
ptfs.append(pkg)
--
2.39.2

View File

@ -0,0 +1,72 @@
From 57626d8eb77d2c559365d1df974100e474671fef Mon Sep 17 00:00:00 2001
From: Alexander Graul <agraul@suse.com>
Date: Tue, 18 Jan 2022 17:12:04 +0100
Subject: [PATCH] Switch firewalld state to use change_interface
firewalld.present state allows binding an interface to a given zone.
However, if the interface is already bound to some other zone, calling
`add_interface` will not rebind the interface but will report an
error.
Option `change_interface` however can rebind the interface from one
zone to another.
This PR adds `firewalld.change_interface` call to firewalld module
and updates `firewalld.present` state to use this call.
---
salt/modules/firewalld.py | 23 +++++++++++++++++++++++
salt/states/firewalld.py | 4 +++-
2 files changed, 26 insertions(+), 1 deletion(-)
diff --git a/salt/modules/firewalld.py b/salt/modules/firewalld.py
index 135713d851..70bc738240 100644
--- a/salt/modules/firewalld.py
+++ b/salt/modules/firewalld.py
@@ -918,6 +918,29 @@ def remove_interface(zone, interface, permanent=True):
return __firewall_cmd(cmd)
+def change_interface(zone, interface, permanent=True):
+ """
+ Change zone the interface bound to
+
+ .. versionadded:: 2019.?.?
+
+ CLI Example:
+
+ .. code-block:: bash
+
+ salt '*' firewalld.change_interface zone eth0
+ """
+ if interface in get_interfaces(zone, permanent):
+ log.info("Interface is already bound to zone.")
+
+ cmd = "--zone={} --change-interface={}".format(zone, interface)
+
+ if permanent:
+ cmd += " --permanent"
+
+ return __firewall_cmd(cmd)
+
+
def get_sources(zone, permanent=True):
"""
List sources bound to a zone
diff --git a/salt/states/firewalld.py b/salt/states/firewalld.py
index cc6eaba5c3..534b9dd62d 100644
--- a/salt/states/firewalld.py
+++ b/salt/states/firewalld.py
@@ -691,7 +691,9 @@ def _present(
for interface in new_interfaces:
if not __opts__["test"]:
try:
- __salt__["firewalld.add_interface"](name, interface, permanent=True)
+ __salt__["firewalld.change_interface"](
+ name, interface, permanent=True
+ )
except CommandExecutionError as err:
ret["comment"] = "Error: {}".format(err)
return ret
--
2.39.2

View File

@ -0,0 +1,34 @@
From 2575e64ee21f774a1efb6960972e9d476a8d5927 Mon Sep 17 00:00:00 2001
From: Bo Maryniuk <bo@suse.de>
Date: Thu, 24 Jan 2019 18:12:35 +0100
Subject: [PATCH] temporary fix: extend the whitelist of allowed commands
---
salt/auth/__init__.py | 3 +++
1 file changed, 3 insertions(+)
diff --git a/salt/auth/__init__.py b/salt/auth/__init__.py
index b87e2aff0d..331baab211 100644
--- a/salt/auth/__init__.py
+++ b/salt/auth/__init__.py
@@ -12,6 +12,7 @@ so that any external authentication system can be used inside of Salt
# 5. Cache auth token with relative data opts['token_dir']
# 6. Interface to verify tokens
+
import getpass
import logging
import random
@@ -42,6 +43,8 @@ AUTH_INTERNAL_KEYWORDS = frozenset(
"gather_job_timeout",
"kwarg",
"match",
+ "id_",
+ "force",
"metadata",
"print_event",
"raw",
--
2.39.2

View File

@ -0,0 +1,35 @@
From 78f5a76315891168d24e923d2b08211baefefb4f Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Pablo=20Su=C3=A1rez=20Hern=C3=A1ndez?=
<psuarezhernandez@suse.com>
Date: Thu, 22 Jun 2023 16:36:20 +0100
Subject: [PATCH] tornado: Fix an open redirect in StaticFileHandler
(CVE-2023-28370, bsc#1211741) (#583)
---
salt/ext/tornado/web.py | 9 +++++++++
1 file changed, 9 insertions(+)
diff --git a/salt/ext/tornado/web.py b/salt/ext/tornado/web.py
index 60bde695d3..97fadcf87d 100644
--- a/salt/ext/tornado/web.py
+++ b/salt/ext/tornado/web.py
@@ -2544,6 +2544,15 @@ class StaticFileHandler(RequestHandler):
# but there is some prefix to the path that was already
# trimmed by the routing
if not self.request.path.endswith("/"):
+ if self.request.path.startswith("//"):
+ # A redirect with two initial slashes is a "protocol-relative" URL.
+ # This means the next path segment is treated as a hostname instead
+ # of a part of the path, making this effectively an open redirect.
+ # Reject paths starting with two slashes to prevent this.
+ # This is only reachable under certain configurations.
+ raise HTTPError(
+ 403, "cannot redirect path with two initial slashes"
+ )
self.redirect(self.request.path + "/", permanent=True)
return
absolute_path = os.path.join(absolute_path, self.default_filename)
--
2.41.0

View File

@ -0,0 +1,4 @@
# Enable the transactional_update executor
module_executors:
- transactional_update
- direct_call

35
travis.yml Normal file
View File

@ -0,0 +1,35 @@
# Legacy Travis CI configuration (Python 2 era of upstream Salt).
language: python
python:
- '2.6'
- '2.7'
# System packages and test-only Python deps installed before the project itself.
before_install:
- sudo apt-get update
- sudo apt-get install --fix-broken --ignore-missing -y -o Dpkg::Options::="--force-confdef" -o Dpkg::Options::="--force-confold" swig rabbitmq-server ruby python-apt mysql-server libmysqlclient-dev
# Tags are needed for "git describe"; fetch them from upstream if this is a fork.
- (git describe && git fetch --tags) || (git remote add upstream git://github.com/saltstack/salt.git && git fetch --tags upstream)
- pip install mock
- pip install --allow-external http://dl.dropbox.com/u/174789/m2crypto-0.20.1.tar.gz
- pip install --upgrade pep8 'pylint<=1.2.0'
- pip install --upgrade coveralls
# unittest2/ordereddict backports are only required on Python 2.6.
- "if [[ $TRAVIS_PYTHON_VERSION == '2.6' ]]; then pip install unittest2 ordereddict; fi"
- pip install git+https://github.com/saltstack/salt-testing.git#egg=SaltTesting
# Runtime requirements of Salt itself.
install:
- pip install -r requirements/zeromq.txt -r requirements/cloud.txt
- pip install --allow-all-external -r requirements/opt.txt
# Lint runs are advisory: "|| echo" keeps the build green even on lint errors.
before_script:
- "/home/travis/virtualenv/python${TRAVIS_PYTHON_VERSION}/bin/pylint --rcfile=.testing.pylintrc salt/ && echo 'Finished Pylint Check Cleanly' || echo 'Finished Pylint Check With Errors'"
- "/home/travis/virtualenv/python${TRAVIS_PYTHON_VERSION}/bin/pep8 --ignore=E501,E12 salt/ && echo 'Finished PEP-8 Check Cleanly' || echo 'Finished PEP-8 Check With Errors'"
# Full destructive test suite with coverage; needs root (sudo -E keeps the venv env).
script: "sudo -E /home/travis/virtualenv/python${TRAVIS_PYTHON_VERSION}/bin/python setup.py test --runtests-opts='--run-destructive --sysinfo -v --coverage'"
after_success:
- coveralls
notifications:
  irc:
    channels: "irc.freenode.org#salt-devel"
    on_success: change
    on_failure: change

100
update-documentation.sh Normal file
View File

@ -0,0 +1,100 @@
#!/bin/bash
#
# Update html.tar.bz2 documentation tarball
# Author: Bo Maryniuk <bo@suse.de>
#
# CLI flag that tells this script to reuse a system-wide Sphinx instead of
# installing one into the temporary virtualenv (see check_env/build_virtenv).
NO_SPHINX_PARAM="--without-sphinx"
# build_virtenv ENV_DIR [NO_SPHINX_FLAG]
# Create and activate a virtualenv at ENV_DIR (sharing system site-packages,
# so a system Sphinx is visible inside it). Installs Sphinx from PIP unless a
# second argument (the --without-sphinx flag) was given.
# Side effect: activates the virtualenv in the *current* shell.
function build_virtenv() {
    virtualenv --system-site-packages $1
    source $1/bin/activate
    pip install --upgrade pip
    # No second arg => caller wants a fresh Sphinx inside the venv.
    if [ -z "$2" ]; then
        pip install -I Sphinx
    fi
}
# check_env [NO_SPHINX_FLAG]
# Sanity-check the build environment before doing any work.
# Aborts (exit 1) if a system-wide sphinx-build is present but the caller did
# not pass --without-sphinx, or if any required build tool is missing.
function check_env() {
    # A globally installed Sphinx may clash with the one installed into the
    # temporary virtualenv; refuse unless the caller opted in to reuse it.
    if [[ -z "$1" || "$1" != "$NO_SPHINX_PARAM" ]] && [ ! -z "$(which sphinx-build 2>/dev/null)" ]; then
        cat <<EOF
You've installed Sphinx globally. But it might be outdated or
clash with the version I am going to install into the temporary
virtual environment from PIP.
Please consider to remove Sphinx from your system, perhaps?
Or pass me "$NO_SPHINX_PARAM" param so I will try reusing yours
and see what happens. :)
EOF
        exit 1;
    fi
    # Hard requirements: every tool must be resolvable on PATH.
    for cmd in "make" "quilt" "virtualenv" "pip"; do
        if [ -z "$(which $cmd 2>/dev/null)" ]; then
            echo "Error: '$cmd' is still missing. Install it, please."
            exit 1;
        fi
    done
}
# quilt_setup SRC_DIR
# Unpack the sources referenced by salt.spec and apply the full quilt patch
# series. NOTE: deliberately leaves the current directory changed to SRC_DIR;
# callers (build_docs) rely on the resulting working directory.
function quilt_setup() {
    quilt setup -v salt.spec
    cd $1
    quilt push -a
}
# build_docs DOC_DIR OUT_DIR
# Build the Sphinx HTML docs in DOC_DIR (relative to the current directory,
# i.e. the source tree left behind by quilt_setup) and pack them into
# OUT_DIR/html.tar.bz2. Changes the current directory as a side effect.
function build_docs() {
    cd $1
    make html
    # .buildinfo is build-host specific and must not ship in the tarball.
    rm _build/html/.buildinfo
    cd _build/html
    # Strip execute bits on files while keeping directories traversable (+X).
    chmod -R -x+X *
    cd ..
    tar cvf - html | bzip2 > $2/html.tar.bz2
}
# write_changelog EMAIL
# Prepend a standard OBS-style changelog entry (separator, UTC timestamp,
# packager e-mail) to salt.changes, keeping all previous entries below it.
# NOTE(review): currently unused by the main flow below — presumably kept for
# manual invocation; confirm before removing.
function write_changelog() {
    mv salt.changes salt.changes.previous
    TIME=$(date -u +'%a %b %d %T %Z %Y')
    MAIL=$1
    SEP="-------------------------------------------------------------------"
    cat <<EOF > salt.changes
$SEP
$TIME - $MAIL
- Updated html.tar.bz2 documentation tarball.
EOF
    cat salt.changes.previous >> salt.changes
    rm salt.changes.previous
}
if [ -z "$1" ]; then
echo "Usage: $0 <your e-mail> [--without-sphinx]"
exit 1;
fi
check_env $2;
START=$(pwd)
V_ENV="sphinx_doc_gen"
V_TMP=$(mktemp -d)
for f in "salt.spec" "v*tar.gz" "*"; do
cp -v $f $V_TMP
done
cd $V_TMP;
build_virtenv $V_ENV $2;
SRC_DIR="salt-$(cat salt.spec | grep ^Version: | cut -d: -f2 | sed -e 's/[[:blank:]]//g')-suse";
quilt_setup $SRC_DIR
build_docs doc $V_TMP
cd $START
mv $V_TMP/html.tar.bz2 $START
rm -rf $V_TMP
echo "Done"
echo "---------------"

View File

@ -0,0 +1,98 @@
From b6bf7e1cb3efedbb651b7d6c5f36b73d88cfa1c0 Mon Sep 17 00:00:00 2001
From: Victor Zhestkov <35733135+vzhestkov@users.noreply.github.com>
Date: Fri, 9 Apr 2021 16:01:32 +0300
Subject: [PATCH] Update target fix for salt-ssh to process targets list
(bsc#1179831) (#336)
* Update target fix for salt-ssh to process targets list (bsc#1179831)
* Improvement for fixing (bsc#1179831)
Regression fix of salt-ssh on processing targets (#353)
---
salt/client/ssh/__init__.py | 46 +++++++++++++++++++++++--------------
1 file changed, 29 insertions(+), 17 deletions(-)
diff --git a/salt/client/ssh/__init__.py b/salt/client/ssh/__init__.py
index 049baff51a..19089ce8ad 100644
--- a/salt/client/ssh/__init__.py
+++ b/salt/client/ssh/__init__.py
@@ -338,7 +338,7 @@ class SSH(MultiprocessingStateMixin):
if not self.opts.get("ssh_cli_tgt"):
self.opts["ssh_cli_tgt"] = self.opts.get("tgt", "")
hostname = self.opts.get("ssh_cli_tgt", "")
- if "@" in hostname:
+ if isinstance(hostname, str) and "@" in hostname:
user, hostname = hostname.split("@", 1)
else:
user = self.opts.get("ssh_user")
@@ -393,7 +393,7 @@ class SSH(MultiprocessingStateMixin):
self.__parsed_rosters[self.ROSTER_UPDATE_FLAG] = False
return
- def _update_roster(self):
+ def _update_roster(self, hostname=None, user=None):
"""
Update default flat roster with the passed in information.
:return:
@@ -407,8 +407,8 @@ class SSH(MultiprocessingStateMixin):
" host: {hostname}\n user: {user}\n passwd: {passwd}\n".format(
s_user=getpass.getuser(),
s_time=datetime.datetime.utcnow().isoformat(),
- hostname=self.opts.get("tgt", ""),
- user=self.opts.get("ssh_user", ""),
+ hostname=hostname if hostname else self.opts.get("tgt", ""),
+ user=user if user else self.opts.get("ssh_user", ""),
passwd=self.opts.get("ssh_passwd", ""),
)
)
@@ -425,20 +425,32 @@ class SSH(MultiprocessingStateMixin):
Uptade targets in case hostname was directly passed without the roster.
:return:
"""
- hostname = self.parse_tgt["hostname"]
+ hosts = self.parse_tgt["hostname"]
user = self.parse_tgt["user"]
- if hostname == "*":
- hostname = ""
-
- if salt.utils.network.is_reachable_host(hostname):
- self.opts["tgt"] = hostname
- self.targets[hostname] = {
- "passwd": self.opts.get("ssh_passwd", ""),
- "host": hostname,
- "user": user,
- }
- if self.opts.get("ssh_update_roster"):
- self._update_roster()
+
+ if not isinstance(hosts, (list, tuple)):
+ hosts = list([hosts])
+ _hosts = list()
+ for hostname in hosts:
+ _user = user
+ if "@" in hostname:
+ _user, hostname = hostname.split("@", 1)
+ if hostname == "*":
+ continue
+ if salt.utils.network.is_reachable_host(hostname):
+ _hosts.append(hostname)
+ self.targets[hostname] = {
+ "passwd": self.opts.get("ssh_passwd", ""),
+ "host": hostname,
+ "user": _user,
+ }
+ if self.opts.get("ssh_update_roster"):
+ self._update_roster(hostname=hostname, user=_user)
+
+ if self.tgt_type == "list":
+ self.opts["tgt"] = _hosts
+ elif _hosts:
+ self.opts["tgt"] = _hosts[0]
def get_pubkey(self):
"""
--
2.39.2

View File

@ -0,0 +1,124 @@
From ef6da7d43fcf51a7d705422624c1e7a94b1297f2 Mon Sep 17 00:00:00 2001
From: Alexander Graul <agraul@suse.com>
Date: Tue, 18 Jan 2022 16:36:57 +0100
Subject: [PATCH] Use Adler32 algorithm to compute string checksums
Generate the same numeric value across all Python versions and platforms
Re-add getting hash by Python shell-out method
Add an option to choose between default hashing, Adler32 or CRC32 algorithms
Set default config option for server_id hashing to False on minion
Choose CRC method, default to faster but less reliable "adler32", if crc is in use
Add warning for Sodium.
Move server_id deprecation warning to reduce log spamming (bsc#1135567) (bsc#1135732)
Remove deprecated warning that breaks minion execution when "server_id_use_crc" opts are missing
---
salt/config/__init__.py | 4 ++++
salt/grains/core.py | 48 +++++++++++++++++++++++++++++++++++++----
2 files changed, 48 insertions(+), 4 deletions(-)
diff --git a/salt/config/__init__.py b/salt/config/__init__.py
index 1632663474..43182f3f92 100644
--- a/salt/config/__init__.py
+++ b/salt/config/__init__.py
@@ -991,6 +991,9 @@ VALID_OPTS = immutabletypes.freeze(
"maintenance_interval": int,
# Fileserver process restart interval
"fileserver_interval": int,
+ # Use Adler32 hashing algorithm for server_id (default False until Sodium, "adler32" after)
+ # Possible values are: False, adler32, crc32
+ "server_id_use_crc": (bool, str),
}
)
@@ -1296,6 +1299,7 @@ DEFAULT_MINION_OPTS = immutabletypes.freeze(
"global_state_conditions": None,
"reactor_niceness": None,
"fips_mode": False,
+ "server_id_use_crc": False,
}
)
diff --git a/salt/grains/core.py b/salt/grains/core.py
index 1199ad274f..5c12556346 100644
--- a/salt/grains/core.py
+++ b/salt/grains/core.py
@@ -21,6 +21,7 @@ import subprocess
import sys
import time
import uuid
+import zlib
from errno import EACCES, EPERM
import salt.exceptions
@@ -3382,6 +3383,36 @@ def _hw_data(osdata):
return grains
+def _get_hash_by_shell():
+ """
+ Shell-out Python 3 for compute reliable hash
+ :return:
+ """
+ id_ = __opts__.get("id", "")
+ id_hash = None
+ py_ver = sys.version_info[:2]
+ if py_ver >= (3, 3):
+ # Python 3.3 enabled hash randomization, so we need to shell out to get
+ # a reliable hash.
+ id_hash = __salt__["cmd.run"](
+ [sys.executable, "-c", 'print(hash("{}"))'.format(id_)],
+ env={"PYTHONHASHSEED": "0"},
+ )
+ try:
+ id_hash = int(id_hash)
+ except (TypeError, ValueError):
+ log.debug(
+ "Failed to hash the ID to get the server_id grain. Result of hash command: %s",
+ id_hash,
+ )
+ id_hash = None
+ if id_hash is None:
+ # Python < 3.3 or error encountered above
+ id_hash = hash(id_)
+
+ return abs(id_hash % (2 ** 31))
+
+
def get_server_id():
"""
Provides an integer based on the FQDN of a machine.
@@ -3392,10 +3423,19 @@ def get_server_id():
# server_id
if salt.utils.platform.is_proxy():
- return {}
- id_ = __opts__.get("id", "")
- hash_ = int(hashlib.sha256(id_.encode()).hexdigest(), 16)
- return {"server_id": abs(hash_ % (2**31))}
+ server_id = {}
+ else:
+ use_crc = __opts__.get("server_id_use_crc")
+ if bool(use_crc):
+ id_hash = (
+ getattr(zlib, use_crc, zlib.adler32)(__opts__.get("id", "").encode())
+ & 0xFFFFFFFF
+ )
+ else:
+ id_hash = _get_hash_by_shell()
+ server_id = {"server_id": id_hash}
+
+ return server_id
def get_master():
--
2.39.2

View File

@ -0,0 +1,27 @@
From 578932e56be4b4151aa33bd25997c916b0e00a04 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Pablo=20Su=C3=A1rez=20Hern=C3=A1ndez?=
<psuarezhernandez@suse.com>
Date: Wed, 4 Jan 2023 13:11:50 +0000
Subject: [PATCH] Use RLock to avoid deadlocks in salt-ssh
---
salt/loader/__init__.py | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/salt/loader/__init__.py b/salt/loader/__init__.py
index bbe4269839..b41cc64b8e 100644
--- a/salt/loader/__init__.py
+++ b/salt/loader/__init__.py
@@ -82,7 +82,7 @@ SALT_INTERNAL_LOADERS_PATHS = (
str(SALT_BASE_PATH / "wheel"),
)
-LOAD_LOCK = threading.Lock()
+LOAD_LOCK = threading.RLock()
def LazyLoader(*args, **kwargs):
--
2.39.2

View File

@ -0,0 +1,375 @@
From b0891f83afa354c4b1f803af8a679ecf5a7fb63c Mon Sep 17 00:00:00 2001
From: Victor Zhestkov <vzhestkov@suse.com>
Date: Mon, 27 Jun 2022 17:59:24 +0300
Subject: [PATCH] Use Salt Bundle in dockermod
* Use Salt Bundle for salt calls in dockermod
* Add test of performing a call with the Salt Bundle
---
salt/modules/dockermod.py | 197 +++++++++++++++---
.../unit/modules/dockermod/test_module.py | 78 ++++++-
2 files changed, 241 insertions(+), 34 deletions(-)
diff --git a/salt/modules/dockermod.py b/salt/modules/dockermod.py
index 6870c26b0e..8b6ab8058e 100644
--- a/salt/modules/dockermod.py
+++ b/salt/modules/dockermod.py
@@ -201,14 +201,19 @@ import copy
import fnmatch
import functools
import gzip
+import hashlib
import json
import logging
import os
+import pathlib
import pipes
import re
import shutil
import string
import subprocess
+import sys
+import tarfile
+import tempfile
import time
import uuid
@@ -6698,6 +6703,111 @@ def _compile_state(sls_opts, mods=None):
return st_.state.compile_high_data(high_data)
+def gen_venv_tar(cachedir, venv_dest_dir, venv_name):
+ """
+ Generate tarball with the Salt Bundle if required and return the path to it
+ """
+ exec_path = pathlib.Path(sys.executable).parts
+ venv_dir_name = "venv-salt-minion"
+ if venv_dir_name not in exec_path:
+ return None
+
+ venv_tar = os.path.join(cachedir, "venv-salt.tgz")
+ venv_hash = os.path.join(cachedir, "venv-salt.hash")
+ venv_lock = os.path.join(cachedir, ".venv-salt.lock")
+
+ venv_path = os.path.join(*exec_path[0 : exec_path.index(venv_dir_name)])
+
+ with __utils__["files.flopen"](venv_lock, "w"):
+ start_dir = os.getcwd()
+ venv_hash_file = os.path.join(venv_path, venv_dir_name, "venv-hash.txt")
+ try:
+ with __utils__["files.fopen"](venv_hash_file, "r") as fh:
+ venv_hash_src = fh.readline().strip()
+ except Exception: # pylint: disable=broad-except
+ # It makes no sense what caused the exception
+ # Just calculate the hash different way
+ for cmd in ("rpm -qi venv-salt-minion", "dpkg -s venv-salt-minion"):
+ ret = __salt__["cmd.run_all"](
+ cmd,
+ python_shell=True,
+ clean_env=True,
+ env={"LANG": "C", "LANGUAGE": "C", "LC_ALL": "C"},
+ )
+ if ret.get("retcode") == 0 and ret.get("stdout"):
+ venv_hash_src = hashlib.sha256(
+ "{}\n".format(ret.get("stdout")).encode()
+ ).hexdigest()
+ break
+ try:
+ with __utils__["files.fopen"](venv_hash, "r") as fh:
+ venv_hash_dest = fh.readline().strip()
+ except Exception: # pylint: disable=broad-except
+ # It makes no sense what caused the exception
+ # Set the hash to impossible value to force new tarball creation
+ venv_hash_dest = "UNKNOWN"
+ if venv_hash_src == venv_hash_dest and os.path.isfile(venv_tar):
+ return venv_tar
+ try:
+ tfd, tmp_venv_tar = tempfile.mkstemp(
+ dir=cachedir,
+ prefix=".venv-",
+ suffix=os.path.splitext(venv_tar)[1],
+ )
+ os.close(tfd)
+
+ os.chdir(venv_path)
+ tfp = tarfile.open(tmp_venv_tar, "w:gz")
+
+ for root, dirs, files in salt.utils.path.os_walk(
+ venv_dir_name, followlinks=True
+ ):
+ for name in files:
+ if name == "python" and pathlib.Path(root).parts == (
+ venv_dir_name,
+ "bin",
+ ):
+ tfd, tmp_python_file = tempfile.mkstemp(
+ dir=cachedir,
+ prefix=".python-",
+ )
+ os.close(tfd)
+ try:
+ with __utils__["files.fopen"](
+ os.path.join(root, name), "r"
+ ) as fh_in:
+ with __utils__["files.fopen"](
+ tmp_python_file, "w"
+ ) as fh_out:
+ rd_lines = fh_in.readlines()
+ rd_lines = [
+ 'export VIRTUAL_ENV="{}"\n'.format(
+ os.path.join(venv_dest_dir, venv_name)
+ )
+ if line.startswith("export VIRTUAL_ENV=")
+ else line
+ for line in rd_lines
+ ]
+ fh_out.write("".join(rd_lines))
+ os.chmod(tmp_python_file, 0o755)
+ tfp.add(tmp_python_file, arcname=os.path.join(root, name))
+ continue
+ finally:
+ if os.path.isfile(tmp_python_file):
+ os.remove(tmp_python_file)
+ if not name.endswith((".pyc", ".pyo")):
+ tfp.add(os.path.join(root, name))
+
+ tfp.close()
+ shutil.move(tmp_venv_tar, venv_tar)
+ with __utils__["files.fopen"](venv_hash, "w") as fh:
+ fh.write("{}\n".format(venv_hash_src))
+ finally:
+ os.chdir(start_dir)
+
+ return venv_tar
+
+
def call(name, function, *args, **kwargs):
"""
Executes a Salt function inside a running container
@@ -6733,47 +6843,68 @@ def call(name, function, *args, **kwargs):
if function is None:
raise CommandExecutionError("Missing function parameter")
- # move salt into the container
- thin_path = __utils__["thin.gen_thin"](
- __opts__["cachedir"],
- extra_mods=__salt__["config.option"]("thin_extra_mods", ""),
- so_mods=__salt__["config.option"]("thin_so_mods", ""),
- )
- ret = copy_to(
- name, thin_path, os.path.join(thin_dest_path, os.path.basename(thin_path))
- )
+ venv_dest_path = "/var/tmp"
+ venv_name = "venv-salt-minion"
+ venv_tar = gen_venv_tar(__opts__["cachedir"], venv_dest_path, venv_name)
- # figure out available python interpreter inside the container (only Python3)
- pycmds = ("python3", "/usr/libexec/platform-python")
- container_python_bin = None
- for py_cmd in pycmds:
- cmd = [py_cmd] + ["--version"]
- ret = run_all(name, subprocess.list2cmdline(cmd))
- if ret["retcode"] == 0:
- container_python_bin = py_cmd
- break
- if not container_python_bin:
- raise CommandExecutionError(
- "Python interpreter cannot be found inside the container. Make sure Python is installed in the container"
+ if venv_tar is not None:
+ venv_python_bin = os.path.join(venv_dest_path, venv_name, "bin", "python")
+ dest_venv_tar = os.path.join(venv_dest_path, os.path.basename(venv_tar))
+ copy_to(name, venv_tar, dest_venv_tar, overwrite=True, makedirs=True)
+ run_all(
+ name,
+ subprocess.list2cmdline(
+ ["tar", "zxf", dest_venv_tar, "-C", venv_dest_path]
+ ),
+ )
+ run_all(name, subprocess.list2cmdline(["rm", "-f", dest_venv_tar]))
+ container_python_bin = venv_python_bin
+ thin_dest_path = os.path.join(venv_dest_path, venv_name)
+ thin_salt_call = os.path.join(thin_dest_path, "bin", "salt-call")
+ else:
+ # move salt into the container
+ thin_path = __utils__["thin.gen_thin"](
+ __opts__["cachedir"],
+ extra_mods=__salt__["config.option"]("thin_extra_mods", ""),
+ so_mods=__salt__["config.option"]("thin_so_mods", ""),
)
- # untar archive
- untar_cmd = [
- container_python_bin,
- "-c",
- 'import tarfile; tarfile.open("{0}/{1}").extractall(path="{0}")'.format(
- thin_dest_path, os.path.basename(thin_path)
- ),
- ]
- ret = run_all(name, subprocess.list2cmdline(untar_cmd))
- if ret["retcode"] != 0:
- return {"result": False, "comment": ret["stderr"]}
+ ret = copy_to(
+ name, thin_path, os.path.join(thin_dest_path, os.path.basename(thin_path))
+ )
+
+ # figure out available python interpreter inside the container (only Python3)
+ pycmds = ("python3", "/usr/libexec/platform-python")
+ container_python_bin = None
+ for py_cmd in pycmds:
+ cmd = [py_cmd] + ["--version"]
+ ret = run_all(name, subprocess.list2cmdline(cmd))
+ if ret["retcode"] == 0:
+ container_python_bin = py_cmd
+ break
+ if not container_python_bin:
+ raise CommandExecutionError(
+ "Python interpreter cannot be found inside the container. Make sure Python is installed in the container"
+ )
+
+ # untar archive
+ untar_cmd = [
+ container_python_bin,
+ "-c",
+ 'import tarfile; tarfile.open("{0}/{1}").extractall(path="{0}")'.format(
+ thin_dest_path, os.path.basename(thin_path)
+ ),
+ ]
+ ret = run_all(name, subprocess.list2cmdline(untar_cmd))
+ if ret["retcode"] != 0:
+ return {"result": False, "comment": ret["stderr"]}
+ thin_salt_call = os.path.join(thin_dest_path, "salt-call")
try:
salt_argv = (
[
container_python_bin,
- os.path.join(thin_dest_path, "salt-call"),
+ thin_salt_call,
"--metadata",
"--local",
"--log-file",
diff --git a/tests/pytests/unit/modules/dockermod/test_module.py b/tests/pytests/unit/modules/dockermod/test_module.py
index 8fb7806497..1ac7dff52a 100644
--- a/tests/pytests/unit/modules/dockermod/test_module.py
+++ b/tests/pytests/unit/modules/dockermod/test_module.py
@@ -3,6 +3,7 @@ Unit tests for the docker module
"""
import logging
+import sys
import pytest
@@ -26,6 +27,7 @@ def configure_loader_modules(minion_opts):
whitelist=[
"args",
"docker",
+ "files",
"json",
"state",
"thin",
@@ -880,13 +882,16 @@ def test_call_success():
client = Mock()
client.put_archive = Mock()
get_client_mock = MagicMock(return_value=client)
+ gen_venv_tar_mock = MagicMock(return_value=None)
context = {"docker.exec_driver": "docker-exec"}
salt_dunder = {"config.option": docker_config_mock}
with patch.object(docker_mod, "run_all", docker_run_all_mock), patch.object(
docker_mod, "copy_to", docker_copy_to_mock
- ), patch.object(docker_mod, "_get_client", get_client_mock), patch.dict(
+ ), patch.object(docker_mod, "_get_client", get_client_mock), patch.object(
+ docker_mod, "gen_venv_tar", gen_venv_tar_mock
+ ), patch.dict(
docker_mod.__opts__, {"cachedir": "/tmp"}
), patch.dict(
docker_mod.__salt__, salt_dunder
@@ -931,6 +936,11 @@ def test_call_success():
!= docker_run_all_mock.mock_calls[9][1][1]
)
+ # check the parameters of gen_venv_tar call
+ assert gen_venv_tar_mock.mock_calls[0][1][0] == "/tmp"
+ assert gen_venv_tar_mock.mock_calls[0][1][1] == "/var/tmp"
+ assert gen_venv_tar_mock.mock_calls[0][1][2] == "venv-salt-minion"
+
assert {"retcode": 0, "comment": "container cmd"} == ret
@@ -1352,3 +1362,69 @@ def test_port():
"bar": {"6666/tcp": ports["bar"]["6666/tcp"]},
"baz": {},
}
+
+
+@pytest.mark.slow_test
+def test_call_with_gen_venv_tar():
+ """
+ test module calling inside containers with the Salt Bundle
+ """
+ ret = None
+ docker_run_all_mock = MagicMock(
+ return_value={
+ "retcode": 0,
+ "stdout": '{"retcode": 0, "comment": "container cmd"}',
+ "stderr": "err",
+ }
+ )
+ docker_copy_to_mock = MagicMock(return_value={"retcode": 0})
+ docker_config_mock = MagicMock(return_value="")
+ docker_cmd_run_mock = MagicMock(
+ return_value={
+ "retcode": 0,
+ "stdout": "test",
+ }
+ )
+ client = Mock()
+ client.put_archive = Mock()
+ get_client_mock = MagicMock(return_value=client)
+
+ context = {"docker.exec_driver": "docker-exec"}
+ salt_dunder = {
+ "config.option": docker_config_mock,
+ "cmd.run_all": docker_cmd_run_mock,
+ }
+
+ with patch.object(docker_mod, "run_all", docker_run_all_mock), patch.object(
+ docker_mod, "copy_to", docker_copy_to_mock
+ ), patch.object(docker_mod, "_get_client", get_client_mock), patch.object(
+ sys, "executable", "/tmp/venv-salt-minion/bin/python"
+ ), patch.dict(
+ docker_mod.__opts__, {"cachedir": "/tmp"}
+ ), patch.dict(
+ docker_mod.__salt__, salt_dunder
+ ), patch.dict(
+ docker_mod.__context__, context
+ ):
+ ret = docker_mod.call("ID", "test.arg", 1, 2, arg1="val1")
+
+ # Check that the directory is different each time
+ # [ call(name, [args]), ...
+ assert "mkdir" in docker_run_all_mock.mock_calls[0][1][1]
+
+ assert (
+ "tar zxf /var/tmp/venv-salt.tgz -C /var/tmp"
+ == docker_run_all_mock.mock_calls[1][1][1]
+ )
+
+ assert docker_run_all_mock.mock_calls[3][1][1].startswith(
+ "/var/tmp/venv-salt-minion/bin/python /var/tmp/venv-salt-minion/bin/salt-call "
+ )
+
+ # check remove the salt bundle tarball
+ assert docker_run_all_mock.mock_calls[2][1][1] == "rm -f /var/tmp/venv-salt.tgz"
+
+ # check directory cleanup
+ assert docker_run_all_mock.mock_calls[4][1][1] == "rm -rf /var/tmp/venv-salt-minion"
+
+ assert {"retcode": 0, "comment": "container cmd"} == ret
--
2.39.2

BIN
v3006.0.tar.gz (Stored with Git LFS) Normal file

Binary file not shown.

View File

@ -0,0 +1,30 @@
From cc161359ef7432960ef2f0b8f816986fa6798403 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Pablo=20Su=C3=A1rez=20Hern=C3=A1ndez?=
<psuarezhernandez@suse.com>
Date: Wed, 20 Sep 2023 13:07:29 +0100
Subject: [PATCH] Write salt version before building when using
--with-salt-version (bsc#1215489) (#604)
---
setup.py | 4 ++++
1 file changed, 4 insertions(+)
diff --git a/setup.py b/setup.py
index 8ca8a66d45..cf7e54f930 100755
--- a/setup.py
+++ b/setup.py
@@ -591,6 +591,10 @@ HOME_DIR = {home_dir!r}
class Build(build):
def run(self):
+ if getattr(self.distribution, "with_salt_version", False):
+ self.distribution.salt_version_hardcoded_path = SALT_VERSION_HARDCODED
+ self.run_command("write_salt_version")
+
# Run build.run function
build.run(self)
salt_build_ver_file = os.path.join(self.build_lib, "salt", "_version.txt")
--
2.41.0

431
x509-fixes-111.patch Normal file
View File

@ -0,0 +1,431 @@
From 094b34760a85c3ee27bf64783624b17bd3bbca0a Mon Sep 17 00:00:00 2001
From: Alexander Graul <agraul@suse.com>
Date: Tue, 18 Jan 2022 16:38:17 +0100
Subject: [PATCH] X509 fixes (#111)
* Return proper content type for the x509 certificate
* Remove parenthesis
* Remove extra-variables during the import
* Comment fix
* Remove double returns
* Change log level from trace to debug
* Remove 'pass' and add logging instead
* Remove unnecessary wrapping
Remove wrapping
* PEP 8: line too long
PEP8: line too long
* PEP8: Redefine RSAError variable in except clause
* Do not return None if name was not found
* Do not return None if no matched minions found
* Fix unit tests
Fix for log checking in x509 test
We are logging in debug and not in trace mode here.
---
salt/modules/publish.py | 2 +
salt/modules/x509.py | 93 ++++++++++++++++-----------------
salt/states/x509.py | 74 ++++++++++++++++++++++++--
tests/unit/modules/test_x509.py | 6 +--
4 files changed, 120 insertions(+), 55 deletions(-)
diff --git a/salt/modules/publish.py b/salt/modules/publish.py
index cc424cc383..a82cb3ac98 100644
--- a/salt/modules/publish.py
+++ b/salt/modules/publish.py
@@ -199,6 +199,8 @@ def _publish(
else:
return ret
+ return {}
+
def publish(
tgt, fun, arg=None, tgt_type="glob", returner="", timeout=5, via_master=None
diff --git a/salt/modules/x509.py b/salt/modules/x509.py
index 57c381ea38..6699a5d363 100644
--- a/salt/modules/x509.py
+++ b/salt/modules/x509.py
@@ -42,16 +42,13 @@ from salt.utils.odict import OrderedDict
try:
import M2Crypto
-
- HAS_M2 = True
except ImportError:
- HAS_M2 = False
+ M2Crypto = None
+
try:
import OpenSSL
-
- HAS_OPENSSL = True
except ImportError:
- HAS_OPENSSL = False
+ OpenSSL = None
__virtualname__ = "x509"
@@ -94,15 +91,10 @@ def __virtual__():
# salt.features appears to not be setup when invoked via peer publishing
if __opts__.get("features", {}).get("x509_v2"):
return (False, "Superseded, using x509_v2")
- if HAS_M2:
- salt.utils.versions.warn_until(
- "Potassium",
- "The x509 modules are deprecated. Please migrate to the replacement "
- "modules (x509_v2). They are the default from Salt 3008 (Argon) onwards.",
- )
- return __virtualname__
- else:
- return (False, "Could not load x509 module, m2crypto unavailable")
+ return (
+ __virtualname__ if M2Crypto is not None else False,
+ "Could not load x509 module, m2crypto unavailable",
+ )
class _Ctx(ctypes.Structure):
@@ -160,8 +152,8 @@ def _new_extension(name, value, critical=0, issuer=None, _pyfree=1):
x509_ext_ptr = M2Crypto.m2.x509v3_ext_conf(None, ctx, name, value)
lhash = None
except AttributeError:
- lhash = M2Crypto.m2.x509v3_lhash()
- ctx = M2Crypto.m2.x509v3_set_conf_lhash(lhash)
+ lhash = M2Crypto.m2.x509v3_lhash() # pylint: disable=no-member
+ ctx = M2Crypto.m2.x509v3_set_conf_lhash(lhash) # pylint: disable=no-member
# ctx not zeroed
_fix_ctx(ctx, issuer)
x509_ext_ptr = M2Crypto.m2.x509v3_ext_conf(lhash, ctx, name, value)
@@ -300,7 +292,7 @@ def _get_signing_policy(name):
signing_policy = policies.get(name)
if signing_policy:
return signing_policy
- return __salt__["config.get"]("x509_signing_policies", {}).get(name)
+ return __salt__["config.get"]("x509_signing_policies", {}).get(name) or {}
def _pretty_hex(hex_str):
@@ -338,9 +330,11 @@ def _text_or_file(input_):
"""
if _isfile(input_):
with salt.utils.files.fopen(input_) as fp_:
- return salt.utils.stringutils.to_str(fp_.read())
+ out = salt.utils.stringutils.to_str(fp_.read())
else:
- return salt.utils.stringutils.to_str(input_)
+ out = salt.utils.stringutils.to_str(input_)
+
+ return out
def _parse_subject(subject):
@@ -359,7 +353,7 @@ def _parse_subject(subject):
ret_list.append((nid_num, nid_name, val))
nids.append(nid_num)
except TypeError as err:
- log.trace("Missing attribute '%s'. Error: %s", nid_name, err)
+ log.debug("Missing attribute '%s'. Error: %s", nid_name, err)
for nid_num, nid_name, val in sorted(ret_list):
ret[nid_name] = val
return ret
@@ -557,8 +551,8 @@ def get_pem_entries(glob_path):
if os.path.isfile(path):
try:
ret[path] = get_pem_entry(text=path)
- except ValueError:
- pass
+ except ValueError as err:
+ log.debug("Unable to get PEM entries from %s: %s", path, err)
return ret
@@ -636,8 +630,8 @@ def read_certificates(glob_path):
if os.path.isfile(path):
try:
ret[path] = read_certificate(certificate=path)
- except ValueError:
- pass
+ except ValueError as err:
+ log.debug("Unable to read certificate %s: %s", path, err)
return ret
@@ -667,10 +661,9 @@ def read_csr(csr):
"Subject": _parse_subject(csr.get_subject()),
"Subject Hash": _dec2hex(csr.get_subject().as_hash()),
"Public Key Hash": hashlib.sha1(csr.get_pubkey().get_modulus()).hexdigest(),
+ "X509v3 Extensions": _get_csr_extensions(csr),
}
- ret["X509v3 Extensions"] = _get_csr_extensions(csr)
-
return ret
@@ -980,7 +973,7 @@ def create_crl(
# pyOpenSSL Note due to current limitations in pyOpenSSL it is impossible
# to specify a digest For signing the CRL. This will hopefully be fixed
# soon: https://github.com/pyca/pyopenssl/pull/161
- if not HAS_OPENSSL:
+ if OpenSSL is None:
raise salt.exceptions.SaltInvocationError(
"Could not load OpenSSL module, OpenSSL unavailable"
)
@@ -1131,6 +1124,7 @@ def get_signing_policy(signing_policy_name):
signing_policy = _get_signing_policy(signing_policy_name)
if not signing_policy:
return "Signing policy {} does not exist.".format(signing_policy_name)
+
if isinstance(signing_policy, list):
dict_ = {}
for item in signing_policy:
@@ -1147,7 +1141,7 @@ def get_signing_policy(signing_policy_name):
signing_policy["signing_cert"], "CERTIFICATE"
)
except KeyError:
- pass
+ log.debug('Unable to get "certificate" PEM entry')
return signing_policy
@@ -1782,7 +1776,8 @@ def create_csr(path=None, text=False, **kwargs):
)
)
- for entry in sorted(subject.nid):
+ # pylint: disable=unused-variable
+ for entry, num in subject.nid.items():
if entry in kwargs:
setattr(subject, entry, kwargs[entry])
@@ -1818,7 +1813,6 @@ def create_csr(path=None, text=False, **kwargs):
extstack.push(ext)
csr.add_extensions(extstack)
-
csr.sign(
_get_private_key_obj(
kwargs["private_key"], passphrase=kwargs["private_key_passphrase"]
@@ -1826,10 +1820,11 @@ def create_csr(path=None, text=False, **kwargs):
kwargs["algorithm"],
)
- if path:
- return write_pem(text=csr.as_pem(), path=path, pem_type="CERTIFICATE REQUEST")
- else:
- return csr.as_pem()
+ return (
+ write_pem(text=csr.as_pem(), path=path, pem_type="CERTIFICATE REQUEST")
+ if path
+ else csr.as_pem()
+ )
def verify_private_key(private_key, public_key, passphrase=None):
@@ -1854,7 +1849,7 @@ def verify_private_key(private_key, public_key, passphrase=None):
salt '*' x509.verify_private_key private_key=/etc/pki/myca.key \\
public_key=/etc/pki/myca.crt
"""
- return bool(get_public_key(private_key, passphrase) == get_public_key(public_key))
+ return get_public_key(private_key, passphrase) == get_public_key(public_key)
def verify_signature(
@@ -1910,7 +1905,10 @@ def verify_crl(crl, cert):
salt '*' x509.verify_crl crl=/etc/pki/myca.crl cert=/etc/pki/myca.crt
"""
if not salt.utils.path.which("openssl"):
- raise salt.exceptions.SaltInvocationError("openssl binary not found in path")
+ raise salt.exceptions.SaltInvocationError(
+ 'External command "openssl" not found'
+ )
+
crltext = _text_or_file(crl)
crltext = get_pem_entry(crltext, pem_type="X509 CRL")
crltempfile = tempfile.NamedTemporaryFile(delete=True)
@@ -1970,8 +1968,9 @@ def expired(certificate):
ret["expired"] = True
else:
ret["expired"] = False
- except ValueError:
- pass
+ except ValueError as err:
+ log.debug("Failed to get data of expired certificate: %s", err)
+ log.trace(err, exc_info=True)
return ret
@@ -1994,6 +1993,7 @@ def will_expire(certificate, days):
salt '*' x509.will_expire "/etc/pki/mycert.crt" days=30
"""
+ ts_pt = "%Y-%m-%d %H:%M:%S"
ret = {}
if os.path.isfile(certificate):
@@ -2007,14 +2007,11 @@ def will_expire(certificate, days):
_expiration_date = cert.get_not_after().get_datetime()
ret["cn"] = _parse_subject(cert.get_subject())["CN"]
-
- if _expiration_date.strftime("%Y-%m-%d %H:%M:%S") <= _check_time.strftime(
- "%Y-%m-%d %H:%M:%S"
- ):
- ret["will_expire"] = True
- else:
- ret["will_expire"] = False
- except ValueError:
- pass
+ ret["will_expire"] = _expiration_date.strftime(
+ ts_pt
+ ) <= _check_time.strftime(ts_pt)
+ except ValueError as err:
+ log.debug("Unable to return details of a sertificate expiration: %s", err)
+ log.trace(err, exc_info=True)
return ret
diff --git a/salt/states/x509.py b/salt/states/x509.py
index aebbc4cc82..f9cbec87f9 100644
--- a/salt/states/x509.py
+++ b/salt/states/x509.py
@@ -192,11 +192,12 @@ import re
import salt.exceptions
import salt.utils.versions
from salt.features import features
+import salt.utils.stringutils
try:
from M2Crypto.RSA import RSAError
except ImportError:
- pass
+ RSAError = Exception("RSA Error")
log = logging.getLogger(__name__)
@@ -215,7 +216,7 @@ def __virtual__():
)
return "x509"
else:
- return (False, "Could not load x509 state: m2crypto unavailable")
+ return False, "Could not load x509 state: the x509 is not available"
def _revoked_to_list(revs):
@@ -704,7 +705,70 @@ def certificate_managed(name, days_remaining=90, append_certs=None, **kwargs):
"Old": invalid_reason,
"New": "Certificate will be valid and up to date",
}
- return ret
+ private_key_args.update(managed_private_key)
+ kwargs["public_key_passphrase"] = private_key_args["passphrase"]
+
+ if private_key_args["new"]:
+ rotate_private_key = True
+ private_key_args["new"] = False
+
+ if _check_private_key(
+ private_key_args["name"],
+ bits=private_key_args["bits"],
+ passphrase=private_key_args["passphrase"],
+ new=private_key_args["new"],
+ overwrite=private_key_args["overwrite"],
+ ):
+ private_key = __salt__["x509.get_pem_entry"](
+ private_key_args["name"], pem_type="RSA PRIVATE KEY"
+ )
+ else:
+ new_private_key = True
+ private_key = __salt__["x509.create_private_key"](
+ text=True,
+ bits=private_key_args["bits"],
+ passphrase=private_key_args["passphrase"],
+ cipher=private_key_args["cipher"],
+ verbose=private_key_args["verbose"],
+ )
+
+ kwargs["public_key"] = private_key
+
+ current_days_remaining = 0
+ current_comp = {}
+
+ if os.path.isfile(name):
+ try:
+ current = __salt__["x509.read_certificate"](certificate=name)
+ current_comp = copy.deepcopy(current)
+ if "serial_number" not in kwargs:
+ current_comp.pop("Serial Number")
+ if "signing_cert" not in kwargs:
+ try:
+ current_comp["X509v3 Extensions"][
+ "authorityKeyIdentifier"
+ ] = re.sub(
+ r"serial:([0-9A-F]{2}:)*[0-9A-F]{2}",
+ "serial:--",
+ current_comp["X509v3 Extensions"]["authorityKeyIdentifier"],
+ )
+ except KeyError:
+ pass
+ current_comp.pop("Not Before")
+ current_comp.pop("MD5 Finger Print")
+ current_comp.pop("SHA1 Finger Print")
+ current_comp.pop("SHA-256 Finger Print")
+ current_notafter = current_comp.pop("Not After")
+ current_days_remaining = (
+ datetime.datetime.strptime(current_notafter, "%Y-%m-%d %H:%M:%S")
+ - datetime.datetime.now()
+ ).days
+ if days_remaining == 0:
+ days_remaining = current_days_remaining - 1
+ except salt.exceptions.SaltInvocationError:
+ current = "{} is not a valid Certificate.".format(name)
+ else:
+ current = "{} does not exist.".format(name)
contents = __salt__["x509.create_certificate"](text=True, **kwargs)
# Check the module actually returned a cert and not an error message as a string
@@ -900,6 +964,8 @@ def pem_managed(name, text, backup=False, **kwargs):
Any arguments supported by :py:func:`file.managed <salt.states.file.managed>` are supported.
"""
file_args, kwargs = _get_file_args(name, **kwargs)
- file_args["contents"] = __salt__["x509.get_pem_entry"](text=text)
+ file_args["contents"] = salt.utils.stringutils.to_str(
+ __salt__["x509.get_pem_entry"](text=text)
+ )
return __states__["file.managed"](**file_args)
diff --git a/tests/unit/modules/test_x509.py b/tests/unit/modules/test_x509.py
index f1ca5bb45a..a5c44f0ed2 100644
--- a/tests/unit/modules/test_x509.py
+++ b/tests/unit/modules/test_x509.py
@@ -119,9 +119,9 @@ class X509TestCase(TestCase, LoaderModuleMockMixin):
subj = FakeSubject()
x509._parse_subject(subj)
- assert x509.log.trace.call_args[0][0] == "Missing attribute '%s'. Error: %s"
- assert x509.log.trace.call_args[0][1] == list(subj.nid.keys())[0]
- assert isinstance(x509.log.trace.call_args[0][2], TypeError)
+ assert x509.log.debug.call_args[0][0] == "Missing attribute '%s'. Error: %s"
+ assert x509.log.debug.call_args[0][1] == list(subj.nid.keys())[0]
+ assert isinstance(x509.log.debug.call_args[0][2], TypeError)
@pytest.mark.skipif(
not HAS_M2CRYPTO, reason="Skipping, reason=M2Crypto is unavailable"
--
2.39.2

View File

@ -0,0 +1,366 @@
From 6b6ba4bdbd4b4c52a46bf3d0bcdbaca6b47534d1 Mon Sep 17 00:00:00 2001
From: Georg <georg@lysergic.dev>
Date: Wed, 28 Jun 2023 16:39:30 +0200
Subject: [PATCH] Zypper pkgrepo alreadyconfigured (#585)
* Fix zypper repository reconfiguration
See https://github.com/saltstack/salt/issues/63402 for issue details.
Signed-off-by: Georg Pfuetzenreuter <georg.pfuetzenreuter@suse.com>
* Functional pkgrepo tests for SUSE
Signed-off-by: Georg Pfuetzenreuter <georg.pfuetzenreuter@suse.com>
* Change pkgrepo state to use f-strings
Follow new styling rules.
Signed-off-by: Georg Pfuetzenreuter <mail@georg-pfuetzenreuter.net>
---------
Signed-off-by: Georg Pfuetzenreuter <georg.pfuetzenreuter@suse.com>
Signed-off-by: Georg Pfuetzenreuter <mail@georg-pfuetzenreuter.net>
---
changelog/63402.fixed.md | 1 +
salt/states/pkgrepo.py | 27 ++-
.../functional/states/pkgrepo/test_suse.py | 219 ++++++++++++++++++
3 files changed, 235 insertions(+), 12 deletions(-)
create mode 100644 changelog/63402.fixed.md
create mode 100644 tests/pytests/functional/states/pkgrepo/test_suse.py
diff --git a/changelog/63402.fixed.md b/changelog/63402.fixed.md
new file mode 100644
index 0000000000..c38715738a
--- /dev/null
+++ b/changelog/63402.fixed.md
@@ -0,0 +1 @@
+Repaired zypper repositories being reconfigured without changes
diff --git a/salt/states/pkgrepo.py b/salt/states/pkgrepo.py
index c2d23f95bb..f041644287 100644
--- a/salt/states/pkgrepo.py
+++ b/salt/states/pkgrepo.py
@@ -464,7 +464,7 @@ def managed(name, ppa=None, copr=None, aptkey=True, **kwargs):
pre = __salt__["pkg.get_repo"](repo=repo, **kwargs)
except CommandExecutionError as exc:
ret["result"] = False
- ret["comment"] = "Failed to examine repo '{}': {}".format(name, exc)
+ ret["comment"] = f"Failed to examine repo '{name}': {exc}"
return ret
# This is because of how apt-sources works. This pushes distro logic
@@ -500,7 +500,10 @@ def managed(name, ppa=None, copr=None, aptkey=True, **kwargs):
else:
break
else:
- break
+ if kwarg in ("comps", "key_url"):
+ break
+ else:
+ continue
elif kwarg in ("comps", "key_url"):
if sorted(sanitizedkwargs[kwarg]) != sorted(pre[kwarg]):
break
@@ -546,7 +549,7 @@ def managed(name, ppa=None, copr=None, aptkey=True, **kwargs):
break
else:
ret["result"] = True
- ret["comment"] = "Package repo '{}' already configured".format(name)
+ ret["comment"] = f"Package repo '{name}' already configured"
return ret
if __opts__["test"]:
@@ -581,7 +584,7 @@ def managed(name, ppa=None, copr=None, aptkey=True, **kwargs):
# This is another way to pass information back from the mod_repo
# function.
ret["result"] = False
- ret["comment"] = "Failed to configure repo '{}': {}".format(name, exc)
+ ret["comment"] = f"Failed to configure repo '{name}': {exc}"
return ret
try:
@@ -597,10 +600,10 @@ def managed(name, ppa=None, copr=None, aptkey=True, **kwargs):
ret["changes"] = {"repo": repo}
ret["result"] = True
- ret["comment"] = "Configured package repo '{}'".format(name)
+ ret["comment"] = f"Configured package repo '{name}'"
except Exception as exc: # pylint: disable=broad-except
ret["result"] = False
- ret["comment"] = "Failed to confirm config of repo '{}': {}".format(name, exc)
+ ret["comment"] = f"Failed to confirm config of repo '{name}': {exc}"
# Clear cache of available packages, if present, since changes to the
# repositories may change the packages that are available.
@@ -700,11 +703,11 @@ def absent(name, **kwargs):
repo = __salt__["pkg.get_repo"](stripname, **kwargs)
except CommandExecutionError as exc:
ret["result"] = False
- ret["comment"] = "Failed to configure repo '{}': {}".format(name, exc)
+ ret["comment"] = f"Failed to configure repo '{name}': {exc}"
return ret
if not repo:
- ret["comment"] = "Package repo {} is absent".format(name)
+ ret["comment"] = f"Package repo {name} is absent"
ret["result"] = True
return ret
@@ -727,7 +730,7 @@ def absent(name, **kwargs):
repos = __salt__["pkg.list_repos"]()
if stripname not in repos:
ret["changes"]["repo"] = name
- ret["comment"] = "Removed repo {}".format(name)
+ ret["comment"] = f"Removed repo {name}"
if not remove_key:
ret["result"] = True
@@ -736,14 +739,14 @@ def absent(name, **kwargs):
removed_keyid = __salt__["pkg.del_repo_key"](stripname, **kwargs)
except (CommandExecutionError, SaltInvocationError) as exc:
ret["result"] = False
- ret["comment"] += ", but failed to remove key: {}".format(exc)
+ ret["comment"] += f", but failed to remove key: {exc}"
else:
ret["result"] = True
ret["changes"]["keyid"] = removed_keyid
- ret["comment"] += ", and keyid {}".format(removed_keyid)
+ ret["comment"] += f", and keyid {removed_keyid}"
else:
ret["result"] = False
- ret["comment"] = "Failed to remove repo {}".format(name)
+ ret["comment"] = f"Failed to remove repo {name}"
return ret
diff --git a/tests/pytests/functional/states/pkgrepo/test_suse.py b/tests/pytests/functional/states/pkgrepo/test_suse.py
new file mode 100644
index 0000000000..19ba928ce6
--- /dev/null
+++ b/tests/pytests/functional/states/pkgrepo/test_suse.py
@@ -0,0 +1,219 @@
+import pytest
+
+pytestmark = [
+ pytest.mark.destructive_test,
+ pytest.mark.skip_if_not_root,
+]
+
+
+@pytest.fixture
+def pkgrepo(states, grains):
+ if grains["os_family"] != "Suse":
+ raise pytest.skip.Exception(
+ "Test is only applicable to SUSE based operating systems",
+ _use_item_location=True,
+ )
+ return states.pkgrepo
+
+
+@pytest.fixture
+def suse_state_tree(grains, pkgrepo, state_tree):
+ managed_sls_contents = """
+ salttest:
+ pkgrepo.managed:
+ - enabled: 1
+ - gpgcheck: 1
+ - comments:
+ - '# Salt Test'
+ - refresh: 1
+ {% if grains['osmajorrelease'] == 15 %}
+ - baseurl: https://download.opensuse.org/repositories/openSUSE:/Backports:/SLE-15-SP4/standard/
+ - humanname: openSUSE Backports for SLE 15 SP4
+ - gpgkey: https://download.opensuse.org/repositories/openSUSE:/Backports:/SLE-15-SP4/standard/repodata/repomd.xml.key
+ {% elif grains['osfullname'] == 'openSUSE Tumbleweed' %}
+ - baseurl: http://download.opensuse.org/tumbleweed/repo/oss/
+ - humanname: openSUSE Tumbleweed OSS
+ - gpgkey: https://download.opensuse.org/tumbleweed/repo/oss/repodata/repomd.xml.key
+ {% endif %}
+ """
+
+ absent_sls_contents = """
+ salttest:
+ pkgrepo:
+ - absent
+ """
+
+ modified_sls_contents = """
+ salttest:
+ pkgrepo.managed:
+ - enabled: 1
+ - gpgcheck: 1
+ - comments:
+ - '# Salt Test (modified)'
+ - refresh: 1
+ {% if grains['osmajorrelease'] == 15 %}
+ - baseurl: https://download.opensuse.org/repositories/openSUSE:/Backports:/SLE-15-SP4/standard/
+ - humanname: Salt modified Backports
+ - gpgkey: https://download.opensuse.org/repositories/openSUSE:/Backports:/SLE-15-SP4/standard/repodata/repomd.xml.key
+ {% elif grains['osfullname'] == 'openSUSE Tumbleweed' %}
+ - baseurl: http://download.opensuse.org/tumbleweed/repo/oss/
+ - humanname: Salt modified OSS
+ - gpgkey: https://download.opensuse.org/tumbleweed/repo/oss/repodata/repomd.xml.key
+ {% endif %}
+ """
+
+ managed_state_file = pytest.helpers.temp_file(
+ "pkgrepo/managed.sls", managed_sls_contents, state_tree
+ )
+ absent_state_file = pytest.helpers.temp_file(
+ "pkgrepo/absent.sls", absent_sls_contents, state_tree
+ )
+ modified_state_file = pytest.helpers.temp_file(
+ "pkgrepo/modified.sls", modified_sls_contents, state_tree
+ )
+
+ try:
+ with managed_state_file, absent_state_file, modified_state_file:
+ yield
+ finally:
+ pass
+
+
+@pytest.mark.requires_salt_states("pkgrepo.managed", "pkgrepo.absent")
+def test_pkgrepo_managed_absent(grains, modules, subtests, suse_state_tree):
+ """
+ Test adding and removing a repository
+ """
+ add_repo_test_passed = False
+
+ def _run(name, test=False):
+ return modules.state.sls(
+ mods=name,
+ test=test,
+ )
+
+ with subtests.test("Add repository"):
+ ret = _run("pkgrepo.managed")
+ assert ret.failed is False
+ for state in ret:
+ assert state.result is True
+ add_repo_test_passed = True
+
+ if add_repo_test_passed is False:
+ pytest.skip("Adding the repository failed, skipping removal tests.")
+
+ with subtests.test("Remove repository, test"):
+ ret = _run("pkgrepo.absent", test=True)
+ assert ret.failed is False
+ for state in ret:
+ assert state.changes == {}
+ assert state.comment.startswith("Package repo 'salttest' will be removed.")
+ assert state.result is None
+
+ with subtests.test("Remove repository"):
+ ret = _run("pkgrepo.absent")
+ assert ret.failed is False
+ for state in ret:
+ assert state.result is True
+
+ with subtests.test("Remove repository again, test"):
+ ret = _run("pkgrepo.absent", test=True)
+ assert ret.failed is False
+ for state in ret:
+ assert state.changes == {}
+ assert state.comment == "Package repo salttest is absent"
+ assert state.result is True
+
+ with subtests.test("Remove repository again"):
+ ret = _run("pkgrepo.absent")
+ assert ret.failed is False
+ for state in ret:
+ assert state.changes == {}
+ assert state.comment == "Package repo salttest is absent"
+ assert state.result is True
+
+
+@pytest.mark.requires_salt_states("pkgrepo.managed")
+def test_pkgrepo_managed_modify(grains, modules, subtests, suse_state_tree):
+ """
+ Test adding and modifying a repository
+ """
+ add_repo_test_passed = False
+
+ def _run(name, test=False):
+ return modules.state.sls(
+ mods=name,
+ test=test,
+ )
+
+ with subtests.test("Add repository, test"):
+ ret = _run("pkgrepo.managed", test=True)
+ assert ret.failed is False
+ for state in ret:
+ assert state.changes == {"repo": "salttest"}
+ assert state.comment.startswith(
+ "Package repo 'salttest' would be configured."
+ )
+ assert state.result is None
+
+ with subtests.test("Add repository"):
+ ret = _run("pkgrepo.managed")
+ assert ret.failed is False
+ for state in ret:
+ assert state.changes == {"repo": "salttest"}
+ assert state.comment == "Configured package repo 'salttest'"
+ assert state.result is True
+ add_repo_test_passed = True
+
+ if add_repo_test_passed is False:
+ pytest.skip("Adding the repository failed, skipping modification tests.")
+
+ with subtests.test("Add repository again, test"):
+ ret = _run("pkgrepo.managed", test=True)
+ assert ret.failed is False
+ for state in ret:
+ assert state.changes == {}
+ assert state.comment == "Package repo 'salttest' already configured"
+ assert state.result is True
+
+ with subtests.test("Add repository again"):
+ ret = _run("pkgrepo.managed")
+ assert ret.failed is False
+ for state in ret:
+ assert state.result is True
+ assert state.changes == {}
+ assert state.comment == "Package repo 'salttest' already configured"
+
+ with subtests.test("Modify repository, test"):
+ ret = _run("pkgrepo.modified", test=True)
+ assert ret.failed is False
+ for state in ret:
+ assert state.changes == {
+ "comments": {"new": ["# Salt Test (modified)"], "old": None},
+ "refresh": {"new": 1, "old": None},
+ "gpgkey": {
+ "new": "https://download.opensuse.org/repositories/openSUSE:/Backports:/SLE-15-SP4/standard/repodata/repomd.xml.key",
+ "old": None,
+ },
+ "name": {
+ "new": "Salt modified Backports",
+ "old": "openSUSE Backports for SLE 15 SP4",
+ },
+ }
+ assert state.comment.startswith(
+ "Package repo 'salttest' would be configured."
+ )
+ assert state.result is None
+
+ with subtests.test("Modify repository"):
+ ret = _run("pkgrepo.modified")
+ assert ret.failed is False
+ for state in ret:
+ assert state.result is True
+ assert state.changes == {
+ "name": {
+ "new": "Salt modified Backports",
+ "old": "openSUSE Backports for SLE 15 SP4",
+ }
+ }
+ assert state.comment == "Configured package repo 'salttest'"
--
2.41.0

View File

@ -0,0 +1,275 @@
From deaee93b2f83f1524ec136afc1a5198b33d293d2 Mon Sep 17 00:00:00 2001
From: Alberto Planas <aplanas@suse.com>
Date: Mon, 5 Oct 2020 16:24:16 +0200
Subject: [PATCH] zypperpkg: ignore retcode 104 for search()
(bsc#1176697) (#270)
---
salt/modules/zypperpkg.py | 28 ++++++---
tests/unit/modules/test_zypperpkg.py | 87 ++++++++++++++++++++++------
2 files changed, 89 insertions(+), 26 deletions(-)
diff --git a/salt/modules/zypperpkg.py b/salt/modules/zypperpkg.py
index d8220a1fdd..4bb10f445a 100644
--- a/salt/modules/zypperpkg.py
+++ b/salt/modules/zypperpkg.py
@@ -103,6 +103,8 @@ class _Zypper:
}
LOCK_EXIT_CODE = 7
+ NOT_FOUND_EXIT_CODE = 104
+
XML_DIRECTIVES = ["-x", "--xmlout"]
# ZYPPER_LOCK is not affected by --root
ZYPPER_LOCK = "/var/run/zypp.pid"
@@ -134,6 +136,7 @@ class _Zypper:
self.__no_raise = False
self.__refresh = False
self.__ignore_repo_failure = False
+ self.__ignore_not_found = False
self.__systemd_scope = False
self.__root = None
@@ -153,6 +156,9 @@ class _Zypper:
# Ignore exit code for 106 (repo is not available)
if "no_repo_failure" in kwargs:
self.__ignore_repo_failure = kwargs["no_repo_failure"]
+ # Ignore exit code for 104 (package not found)
+ if "ignore_not_found" in kwargs:
+ self.__ignore_not_found = kwargs["ignore_not_found"]
if "systemd_scope" in kwargs:
self.__systemd_scope = kwargs["systemd_scope"]
if "root" in kwargs:
@@ -333,6 +339,10 @@ class _Zypper:
if self.__root:
self.__cmd.extend(["--root", self.__root])
+ # Do not consider 104 as a retcode error
+ if self.__ignore_not_found:
+ kwargs["success_retcodes"] = [_Zypper.NOT_FOUND_EXIT_CODE]
+
self.__cmd.extend(args)
kwargs["output_loglevel"] = "trace"
kwargs["python_shell"] = False
@@ -479,9 +489,11 @@ class Wildcard:
Get available versions of the package.
:return:
"""
- solvables = self.zypper.nolock.xml.call(
- "se", "-xv", self.name
- ).getElementsByTagName("solvable")
+ solvables = (
+ self.zypper(ignore_not_found=True)
+ .nolock.xml.call("se", "-v", self.name)
+ .getElementsByTagName("solvable")
+ )
if not solvables:
raise CommandExecutionError(
"No packages found matching '{}'".format(self.name)
@@ -1086,7 +1098,7 @@ def list_repo_pkgs(*args, **kwargs):
root = kwargs.get("root") or None
for node in (
- __zypper__(root=root)
+ __zypper__(root=root, ignore_not_found=True)
.xml.call("se", "-s", *targets)
.getElementsByTagName("solvable")
):
@@ -2556,7 +2568,9 @@ def owner(*paths, **kwargs):
def _get_visible_patterns(root=None):
"""Get all available patterns in the repo that are visible."""
patterns = {}
- search_patterns = __zypper__(root=root).nolock.xml.call("se", "-t", "pattern")
+ search_patterns = __zypper__(root=root, ignore_not_found=True).nolock.xml.call(
+ "se", "-t", "pattern"
+ )
for element in search_patterns.getElementsByTagName("solvable"):
installed = element.getAttribute("status") == "installed"
patterns[element.getAttribute("name")] = {
@@ -2753,7 +2767,7 @@ def search(criteria, refresh=False, **kwargs):
cmd.append(criteria)
solvables = (
- __zypper__(root=root)
+ __zypper__(root=root, ignore_not_found=True)
.nolock.noraise.xml.call(*cmd)
.getElementsByTagName("solvable")
)
@@ -3005,7 +3019,7 @@ def _get_patches(installed_only=False, root=None):
"""
patches = {}
for element in (
- __zypper__(root=root)
+ __zypper__(root=root, ignore_not_found=True)
.nolock.xml.call("se", "-t", "patch")
.getElementsByTagName("solvable")
):
diff --git a/tests/unit/modules/test_zypperpkg.py b/tests/unit/modules/test_zypperpkg.py
index 22137a2544..5e4c967520 100644
--- a/tests/unit/modules/test_zypperpkg.py
+++ b/tests/unit/modules/test_zypperpkg.py
@@ -28,7 +28,10 @@ class ZyppCallMock:
def __call__(self, *args, **kwargs):
# If the call is for a configuration modifier, we return self
- if any(i in kwargs for i in ("no_repo_failure", "systemd_scope", "root")):
+ if any(
+ i in kwargs
+ for i in ("no_repo_failure", "ignore_not_found", "systemd_scope", "root")
+ ):
return self
return MagicMock(return_value=self.__return_value)()
@@ -1662,8 +1665,9 @@ class ZypperTestCase(TestCase, LoaderModuleMockMixin):
<solvable status="installed" name="libzypp" kind="package" edition="16.2.4-19.5" arch="x86_64" repository="(System Packages)"/>
</solvable-list></search-result></stream>
"""
- _zpr = MagicMock()
- _zpr.nolock.xml.call = MagicMock(return_value=minidom.parseString(xmldoc))
+ __zpr = MagicMock()
+ __zpr.nolock.xml.call.return_value = minidom.parseString(xmldoc)
+ _zpr = MagicMock(return_value=__zpr)
wcard = zypper.Wildcard(_zpr)
wcard.name, wcard.version = "libzypp", "*"
assert wcard._get_scope_versions(wcard._get_available_versions()) == [
@@ -1685,8 +1689,9 @@ class ZypperTestCase(TestCase, LoaderModuleMockMixin):
</solvable-list></search-result></stream>
"""
- _zpr = MagicMock()
- _zpr.nolock.xml.call = MagicMock(return_value=minidom.parseString(xmldoc))
+ __zpr = MagicMock()
+ __zpr.nolock.xml.call.return_value = minidom.parseString(xmldoc)
+ _zpr = MagicMock(return_value=__zpr)
wcard = zypper.Wildcard(_zpr)
wcard.name, wcard.version = "libzypp", "16.2.*-2*"
assert wcard._get_scope_versions(wcard._get_available_versions()) == [
@@ -1707,8 +1712,9 @@ class ZypperTestCase(TestCase, LoaderModuleMockMixin):
</solvable-list></search-result></stream>
"""
- _zpr = MagicMock()
- _zpr.nolock.xml.call = MagicMock(return_value=minidom.parseString(xmldoc))
+ __zpr = MagicMock()
+ __zpr.nolock.xml.call.return_value = minidom.parseString(xmldoc)
+ _zpr = MagicMock(return_value=__zpr)
wcard = zypper.Wildcard(_zpr)
wcard.name, wcard.version = "libzypp", "16.2.5*"
assert wcard._get_scope_versions(wcard._get_available_versions()) == [
@@ -1728,8 +1734,9 @@ class ZypperTestCase(TestCase, LoaderModuleMockMixin):
</solvable-list></search-result></stream>
"""
- _zpr = MagicMock()
- _zpr.nolock.xml.call = MagicMock(return_value=minidom.parseString(xmldoc))
+ __zpr = MagicMock()
+ __zpr.nolock.xml.call.return_value = minidom.parseString(xmldoc)
+ _zpr = MagicMock(return_value=__zpr)
wcard = zypper.Wildcard(_zpr)
wcard.name, wcard.version = "libzypp", "*.1"
assert wcard._get_scope_versions(wcard._get_available_versions()) == [
@@ -1750,8 +1757,9 @@ class ZypperTestCase(TestCase, LoaderModuleMockMixin):
<solvable status="other-version" name="libzypp" kind="package" edition="17.2.6-27.9.1" arch="x86_64" repository="foo"/>
</solvable-list></search-result></stream>
"""
- _zpr = MagicMock()
- _zpr.nolock.xml.call = MagicMock(return_value=minidom.parseString(xmldoc))
+ __zpr = MagicMock()
+ __zpr.nolock.xml.call.return_value = minidom.parseString(xmldoc)
+ _zpr = MagicMock(return_value=__zpr)
assert zypper.Wildcard(_zpr)("libzypp", "16.2.4*") == "16.2.4-19.5"
assert zypper.Wildcard(_zpr)("libzypp", "16.2*") == "16.2.5-25.1"
assert zypper.Wildcard(_zpr)("libzypp", "*6-*") == "17.2.6-27.9.1"
@@ -1770,8 +1778,10 @@ class ZypperTestCase(TestCase, LoaderModuleMockMixin):
<solvable status="other-version" name="libzypp" kind="package" edition="17.2.6-27.9.1" arch="x86_64" repository="foo"/>
</solvable-list></search-result></stream>
"""
- _zpr = MagicMock()
- _zpr.nolock.xml.call = MagicMock(return_value=minidom.parseString(xmldoc))
+ __zpr = MagicMock()
+ __zpr.nolock.xml.call.return_value = minidom.parseString(xmldoc)
+ _zpr = MagicMock(return_value=__zpr)
+
assert zypper.Wildcard(_zpr)("libzypp", None) is None
def test_wildcard_to_query_typecheck(self):
@@ -1787,8 +1797,9 @@ class ZypperTestCase(TestCase, LoaderModuleMockMixin):
<solvable status="other-version" name="libzypp" kind="package" edition="17.2.6-27.9.1" arch="x86_64" repository="foo"/>
</solvable-list></search-result></stream>
"""
- _zpr = MagicMock()
- _zpr.nolock.xml.call = MagicMock(return_value=minidom.parseString(xmldoc))
+ __zpr = MagicMock()
+ __zpr.nolock.xml.call.return_value = minidom.parseString(xmldoc)
+ _zpr = MagicMock(return_value=__zpr)
assert isinstance(zypper.Wildcard(_zpr)("libzypp", "*.1"), str)
def test_wildcard_to_query_condition_preservation(self):
@@ -1804,8 +1815,9 @@ class ZypperTestCase(TestCase, LoaderModuleMockMixin):
<solvable status="other-version" name="libzypp" kind="package" edition="17.2.6-27.9.1" arch="x86_64" repository="foo"/>
</solvable-list></search-result></stream>
"""
- _zpr = MagicMock()
- _zpr.nolock.xml.call = MagicMock(return_value=minidom.parseString(xmldoc))
+ __zpr = MagicMock()
+ __zpr.nolock.xml.call.return_value = minidom.parseString(xmldoc)
+ _zpr = MagicMock(return_value=__zpr)
for op in zypper.Wildcard.Z_OP:
assert zypper.Wildcard(_zpr)(
@@ -1831,8 +1843,10 @@ class ZypperTestCase(TestCase, LoaderModuleMockMixin):
<solvable status="other-version" name="libzypp" kind="package" edition="17.2.6-27.9.1" arch="x86_64" repository="foo"/>
</solvable-list></search-result></stream>
"""
- _zpr = MagicMock()
- _zpr.nolock.xml.call = MagicMock(return_value=minidom.parseString(xmldoc))
+ __zpr = MagicMock()
+ __zpr.nolock.xml.call.return_value = minidom.parseString(xmldoc)
+ _zpr = MagicMock(return_value=__zpr)
+
with self.assertRaises(CommandExecutionError):
for op in [">>", "==", "<<", "+"]:
zypper.Wildcard(_zpr)("libzypp", "{}*.1".format(op))
@@ -1958,3 +1972,38 @@ pattern() = package-c"""
self.assertFalse(zypper.__zypper__._is_rpm_lock())
self.assertEqual(lockf_mock.call_count, 2)
zypper.__zypper__._reset()
+
+ def test_search(self):
+ """Test zypperpkg.search()"""
+ xml_mock = MagicMock(return_value=[])
+ zypp_mock = MagicMock(return_value=xml_mock)
+ ZyppCallMock(return_value=xml_mock)
+ with patch("salt.modules.zypperpkg.__zypper__", zypp_mock):
+ zypper.search("emacs")
+ zypp_mock.assert_called_with(root=None, ignore_not_found=True)
+ xml_mock.nolock.noraise.xml.call.assert_called_with("search", "emacs")
+
+ def test_search_not_found(self):
+ """Test zypperpkg.search()"""
+ ret = {
+ "stdout": "<?xml version='1.0'?><stream></stream>",
+ "stderr": None,
+ "retcode": 104,
+ }
+ run_all_mock = MagicMock(return_value=ret)
+ with patch.dict(zypper.__salt__, {"cmd.run_all": run_all_mock}):
+ self.assertRaises(CommandExecutionError, zypper.search, "vim")
+ run_all_mock.assert_called_with(
+ [
+ "zypper",
+ "--non-interactive",
+ "--xmlout",
+ "--no-refresh",
+ "search",
+ "vim",
+ ],
+ success_retcodes=[104],
+ output_loglevel="trace",
+ python_shell=False,
+ env={"ZYPP_READONLY_HACK": "1"},
+ )
--
2.39.2