SHA256
1
0
forked from pool/salt

- Make auth.pam more robust with Salt Bundle and fix tests

- Added:
  * some-more-small-tests-fixes-enhancements-661.patch

OBS-URL: https://build.opensuse.org/package/show/systemsmanagement:saltstack/salt?expand=0&rev=248
This commit is contained in:
Victor Zhestkov 2024-07-09 08:30:09 +00:00 committed by Git OBS Bridge
commit 5e19306ccc
138 changed files with 45542 additions and 0 deletions

23
.gitattributes vendored Normal file
View File

@ -0,0 +1,23 @@
## Default LFS
*.7z filter=lfs diff=lfs merge=lfs -text
*.bsp filter=lfs diff=lfs merge=lfs -text
*.bz2 filter=lfs diff=lfs merge=lfs -text
*.gem filter=lfs diff=lfs merge=lfs -text
*.gz filter=lfs diff=lfs merge=lfs -text
*.jar filter=lfs diff=lfs merge=lfs -text
*.lz filter=lfs diff=lfs merge=lfs -text
*.lzma filter=lfs diff=lfs merge=lfs -text
*.obscpio filter=lfs diff=lfs merge=lfs -text
*.oxt filter=lfs diff=lfs merge=lfs -text
*.pdf filter=lfs diff=lfs merge=lfs -text
*.png filter=lfs diff=lfs merge=lfs -text
*.rpm filter=lfs diff=lfs merge=lfs -text
*.tbz filter=lfs diff=lfs merge=lfs -text
*.tbz2 filter=lfs diff=lfs merge=lfs -text
*.tgz filter=lfs diff=lfs merge=lfs -text
*.ttf filter=lfs diff=lfs merge=lfs -text
*.txz filter=lfs diff=lfs merge=lfs -text
*.whl filter=lfs diff=lfs merge=lfs -text
*.xz filter=lfs diff=lfs merge=lfs -text
*.zip filter=lfs diff=lfs merge=lfs -text
*.zst filter=lfs diff=lfs merge=lfs -text

1
.gitignore vendored Normal file
View File

@ -0,0 +1 @@
.osc

View File

@ -0,0 +1,505 @@
From 327a5e5b24c4fa047df44b245abd672e02999cca Mon Sep 17 00:00:00 2001
From: Michael Calmer <Michael.Calmer@suse.de>
Date: Mon, 23 Jan 2023 14:33:26 +0100
Subject: [PATCH] 3005.1 implement zypper removeptf (#573)
* handle ptf packages inside of normal pkg.remove function
* add testcase for remove and removeptf
* add changelog
* adapt old tests to changed function
* Update Docs
Co-authored-by: Megan Wilhite <mwilhite@vmware.com>
---
changelog/63442.added | 1 +
salt/modules/zypperpkg.py | 38 +-
tests/pytests/unit/modules/test_zypperpkg.py | 356 ++++++++++++++++++-
tests/unit/modules/test_zypperpkg.py | 1 +
4 files changed, 394 insertions(+), 2 deletions(-)
create mode 100644 changelog/63442.added
diff --git a/changelog/63442.added b/changelog/63442.added
new file mode 100644
index 0000000000..ad81b2f9d5
--- /dev/null
+++ b/changelog/63442.added
@@ -0,0 +1 @@
+implement removal of ptf packages in zypper pkg module
diff --git a/salt/modules/zypperpkg.py b/salt/modules/zypperpkg.py
index 051f8f72c7..44f2cdbd3a 100644
--- a/salt/modules/zypperpkg.py
+++ b/salt/modules/zypperpkg.py
@@ -2073,17 +2073,21 @@ def _uninstall(inclusion_detection, name=None, pkgs=None, root=None):
except MinionError as exc:
raise CommandExecutionError(exc)
+ ptfpackages = _find_ptf_packages(pkg_params.keys(), root=root)
includes = _detect_includes(pkg_params.keys(), inclusion_detection)
old = list_pkgs(root=root, includes=includes)
targets = []
for target in pkg_params:
+ if target in ptfpackages:
+ # ptfpackages needs special handling
+ continue
# Check if package version set to be removed is actually installed:
# old[target] contains a comma-separated list of installed versions
if target in old and pkg_params[target] in old[target].split(","):
targets.append(target + "-" + pkg_params[target])
elif target in old and not pkg_params[target]:
targets.append(target)
- if not targets:
+ if not targets and not ptfpackages:
return {}
systemd_scope = _systemd_scope()
@@ -2095,6 +2099,13 @@ def _uninstall(inclusion_detection, name=None, pkgs=None, root=None):
)
targets = targets[500:]
+ # handle ptf packages
+ while ptfpackages:
+ __zypper__(systemd_scope=systemd_scope, root=root).call(
+ "removeptf", "--allow-downgrade", *ptfpackages[:500]
+ )
+ ptfpackages = ptfpackages[500:]
+
_clean_cache()
new = list_pkgs(root=root, includes=includes)
ret = salt.utils.data.compare_dicts(old, new)
@@ -2183,6 +2194,11 @@ def remove(
salt '*' pkg.remove <package name>
salt '*' pkg.remove <package1>,<package2>,<package3>
salt '*' pkg.remove pkgs='["foo", "bar"]'
+
+ .. versionchanged:: 3007
+ Can now remove also PTF packages which require a different handling in the backend.
+
+ Can now remove also PTF packages which require a different handling in the backend.
"""
return _uninstall(inclusion_detection, name=name, pkgs=pkgs, root=root)
@@ -2658,6 +2674,26 @@ def _get_visible_patterns(root=None):
return patterns
+def _find_ptf_packages(pkgs, root=None):
+ """
+ Find ptf packages in "pkgs" and return them as list
+ """
+ ptfs = []
+ cmd = ["rpm"]
+ if root:
+ cmd.extend(["--root", root])
+ cmd.extend(["-q", "--qf", "%{NAME}: [%{PROVIDES} ]\n"])
+ cmd.extend(pkgs)
+ output = __salt__["cmd.run"](cmd)
+ for line in output.splitlines():
+ if not line.strip():
+ continue
+ pkg, provides = line.split(":", 1)
+ if "ptf()" in provides:
+ ptfs.append(pkg)
+ return ptfs
+
+
def _get_installed_patterns(root=None):
"""
List all installed patterns.
diff --git a/tests/pytests/unit/modules/test_zypperpkg.py b/tests/pytests/unit/modules/test_zypperpkg.py
index 91132b7277..c996662e1c 100644
--- a/tests/pytests/unit/modules/test_zypperpkg.py
+++ b/tests/pytests/unit/modules/test_zypperpkg.py
@@ -11,7 +11,7 @@ import pytest
import salt.modules.pkg_resource as pkg_resource
import salt.modules.zypperpkg as zypper
from salt.exceptions import CommandExecutionError, SaltInvocationError
-from tests.support.mock import MagicMock, mock_open, patch
+from tests.support.mock import MagicMock, mock_open, call, patch
@pytest.fixture
@@ -27,6 +27,11 @@ def configure_loader_modules():
}
+@pytest.fixture(autouse=True)
+def fresh_zypper_instance():
+ zypper.__zypper__ = zypper._Zypper()
+
+
def test_list_pkgs_no_context():
"""
Test packages listing.
@@ -395,3 +400,352 @@ def test_del_repo_key():
with patch.dict(zypper.__salt__, salt_mock):
assert zypper.del_repo_key(keyid="keyid", root="/mnt")
salt_mock["lowpkg.remove_gpg_key"].assert_called_once_with("keyid", "/mnt")
+
+@pytest.mark.parametrize(
+ "zypper_version,lowpkg_version_cmp,expected_inst_avc,expected_dup_avc",
+ [
+ ("0.5", [-1, -1], False, False),
+ ("1.11.34", [0, -1], False, True),
+ ("1.14.8", [0, 0], True, True),
+ ],
+)
+def test_refresh_zypper_flags(
+ zypper_version, lowpkg_version_cmp, expected_inst_avc, expected_dup_avc
+):
+ with patch(
+ "salt.modules.zypperpkg.version", MagicMock(return_value=zypper_version)
+ ), patch.dict(
+ zypper.__salt__,
+ {"lowpkg.version_cmp": MagicMock(side_effect=lowpkg_version_cmp)},
+ ):
+ _zypper = zypper._Zypper()
+ _zypper.refresh_zypper_flags()
+ assert _zypper.inst_avc == expected_inst_avc
+ assert _zypper.dup_avc == expected_dup_avc
+
+
+@pytest.mark.parametrize(
+ "inst_avc,dup_avc,avc,allowvendorchange_param,novendorchange_param,expected",
+ [
+ # inst_avc = True, dup_avc = True
+ (True, True, False, False, False, True),
+ (True, True, False, True, False, True),
+ (True, True, False, False, True, False),
+ (True, True, False, True, True, True),
+ # inst_avc = False, dup_avc = True
+ (False, True, False, False, False, True),
+ (False, True, False, True, False, True),
+ (False, True, False, False, True, False),
+ (False, True, False, True, True, True),
+ # inst_avc = False, dup_avc = False
+ (False, False, False, False, False, False),
+ (False, False, False, True, False, False),
+ (False, False, False, False, True, False),
+ (False, False, False, True, True, False),
+ ],
+)
+@patch("salt.modules.zypperpkg._Zypper.refresh_zypper_flags", MagicMock())
+def test_allow_vendor_change(
+ inst_avc,
+ dup_avc,
+ avc,
+ allowvendorchange_param,
+ novendorchange_param,
+ expected,
+):
+ _zypper = zypper._Zypper()
+ _zypper.inst_avc = inst_avc
+ _zypper.dup_avc = dup_avc
+ _zypper.avc = avc
+ _zypper.allow_vendor_change(allowvendorchange_param, novendorchange_param)
+ assert _zypper.avc == expected
+
+
+@pytest.mark.parametrize(
+ "package,pre_version,post_version,fromrepo_param,name_param,pkgs_param,diff_attr_param",
+ [
+ ("vim", "1.1", "1.2", [], "", [], "all"),
+ ("kernel-default", "1.1", "1.1,1.2", ["dummy", "dummy2"], "", [], None),
+ ("vim", "1.1", "1.2", [], "vim", [], None),
+ ],
+)
+@patch.object(zypper, "refresh_db", MagicMock(return_value=True))
+def test_upgrade(
+ package,
+ pre_version,
+ post_version,
+ fromrepo_param,
+ name_param,
+ pkgs_param,
+ diff_attr_param,
+):
+ with patch(
+ "salt.modules.zypperpkg.__zypper__.noraise.call"
+ ) as zypper_mock, patch.object(
+ zypper,
+ "list_pkgs",
+ MagicMock(side_effect=[{package: pre_version}, {package: post_version}]),
+ ) as list_pkgs_mock:
+ expected_call = ["update", "--auto-agree-with-licenses"]
+ for repo in fromrepo_param:
+ expected_call.extend(["--repo", repo])
+
+ if pkgs_param:
+ expected_call.extend(pkgs_param)
+ elif name_param:
+ expected_call.append(name_param)
+
+ result = zypper.upgrade(
+ name=name_param,
+ pkgs=pkgs_param,
+ fromrepo=fromrepo_param,
+ diff_attr=diff_attr_param,
+ )
+ zypper_mock.assert_any_call(*expected_call)
+ assert result == {package: {"old": pre_version, "new": post_version}}
+ list_pkgs_mock.assert_any_call(root=None, attr=diff_attr_param)
+
+
+@pytest.mark.parametrize(
+ "package,pre_version,post_version,fromrepo_param",
+ [
+ ("vim", "1.1", "1.2", []),
+ ("emacs", "1.1", "1.2", ["Dummy", "Dummy2"]),
+ ],
+)
+@patch.object(zypper, "refresh_db", MagicMock(return_value=True))
+def test_dist_upgrade(package, pre_version, post_version, fromrepo_param):
+ with patch(
+ "salt.modules.zypperpkg.__zypper__.noraise.call"
+ ) as zypper_mock, patch.object(
+ zypper,
+ "list_pkgs",
+ MagicMock(side_effect=[{package: pre_version}, {package: post_version}]),
+ ):
+ expected_call = ["dist-upgrade", "--auto-agree-with-licenses"]
+
+ for repo in fromrepo_param:
+ expected_call.extend(["--from", repo])
+
+ result = zypper.upgrade(dist_upgrade=True, fromrepo=fromrepo_param)
+ zypper_mock.assert_any_call(*expected_call)
+ assert result == {package: {"old": pre_version, "new": post_version}}
+
+
+@pytest.mark.parametrize(
+ "package,pre_version,post_version,dup_avc,novendorchange_param,allowvendorchange_param,vendor_change",
+ [
+ # dup_avc = True, both params = default -> no vendor change
+ ("vim", "1.1", "1.2", True, True, False, False),
+ # dup_avc = True, allowvendorchange = True -> vendor change
+ (
+ "emacs",
+ "1.1",
+ "1.2",
+ True,
+ True,
+ True,
+ True,
+ ),
+ # dup_avc = True, novendorchange = False -> vendor change
+ ("joe", "1.1", "1.2", True, False, False, True),
+ # dup_avc = True, both params = toggled -> vendor change
+ ("kate", "1.1", "1.2", True, False, True, True),
+ # dup_avc = False -> no vendor change
+ (
+ "gedit",
+ "1.1",
+ "1.2",
+ False,
+ False,
+ True,
+ False
+ ),
+ ],
+)
+@patch.object(zypper, "refresh_db", MagicMock(return_value=True))
+def test_dist_upgrade_vendorchange(
+ package,
+ pre_version,
+ post_version,
+ dup_avc,
+ novendorchange_param,
+ allowvendorchange_param,
+ vendor_change
+):
+ cmd_run_mock = MagicMock(return_value={"retcode": 0, "stdout": None})
+ with patch.object(
+ zypper,
+ "list_pkgs",
+ MagicMock(side_effect=[{package: pre_version}, {package: post_version}]),
+ ), patch("salt.modules.zypperpkg.__zypper__.refresh_zypper_flags",), patch.dict(
+ zypper.__salt__, {"cmd.run_all": cmd_run_mock}
+ ):
+ expected_cmd = ["zypper", "--non-interactive", "--no-refresh", "dist-upgrade"]
+ # --allow-vendor-change is injected right after "dist-upgrade"
+ if vendor_change:
+ expected_cmd.append("--allow-vendor-change")
+ expected_cmd.append("--auto-agree-with-licenses")
+
+ zypper.__zypper__.dup_avc = dup_avc
+ zypper.upgrade(
+ dist_upgrade=True,
+ allowvendorchange=allowvendorchange_param,
+ novendorchange=novendorchange_param,
+ )
+ cmd_run_mock.assert_any_call(
+ expected_cmd, output_loglevel="trace", python_shell=False, env={}
+ )
+
+
+@pytest.mark.parametrize(
+ "package,pre_version,post_version,fromrepo_param",
+ [
+ ("vim", "1.1", "1.1", []),
+ ("emacs", "1.1", "1.1", ["Dummy", "Dummy2"]),
+ ],
+)
+@patch.object(zypper, "refresh_db", MagicMock(return_value=True))
+def test_dist_upgrade_dry_run(package, pre_version, post_version, fromrepo_param):
+ with patch(
+ "salt.modules.zypperpkg.__zypper__.noraise.call"
+ ) as zypper_mock, patch.object(
+ zypper,
+ "list_pkgs",
+ MagicMock(side_effect=[{package: pre_version}, {package: post_version}]),
+ ):
+ expected_call = ["dist-upgrade", "--auto-agree-with-licenses", "--dry-run"]
+
+ for repo in fromrepo_param:
+ expected_call.extend(["--from", repo])
+
+ zypper.upgrade(dist_upgrade=True, dryrun=True, fromrepo=fromrepo_param)
+ zypper_mock.assert_any_call(*expected_call)
+ # dryrun=True causes two calls, one with a trailing --debug-solver flag
+ expected_call.append("--debug-solver")
+ zypper_mock.assert_any_call(*expected_call)
+
+
+@patch.object(zypper, "refresh_db", MagicMock(return_value=True))
+def test_dist_upgrade_failure():
+ zypper_output = textwrap.dedent(
+ """\
+ Loading repository data...
+ Reading installed packages...
+ Computing distribution upgrade...
+ Use 'zypper repos' to get the list of defined repositories.
+ Repository 'DUMMY' not found by its alias, number, or URI.
+ """
+ )
+ call_spy = MagicMock()
+ zypper_mock = MagicMock()
+ zypper_mock.stdout = zypper_output
+ zypper_mock.stderr = ""
+ zypper_mock.exit_code = 3
+ zypper_mock.noraise.call = call_spy
+ with patch("salt.modules.zypperpkg.__zypper__", zypper_mock), patch.object(
+ zypper, "list_pkgs", MagicMock(side_effect=[{"vim": 1.1}, {"vim": 1.1}])
+ ):
+ expected_call = [
+ "dist-upgrade",
+ "--auto-agree-with-licenses",
+ "--from",
+ "Dummy",
+ ]
+
+ with pytest.raises(CommandExecutionError) as exc:
+ zypper.upgrade(dist_upgrade=True, fromrepo=["Dummy"])
+ call_spy.assert_called_with(*expected_call)
+
+ assert exc.exception.info["changes"] == {}
+ assert exc.exception.info["result"]["stdout"] == zypper_output
+
+
+def test_remove_multiple_pkgs_with_ptf():
+ call_spy = MagicMock()
+ zypper_mock = MagicMock()
+ zypper_mock.stdout = ""
+ zypper_mock.stderr = ""
+ zypper_mock.exit_code = 0
+ zypper_mock.call = call_spy
+
+ rpm_output = textwrap.dedent(
+ """
+ vim: vi vim vim(x86-64) vim-base vim-enhanced vim-python vim_client
+ ptf-12345: ptf() ptf-12345
+ """
+ )
+ rpm_mock = MagicMock(side_effect=[rpm_output])
+
+ with patch(
+ "salt.modules.zypperpkg.__zypper__", MagicMock(return_value=zypper_mock)
+ ), patch.object(
+ zypper,
+ "list_pkgs",
+ MagicMock(side_effect=[{"vim": "0.18.0", "ptf-12345": "1"}, {}]),
+ ), patch.dict(
+ zypper.__salt__, {"cmd.run": rpm_mock}
+ ):
+ expected_calls = [
+ call(
+ "remove",
+ "vim",
+ ),
+ call(
+ "removeptf",
+ "--allow-downgrade",
+ "ptf-12345",
+ ),
+ ]
+
+ result = zypper.remove(name="vim,ptf-12345")
+ call_spy.assert_has_calls(expected_calls, any_order=False)
+ assert result["vim"]["new"] == "", result
+ assert result["vim"]["old"] == "0.18.0", result
+ assert result["ptf-12345"]["new"] == "", result
+ assert result["ptf-12345"]["old"] == "1", result
+
+
+def test_remove_ptf():
+ call_spy = MagicMock()
+ zypper_mock = MagicMock()
+ zypper_mock.stdout = ""
+ zypper_mock.stderr = ""
+ zypper_mock.exit_code = 0
+ zypper_mock.call = call_spy
+
+ rpm_mock = MagicMock(
+ side_effect=[
+ "vim: vi vim vim(x86-64) vim-base vim-enhanced vim-python vim_client",
+ "ptf-12345: ptf() ptf-12345",
+ ]
+ )
+
+ with patch(
+ "salt.modules.zypperpkg.__zypper__", MagicMock(return_value=zypper_mock)
+ ), patch.object(
+ zypper,
+ "list_pkgs",
+ MagicMock(side_effect=[{"vim": "0.18.0"}, {}, {"ptf-12345": "1"}, {}]),
+ ), patch.dict(
+ zypper.__salt__, {"cmd.run": rpm_mock}
+ ):
+ expected_call_vim = [
+ "remove",
+ "vim",
+ ]
+ expected_call_ptf = [
+ "removeptf",
+ "--allow-downgrade",
+ "ptf-12345",
+ ]
+
+ result = zypper.remove(name="vim")
+ call_spy.assert_called_with(*expected_call_vim)
+ assert result["vim"]["new"] == "", result
+ assert result["vim"]["old"] == "0.18.0", result
+
+ result = zypper.remove(name="ptf-12345")
+ call_spy.assert_called_with(*expected_call_ptf)
+ assert result["ptf-12345"]["new"] == "", result
+ assert result["ptf-12345"]["old"] == "1", result
diff --git a/tests/unit/modules/test_zypperpkg.py b/tests/unit/modules/test_zypperpkg.py
index f5b6d74b6f..6e5ca88895 100644
--- a/tests/unit/modules/test_zypperpkg.py
+++ b/tests/unit/modules/test_zypperpkg.py
@@ -1953,6 +1953,7 @@ Repository 'DUMMY' not found by its alias, number, or URI.
# If config.get starts being used elsewhere, we'll need to write a
# side_effect function.
patches = {
+ "cmd.run": MagicMock(return_value="vim: vi vim\npico: pico"),
"cmd.run_all": MagicMock(return_value=cmd_out),
"pkg_resource.parse_targets": MagicMock(return_value=parsed_targets),
"pkg_resource.stringify": MagicMock(),
--
2.39.2

View File

@ -0,0 +1,71 @@
From 40a57afc65e71835127a437248ed655404cff0e8 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Pablo=20Su=C3=A1rez=20Hern=C3=A1ndez?=
<psuarezhernandez@suse.com>
Date: Tue, 27 Jun 2023 11:24:39 +0100
Subject: [PATCH] 3006.0: Prevent _pygit2.GitError: error loading
known_hosts when $HOME is not set (bsc#1210994) (#588)
* Prevent _pygit2.GitError: error loading known_hosts when $HOME is not set
* Add unit test to cover case of unset home
---
salt/utils/gitfs.py | 5 +++++
tests/unit/utils/test_gitfs.py | 14 ++++++++++++++
2 files changed, 19 insertions(+)
diff --git a/salt/utils/gitfs.py b/salt/utils/gitfs.py
index cc9895d8ab..38e84f38aa 100644
--- a/salt/utils/gitfs.py
+++ b/salt/utils/gitfs.py
@@ -34,6 +34,7 @@ import salt.utils.stringutils
import salt.utils.url
import salt.utils.user
import salt.utils.versions
+import salt.syspaths
from salt.config import DEFAULT_MASTER_OPTS as _DEFAULT_MASTER_OPTS
from salt.exceptions import FileserverConfigError, GitLockError, get_error_message
from salt.utils.event import tagify
@@ -1867,6 +1868,10 @@ class Pygit2(GitProvider):
# pruning only available in pygit2 >= 0.26.2
pass
try:
+ # Make sure $HOME env variable is set to prevent
+ # _pygit2.GitError: error loading known_hosts in some libgit2 versions.
+ if "HOME" not in os.environ:
+ os.environ["HOME"] = salt.syspaths.HOME_DIR
fetch_results = origin.fetch(**fetch_kwargs)
except GitError as exc: # pylint: disable=broad-except
exc_str = get_error_message(exc).lower()
diff --git a/tests/unit/utils/test_gitfs.py b/tests/unit/utils/test_gitfs.py
index b99da3ef91..7c400b69af 100644
--- a/tests/unit/utils/test_gitfs.py
+++ b/tests/unit/utils/test_gitfs.py
@@ -14,6 +14,7 @@ import salt.utils.gitfs
import salt.utils.platform
import tests.support.paths
from salt.exceptions import FileserverConfigError
+from tests.support.helpers import patched_environ
from tests.support.mixins import AdaptedConfigurationTestCaseMixin
from tests.support.mock import MagicMock, patch
from tests.support.unit import TestCase
@@ -335,3 +336,16 @@ class TestPygit2(TestCase):
self.assertIn(provider.cachedir, provider.checkout())
provider.branch = "does_not_exist"
self.assertIsNone(provider.checkout())
+
+ def test_checkout_with_home_env_unset(self):
+ remote = os.path.join(tests.support.paths.TMP, "pygit2-repo")
+ cache = os.path.join(tests.support.paths.TMP, "pygit2-repo-cache")
+ self._prepare_remote_repository(remote)
+ provider = self._prepare_cache_repository(remote, cache)
+ provider.remotecallbacks = None
+ provider.credentials = None
+ with patched_environ(__cleanup__=["HOME"]):
+ self.assertTrue("HOME" not in os.environ)
+ provider.init_remote()
+ provider.fetch()
+ self.assertTrue("HOME" in os.environ)
--
2.41.0

31
README.SUSE Normal file
View File

@ -0,0 +1,31 @@
Salt-master as non-root user
============================
With this version of salt the salt-master will run as salt user.
Why an extra user
=================
While the current setup runs the master as root user, this is considered a security issue
and not in line with the other configuration management tools (e.g. puppet) which run as a
dedicated user.
How can I undo the change
=========================
If you would like to undo the change, you can do the following steps manually:
1. change the user parameter in the master configuration
user: root
2. update the file permissions:
as root: chown -R root /etc/salt /var/cache/salt /var/log/salt /var/run/salt
3. restart the salt-master daemon:
as root: rcsalt-master restart or systemctl restart salt-master
NOTE
====
Running the salt-master daemon as a root user is considered by some a security risk, but
running as root enables the pam external auth system, as this system needs root access to check authentication.
For more information:
http://docs.saltstack.com/en/latest/ref/configuration/nonroot.html

1
_lastrevision Normal file
View File

@ -0,0 +1 @@
9a488902bc0653a10dba209ad9150d2939215954

3
_multibuild Normal file
View File

@ -0,0 +1,3 @@
<multibuild>
<flavor>testsuite</flavor>
</multibuild>

20
_service Normal file
View File

@ -0,0 +1,20 @@
<services>
<service name="tar_scm" mode="disabled">
<param name="url">https://github.com/openSUSE/salt-packaging.git</param>
<param name="subdir">salt</param>
<param name="filename">package</param>
<param name="revision">release/3006.0</param>
<param name="scm">git</param>
</service>
<service name="extract_file" mode="disabled">
<param name="archive">*package*.tar</param>
<param name="files">*/*</param>
</service>
<service name="download_url" mode="disabled">
<param name="host">codeload.github.com</param>
<param name="path">openSUSE/salt/tar.gz/v3006.0-suse</param>
<param name="filename">v3006.0.tar.gz</param>
</service>
<service name="update_changelog" mode="disabled"></service>
</services>

View File

@ -0,0 +1,28 @@
From f2938966bd1fcb46df0f202f5a86729ab190565a Mon Sep 17 00:00:00 2001
From: Bo Maryniuk <bo@suse.de>
Date: Tue, 17 Oct 2017 16:52:33 +0200
Subject: [PATCH] Activate all beacons sources: config/pillar/grains
---
salt/minion.py | 4 +---
1 file changed, 1 insertion(+), 3 deletions(-)
diff --git a/salt/minion.py b/salt/minion.py
index 6237fcc4b7..2f905e4a4f 100644
--- a/salt/minion.py
+++ b/salt/minion.py
@@ -503,9 +503,7 @@ class MinionBase:
the pillar or grains changed
"""
if "config.merge" in functions:
- b_conf = functions["config.merge"](
- "beacons", self.opts["beacons"], omit_opts=True
- )
+ b_conf = functions["config.merge"]("beacons", self.opts["beacons"])
if b_conf:
return self.beacons.process(
b_conf, self.opts["grains"]
--
2.39.2

View File

@ -0,0 +1,30 @@
From 311d4e320527158b6ff88604b45e15f0dc2bfa62 Mon Sep 17 00:00:00 2001
From: Alexander Graul <agraul@suse.com>
Date: Tue, 18 Jan 2022 12:59:43 +0100
Subject: [PATCH] Add custom SUSE capabilities as Grains
Add new custom SUSE capability for saltutil state module
---
salt/grains/extra.py | 8 ++++++++
1 file changed, 8 insertions(+)
diff --git a/salt/grains/extra.py b/salt/grains/extra.py
index 300052f1ee..f2504dbf19 100644
--- a/salt/grains/extra.py
+++ b/salt/grains/extra.py
@@ -96,3 +96,11 @@ def uefi():
def transactional():
"""Determine if the system is transactional."""
return {"transactional": bool(salt.utils.path.which("transactional-update"))}
+
+
+def suse_backported_capabilities():
+ return {
+ '__suse_reserved_pkg_all_versions_support': True,
+ '__suse_reserved_pkg_patches_support': True,
+ '__suse_reserved_saltutil_states_support': True
+ }
--
2.39.2

View File

@ -0,0 +1,83 @@
From d7682d1bc67ccdd63022c63b2d3229f8ab40d52b Mon Sep 17 00:00:00 2001
From: Alexander Graul <agraul@suse.com>
Date: Tue, 18 Jan 2022 12:57:21 +0100
Subject: [PATCH] Add environment variable to know if yum is invoked from
Salt(bsc#1057635)
---
salt/modules/yumpkg.py | 23 +++++++++++++++++------
1 file changed, 17 insertions(+), 6 deletions(-)
diff --git a/salt/modules/yumpkg.py b/salt/modules/yumpkg.py
index 4d0070f21a..b362d30bf4 100644
--- a/salt/modules/yumpkg.py
+++ b/salt/modules/yumpkg.py
@@ -964,7 +964,9 @@ def list_repo_pkgs(*args, **kwargs):
None
if _yum() != "yum"
else LooseVersion(
- __salt__["cmd.run"](["yum", "--version"], python_shell=False)
+ __salt__["cmd.run"](
+ ["yum", "--version"], python_shell=False, env={"SALT_RUNNING": "1"}
+ )
.splitlines()[0]
.strip()
)
@@ -2474,7 +2476,9 @@ def list_holds(pattern=__HOLD_PATTERN, full=True):
"""
_check_versionlock()
- out = __salt__["cmd.run"]([_yum(), "versionlock", "list"], python_shell=False)
+ out = __salt__["cmd.run"](
+ [_yum(), "versionlock", "list"], python_shell=False, env={"SALT_RUNNING": "1"}
+ )
ret = []
for line in salt.utils.itertools.split(out, "\n"):
match = _get_hold(line, pattern=pattern, full=full)
@@ -2542,7 +2546,10 @@ def group_list():
}
out = __salt__["cmd.run_stdout"](
- [_yum(), "grouplist", "hidden"], output_loglevel="trace", python_shell=False
+ [_yum(), "grouplist", "hidden"],
+ output_loglevel="trace",
+ python_shell=False,
+ env={"SALT_RUNNING": "1"},
)
key = None
for line in salt.utils.itertools.split(out, "\n"):
@@ -2613,7 +2620,9 @@ def group_info(name, expand=False, ignore_groups=None):
ret[pkgtype] = set()
cmd = [_yum(), "--quiet", "groupinfo", name]
- out = __salt__["cmd.run_stdout"](cmd, output_loglevel="trace", python_shell=False)
+ out = __salt__["cmd.run_stdout"](
+ cmd, output_loglevel="trace", python_shell=False, env={"SALT_RUNNING": "1"}
+ )
g_info = {}
for line in salt.utils.itertools.split(out, "\n"):
@@ -3342,7 +3351,9 @@ def download(*packages, **kwargs):
cmd = ["yumdownloader", "-q", "--destdir={}".format(CACHE_DIR)]
cmd.extend(packages)
- __salt__["cmd.run"](cmd, output_loglevel="trace", python_shell=False)
+ __salt__["cmd.run"](
+ cmd, output_loglevel="trace", python_shell=False, env={"SALT_RUNNING": "1"}
+ )
ret = {}
for dld_result in os.listdir(CACHE_DIR):
if not dld_result.endswith(".rpm"):
@@ -3418,7 +3429,7 @@ def _get_patches(installed_only=False):
patches = {}
cmd = [_yum(), "--quiet", "updateinfo", "list", "all"]
- ret = __salt__["cmd.run_stdout"](cmd, python_shell=False)
+ ret = __salt__["cmd.run_stdout"](cmd, python_shell=False, env={"SALT_RUNNING": "1"})
parsing_errors = False
for line in salt.utils.itertools.split(ret, os.linesep):
--
2.39.2

File diff suppressed because it is too large Load Diff

View File

@ -0,0 +1,38 @@
From 1a5716365e0c3b8d290759847f4046f28ee4b79f Mon Sep 17 00:00:00 2001
From: Victor Zhestkov <vzhestkov@suse.com>
Date: Wed, 15 May 2024 09:53:20 +0200
Subject: [PATCH] Add missing contextvars dependency in salt.version
---
salt/version.py | 1 +
tests/unit/states/test_pip_state.py | 2 +-
2 files changed, 2 insertions(+), 1 deletion(-)
diff --git a/salt/version.py b/salt/version.py
index 44372830b2..b2643550e9 100644
--- a/salt/version.py
+++ b/salt/version.py
@@ -717,6 +717,7 @@ def dependency_information(include_salt_cloud=False):
("docker-py", "docker", "__version__"),
("packaging", "packaging", "__version__"),
("looseversion", "looseversion", None),
+ ("contextvars", "contextvars", None),
("relenv", "relenv", "__version__"),
]
diff --git a/tests/unit/states/test_pip_state.py b/tests/unit/states/test_pip_state.py
index d70b115000..fe5d171a15 100644
--- a/tests/unit/states/test_pip_state.py
+++ b/tests/unit/states/test_pip_state.py
@@ -419,7 +419,7 @@ class PipStateInstallationErrorTest(TestCase):
def test_importable_installation_error(self):
extra_requirements = []
for name, version in salt.version.dependency_information():
- if name in ["PyYAML", "packaging", "looseversion"]:
+ if name in ["PyYAML", "packaging", "looseversion", "contextvars"]:
extra_requirements.append("{}=={}".format(name, version))
failures = {}
pip_version_requirements = [
--
2.45.0

View File

@ -0,0 +1,26 @@
From 3ef2071daf7a415f2c43e1339affe2b7cad93b3e Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Pablo=20Su=C3=A1rez=20Hern=C3=A1ndez?=
<psuarezhernandez@suse.com>
Date: Thu, 28 May 2020 09:37:08 +0100
Subject: [PATCH] Add publish_batch to ClearFuncs exposed methods
---
salt/master.py | 1 +
1 file changed, 1 insertion(+)
diff --git a/salt/master.py b/salt/master.py
index 2a526b4f21..a0552fa232 100644
--- a/salt/master.py
+++ b/salt/master.py
@@ -1960,6 +1960,7 @@ class ClearFuncs(TransportMethods):
expose_methods = (
"ping",
"publish",
+ "publish_batch",
"get_token",
"mk_token",
"wheel",
--
2.39.2

View File

@ -0,0 +1,795 @@
From 3fd6c0c6793632c819fb5f8fb3b3538463eaaccc Mon Sep 17 00:00:00 2001
From: Victor Zhestkov <vzhestkov@suse.com>
Date: Thu, 24 Feb 2022 16:52:24 +0300
Subject: [PATCH] Add salt-ssh support with venv-salt-minion - 3004
(#493)
* Add salt-ssh support with venv-salt-minion
* Add some comments and drop the commented line
* Fix return in check_venv_hash_file
* Convert all script parameters to strings
* Reduce the size of minion response
Minion response contains SSH_PY_CODE wrapped to base64.
This fix reduces the size of the response in DEBUG logging
* Make VENV_HASH_FILE global
* Pass the context to roster modules
* Avoid race condition on loading roster modules
* Prevent simultaneous to salt-ssh minion
* Make ssh session grace time configurable
* Prevent possible segfault by GC
* Revert "Avoid race condition on loading roster modules"
This reverts commit 8ff822a162cc494d3528184aef983ad20e09f4e2.
* Prevent deadlocks with importlib on using LazyLoader
* Make logging on salt-ssh errors more informative
* Add comments about using salt.loader.LOAD_LOCK
* Fix test_loader test
* Prevent deadlocks on using logging
* Use collections.deque instead of list for salt-ssh
Suggested by @agraul
* Get proper exitstatus from salt.utils.vt.Terminal
to prevent empty event returns due to improperly detecting
the child process as failed
* Do not run pre flight script for raw_shell
---
salt/_logging/impl.py | 55 +++++++-----
salt/client/ssh/__init__.py | 157 ++++++++++++++++++++++++++++-----
salt/client/ssh/client.py | 7 +-
salt/client/ssh/shell.py | 8 ++
salt/client/ssh/ssh_py_shim.py | 108 +++++++++++++----------
salt/loader/__init__.py | 31 ++++++-
salt/netapi/__init__.py | 3 +-
salt/roster/__init__.py | 6 +-
tests/unit/test_loader.py | 2 +-
9 files changed, 278 insertions(+), 99 deletions(-)
diff --git a/salt/_logging/impl.py b/salt/_logging/impl.py
index cc18f49a9e..e050f43caf 100644
--- a/salt/_logging/impl.py
+++ b/salt/_logging/impl.py
@@ -14,6 +14,7 @@ import re
import socket
import sys
import traceback
+import threading
import types
import urllib.parse
@@ -104,6 +105,10 @@ DFLT_LOG_DATEFMT_LOGFILE = "%Y-%m-%d %H:%M:%S"
DFLT_LOG_FMT_CONSOLE = "[%(levelname)-8s] %(message)s"
DFLT_LOG_FMT_LOGFILE = "%(asctime)s,%(msecs)03d [%(name)-17s:%(lineno)-4d][%(levelname)-8s][%(process)d] %(message)s"
+# LOG_LOCK is used to prevent deadlocks on using logging
+# in combination with multiprocessing with salt-api
+LOG_LOCK = threading.Lock()
+
class SaltLogRecord(logging.LogRecord):
def __init__(self, *args, **kwargs):
@@ -270,27 +275,35 @@ class SaltLoggingClass(LOGGING_LOGGER_CLASS, metaclass=LoggingMixinMeta):
else:
extra["exc_info_on_loglevel"] = exc_info_on_loglevel
- if sys.version_info < (3, 8):
- LOGGING_LOGGER_CLASS._log(
- self,
- level,
- msg,
- args,
- exc_info=exc_info,
- extra=extra,
- stack_info=stack_info,
- )
- else:
- LOGGING_LOGGER_CLASS._log(
- self,
- level,
- msg,
- args,
- exc_info=exc_info,
- extra=extra,
- stack_info=stack_info,
- stacklevel=stacklevel,
- )
+ try:
+ LOG_LOCK.acquire()
+ if sys.version_info < (3,):
+ LOGGING_LOGGER_CLASS._log(
+ self, level, msg, args, exc_info=exc_info, extra=extra
+ )
+ elif sys.version_info < (3, 8):
+ LOGGING_LOGGER_CLASS._log(
+ self,
+ level,
+ msg,
+ args,
+ exc_info=exc_info,
+ extra=extra,
+ stack_info=stack_info,
+ )
+ else:
+ LOGGING_LOGGER_CLASS._log(
+ self,
+ level,
+ msg,
+ args,
+ exc_info=exc_info,
+ extra=extra,
+ stack_info=stack_info,
+ stacklevel=stacklevel,
+ )
+ finally:
+ LOG_LOCK.release()
def makeRecord(
self,
diff --git a/salt/client/ssh/__init__.py b/salt/client/ssh/__init__.py
index 19089ce8ad..e6837df4e5 100644
--- a/salt/client/ssh/__init__.py
+++ b/salt/client/ssh/__init__.py
@@ -6,11 +6,13 @@ import base64
import binascii
import copy
import datetime
+import gc
import getpass
import hashlib
import logging
import multiprocessing
import os
+import psutil
import queue
import re
import shlex
@@ -20,6 +22,7 @@ import tarfile
import tempfile
import time
import uuid
+from collections import deque
import salt.client.ssh.shell
import salt.client.ssh.wrapper
@@ -47,6 +50,7 @@ import salt.utils.url
import salt.utils.verify
from salt._logging import LOG_LEVELS
from salt._logging.mixins import MultiprocessingStateMixin
+from salt._logging.impl import LOG_LOCK
from salt.template import compile_template
from salt.utils.process import Process
from salt.utils.zeromq import zmq
@@ -146,15 +150,26 @@ if [ "$SUDO" ] && [ "$SUDO_USER" ]
then SUDO="$SUDO -u $SUDO_USER"
fi
EX_PYTHON_INVALID={EX_THIN_PYTHON_INVALID}
-PYTHON_CMDS="python3 /usr/libexec/platform-python python27 python2.7 python26 python2.6 python2 python"
+set +x
+SSH_PY_CODE='import base64;
+ exec(base64.b64decode("""{{SSH_PY_CODE}}""").decode("utf-8"))'
+if [ -n "$DEBUG" ]
+ then set -x
+fi
+PYTHON_CMDS="/var/tmp/venv-salt-minion/bin/python python3 /usr/libexec/platform-python python27 python2.7 python26 python2.6 python2 python"
for py_cmd in $PYTHON_CMDS
do
if command -v "$py_cmd" >/dev/null 2>&1 && "$py_cmd" -c "import sys; sys.exit(not (sys.version_info >= (2, 6)));"
then
py_cmd_path=`"$py_cmd" -c 'from __future__ import print_function;import sys; print(sys.executable);'`
cmdpath=`command -v $py_cmd 2>/dev/null || which $py_cmd 2>/dev/null`
+ cmdpath=`readlink -f $cmdpath`
if file $cmdpath | grep "shell script" > /dev/null
then
+ if echo $cmdpath | grep venv-salt-minion > /dev/null
+ then
+ exec $SUDO "$cmdpath" -c "$SSH_PY_CODE"
+ fi
ex_vars="'PATH', 'LD_LIBRARY_PATH', 'MANPATH', \
'XDG_DATA_DIRS', 'PKG_CONFIG_PATH'"
export `$py_cmd -c \
@@ -166,13 +181,9 @@ do
exec $SUDO PATH=$PATH LD_LIBRARY_PATH=$LD_LIBRARY_PATH \
MANPATH=$MANPATH XDG_DATA_DIRS=$XDG_DATA_DIRS \
PKG_CONFIG_PATH=$PKG_CONFIG_PATH \
- "$py_cmd_path" -c \
- 'import base64;
- exec(base64.b64decode("""{{SSH_PY_CODE}}""").decode("utf-8"))'
+ "$py_cmd_path" -c "$SSH_PY_CODE"
else
- exec $SUDO "$py_cmd_path" -c \
- 'import base64;
- exec(base64.b64decode("""{{SSH_PY_CODE}}""").decode("utf-8"))'
+ exec $SUDO "$py_cmd_path" -c "$SSH_PY_CODE"
fi
exit 0
else
@@ -189,6 +200,9 @@ EOF'''.format(
]
)
+# The file on a salt-ssh minion used to identify whether the Salt Bundle was deployed
+VENV_HASH_FILE = "/var/tmp/venv-salt-minion/venv-hash.txt"
+
if not salt.utils.platform.is_windows() and not salt.utils.platform.is_junos():
shim_file = os.path.join(os.path.dirname(__file__), "ssh_py_shim.py")
if not os.path.exists(shim_file):
@@ -209,7 +223,7 @@ class SSH(MultiprocessingStateMixin):
ROSTER_UPDATE_FLAG = "#__needs_update"
- def __init__(self, opts):
+ def __init__(self, opts, context=None):
self.__parsed_rosters = {SSH.ROSTER_UPDATE_FLAG: True}
pull_sock = os.path.join(opts["sock_dir"], "master_event_pull.ipc")
if os.path.exists(pull_sock) and zmq:
@@ -236,7 +250,9 @@ class SSH(MultiprocessingStateMixin):
else "glob"
)
self._expand_target()
- self.roster = salt.roster.Roster(self.opts, self.opts.get("roster", "flat"))
+ self.roster = salt.roster.Roster(
+ self.opts, self.opts.get("roster", "flat"), context=context
+ )
self.targets = self.roster.targets(self.opts["tgt"], self.tgt_type)
if not self.targets:
self._update_targets()
@@ -316,6 +332,13 @@ class SSH(MultiprocessingStateMixin):
extended_cfg=self.opts.get("ssh_ext_alternatives"),
)
self.mods = mod_data(self.fsclient)
+ self.cache = salt.cache.Cache(self.opts)
+ self.master_id = self.opts["id"]
+ self.max_pid_wait = int(self.opts.get("ssh_max_pid_wait", 600))
+ self.session_flock_file = os.path.join(
+ self.opts["cachedir"], "salt-ssh.session.lock"
+ )
+ self.ssh_session_grace_time = int(self.opts.get("ssh_session_grace_time", 3))
# __setstate__ and __getstate__ are only used on spawning platforms.
def __setstate__(self, state):
@@ -546,6 +569,8 @@ class SSH(MultiprocessingStateMixin):
"""
Run the routine in a "Thread", put a dict on the queue
"""
+ LOG_LOCK.release()
+ salt.loader.LOAD_LOCK.release()
opts = copy.deepcopy(opts)
single = Single(
opts,
@@ -585,7 +610,7 @@ class SSH(MultiprocessingStateMixin):
"""
que = multiprocessing.Queue()
running = {}
- target_iter = self.targets.__iter__()
+ targets_queue = deque(self.targets.keys())
returned = set()
rets = set()
init = False
@@ -594,11 +619,43 @@ class SSH(MultiprocessingStateMixin):
log.error("No matching targets found in roster.")
break
if len(running) < self.opts.get("ssh_max_procs", 25) and not init:
- try:
- host = next(target_iter)
- except StopIteration:
+ if targets_queue:
+ host = targets_queue.popleft()
+ else:
init = True
continue
+ with salt.utils.files.flopen(self.session_flock_file, "w"):
+ cached_session = self.cache.fetch("salt-ssh/session", host)
+ if cached_session is not None and "ts" in cached_session:
+ prev_session_running = time.time() - cached_session["ts"]
+ if (
+ "pid" in cached_session
+ and cached_session.get("master_id", self.master_id)
+ == self.master_id
+ ):
+ pid_running = (
+ False
+ if cached_session["pid"] == 0
+ else psutil.pid_exists(cached_session["pid"])
+ )
+ if (
+ pid_running and prev_session_running < self.max_pid_wait
+ ) or (
+ not pid_running
+ and prev_session_running < self.ssh_session_grace_time
+ ):
+ targets_queue.append(host)
+ time.sleep(0.3)
+ continue
+ self.cache.store(
+ "salt-ssh/session",
+ host,
+ {
+ "pid": 0,
+ "master_id": self.master_id,
+ "ts": time.time(),
+ },
+ )
for default in self.defaults:
if default not in self.targets[host]:
self.targets[host][default] = self.defaults[default]
@@ -630,8 +687,38 @@ class SSH(MultiprocessingStateMixin):
mine,
)
routine = Process(target=self.handle_routine, args=args)
- routine.start()
+ # Explicitly call the garbage collector to prevent a possible
+ # segfault in the salt-api child process (bsc#1188607).
+ gc.collect()
+ try:
+ # salt.loader.LOAD_LOCK is used to prevent a deadlock
+ # with importlib in combination with multiprocessing (bsc#1182851).
+ # If a salt-api child process is created while a LazyLoader instance
+ # is loading a module, the child inherits that module's import lock in the
+ # acquired state; touching the module with importlib in the child then deadlocks.
+ #
+ # salt.loader.LOAD_LOCK is used to prevent salt-api child process creation
+ # while creating new instance of LazyLoader
+ # salt.loader.LOAD_LOCK must be released explicitly in self.handle_routine
+ salt.loader.LOAD_LOCK.acquire()
+ # The same solution applied to fix logging deadlock
+ # LOG_LOCK must be released explicitly in self.handle_routine
+ LOG_LOCK.acquire()
+ routine.start()
+ finally:
+ LOG_LOCK.release()
+ salt.loader.LOAD_LOCK.release()
running[host] = {"thread": routine}
+ with salt.utils.files.flopen(self.session_flock_file, "w"):
+ self.cache.store(
+ "salt-ssh/session",
+ host,
+ {
+ "pid": routine.pid,
+ "master_id": self.master_id,
+ "ts": time.time(),
+ },
+ )
continue
ret = {}
try:
@@ -662,12 +749,27 @@ class SSH(MultiprocessingStateMixin):
)
ret = {"id": host, "ret": error}
log.error(error)
+ log.error(
+ "PID %s did not return any data for host '%s'",
+ running[host]["thread"].pid,
+ host,
+ )
yield {ret["id"]: ret["ret"]}
running[host]["thread"].join()
rets.add(host)
for host in rets:
if host in running:
running.pop(host)
+ with salt.utils.files.flopen(self.session_flock_file, "w"):
+ self.cache.store(
+ "salt-ssh/session",
+ host,
+ {
+ "pid": 0,
+ "master_id": self.master_id,
+ "ts": time.time(),
+ },
+ )
if len(rets) >= len(self.targets):
break
# Sleep when limit or all threads started
@@ -1036,14 +1138,24 @@ class Single:
return False
return True
+ def check_venv_hash_file(self):
+ """
+ check if the venv exists on the remote machine
+ """
+ stdout, stderr, retcode = self.shell.exec_cmd(
+ "test -f {}".format(VENV_HASH_FILE)
+ )
+ return retcode == 0
+
def deploy(self):
"""
Deploy salt-thin
"""
- self.shell.send(
- self.thin,
- os.path.join(self.thin_dir, "salt-thin.tgz"),
- )
+ if not self.check_venv_hash_file():
+ self.shell.send(
+ self.thin,
+ os.path.join(self.thin_dir, "salt-thin.tgz"),
+ )
self.deploy_ext()
return True
@@ -1071,8 +1183,9 @@ class Single:
Returns tuple of (stdout, stderr, retcode)
"""
stdout = stderr = retcode = None
+ raw_shell = self.opts.get("raw_shell", False)
- if self.ssh_pre_flight:
+ if self.ssh_pre_flight and not raw_shell:
if not self.opts.get("ssh_run_pre_flight", False) and self.check_thin_dir():
log.info(
"%s thin dir already exists. Not running ssh_pre_flight script",
@@ -1086,14 +1199,16 @@ class Single:
stdout, stderr, retcode = self.run_ssh_pre_flight()
if retcode != 0:
log.error(
- "Error running ssh_pre_flight script %s", self.ssh_pre_file
+ "Error running ssh_pre_flight script %s for host '%s'",
+ self.ssh_pre_file,
+ self.target["host"],
)
return stdout, stderr, retcode
log.info(
"Successfully ran the ssh_pre_flight script: %s", self.ssh_pre_file
)
- if self.opts.get("raw_shell", False):
+ if raw_shell:
cmd_str = " ".join([self._escape_arg(arg) for arg in self.argv])
stdout, stderr, retcode = self.shell.exec_cmd(cmd_str)
diff --git a/salt/client/ssh/client.py b/salt/client/ssh/client.py
index be9247cb15..0b67598fc6 100644
--- a/salt/client/ssh/client.py
+++ b/salt/client/ssh/client.py
@@ -108,7 +108,7 @@ class SSHClient:
return sane_kwargs
def _prep_ssh(
- self, tgt, fun, arg=(), timeout=None, tgt_type="glob", kwarg=None, **kwargs
+ self, tgt, fun, arg=(), timeout=None, tgt_type="glob", kwarg=None, context=None, **kwargs
):
"""
Prepare the arguments
@@ -123,7 +123,7 @@ class SSHClient:
opts["selected_target_option"] = tgt_type
opts["tgt"] = tgt
opts["arg"] = arg
- return salt.client.ssh.SSH(opts)
+ return salt.client.ssh.SSH(opts, context=context)
def cmd_iter(
self,
@@ -160,7 +160,7 @@ class SSHClient:
final.update(ret)
return final
- def cmd_sync(self, low):
+ def cmd_sync(self, low, context=None):
"""
Execute a salt-ssh call synchronously.
@@ -193,6 +193,7 @@ class SSHClient:
low.get("timeout"),
low.get("tgt_type"),
low.get("kwarg"),
+ context=context,
**kwargs
)
diff --git a/salt/client/ssh/shell.py b/salt/client/ssh/shell.py
index cfa82d13c2..bc1ad034df 100644
--- a/salt/client/ssh/shell.py
+++ b/salt/client/ssh/shell.py
@@ -464,6 +464,14 @@ class Shell:
if stdout:
old_stdout = stdout
time.sleep(0.01)
+ if term.exitstatus is None:
+ try:
+ term.wait()
+ except: # pylint: disable=broad-except
+ # Broad exception handling is safe here: we only need to
+ # ensure the child process owned by term has finished, so that
+ # term.exitstatus holds a real value instead of None
+ pass
return ret_stdout, ret_stderr, term.exitstatus
finally:
term.close(terminate=True, kill=True)
diff --git a/salt/client/ssh/ssh_py_shim.py b/salt/client/ssh/ssh_py_shim.py
index b77749f495..293ea1b7fa 100644
--- a/salt/client/ssh/ssh_py_shim.py
+++ b/salt/client/ssh/ssh_py_shim.py
@@ -279,56 +279,72 @@ def main(argv): # pylint: disable=W0613
"""
Main program body
"""
- thin_path = os.path.join(OPTIONS.saltdir, THIN_ARCHIVE)
- if os.path.isfile(thin_path):
- if OPTIONS.checksum != get_hash(thin_path, OPTIONS.hashfunc):
- need_deployment()
- unpack_thin(thin_path)
- # Salt thin now is available to use
- else:
- if not sys.platform.startswith("win"):
- scpstat = subprocess.Popen(["/bin/sh", "-c", "command -v scp"]).wait()
- if scpstat != 0:
- sys.exit(EX_SCP_NOT_FOUND)
-
- if os.path.exists(OPTIONS.saltdir) and not os.path.isdir(OPTIONS.saltdir):
- sys.stderr.write(
- 'ERROR: salt path "{0}" exists but is not a directory\n'.format(
- OPTIONS.saltdir
+
+ virt_env = os.getenv("VIRTUAL_ENV", None)
+ # VIRTUAL_ENV environment variable is defined by venv-salt-minion wrapper
+ # it's used to check if the shim is running under this wrapper
+ venv_salt_call = None
+ if virt_env and "venv-salt-minion" in virt_env:
+ venv_salt_call = os.path.join(virt_env, "bin", "salt-call")
+ if not os.path.exists(venv_salt_call):
+ venv_salt_call = None
+ elif not os.path.exists(OPTIONS.saltdir):
+ os.makedirs(OPTIONS.saltdir)
+ cache_dir = os.path.join(OPTIONS.saltdir, "running_data", "var", "cache")
+ os.makedirs(os.path.join(cache_dir, "salt"))
+ os.symlink("salt", os.path.relpath(os.path.join(cache_dir, "venv-salt-minion")))
+
+ if venv_salt_call is None:
+ # Use Salt thin only if Salt Bundle (venv-salt-minion) is not available
+ thin_path = os.path.join(OPTIONS.saltdir, THIN_ARCHIVE)
+ if os.path.isfile(thin_path):
+ if OPTIONS.checksum != get_hash(thin_path, OPTIONS.hashfunc):
+ need_deployment()
+ unpack_thin(thin_path)
+ # Salt thin now is available to use
+ else:
+ if not sys.platform.startswith("win"):
+ scpstat = subprocess.Popen(["/bin/sh", "-c", "command -v scp"]).wait()
+ if scpstat != 0:
+ sys.exit(EX_SCP_NOT_FOUND)
+
+ if os.path.exists(OPTIONS.saltdir) and not os.path.isdir(OPTIONS.saltdir):
+ sys.stderr.write(
+ 'ERROR: salt path "{0}" exists but is'
+ " not a directory\n".format(OPTIONS.saltdir)
)
- )
- sys.exit(EX_CANTCREAT)
+ sys.exit(EX_CANTCREAT)
- if not os.path.exists(OPTIONS.saltdir):
- need_deployment()
+ if not os.path.exists(OPTIONS.saltdir):
+ need_deployment()
- code_checksum_path = os.path.normpath(
- os.path.join(OPTIONS.saltdir, "code-checksum")
- )
- if not os.path.exists(code_checksum_path) or not os.path.isfile(
- code_checksum_path
- ):
- sys.stderr.write(
- "WARNING: Unable to locate current code checksum: {0}.\n".format(
- code_checksum_path
- )
+ code_checksum_path = os.path.normpath(
+ os.path.join(OPTIONS.saltdir, "code-checksum")
)
- need_deployment()
- with open(code_checksum_path, "r") as vpo:
- cur_code_cs = vpo.readline().strip()
- if cur_code_cs != OPTIONS.code_checksum:
- sys.stderr.write(
- "WARNING: current code checksum {0} is different to {1}.\n".format(
- cur_code_cs, OPTIONS.code_checksum
+ if not os.path.exists(code_checksum_path) or not os.path.isfile(
+ code_checksum_path
+ ):
+ sys.stderr.write(
+ "WARNING: Unable to locate current code checksum: {0}.\n".format(
+ code_checksum_path
+ )
)
- )
- need_deployment()
- # Salt thin exists and is up-to-date - fall through and use it
+ need_deployment()
+ with open(code_checksum_path, "r") as vpo:
+ cur_code_cs = vpo.readline().strip()
+ if cur_code_cs != OPTIONS.code_checksum:
+ sys.stderr.write(
+ "WARNING: current code checksum {0} is different to {1}.\n".format(
+ cur_code_cs, OPTIONS.code_checksum
+ )
+ )
+ need_deployment()
+ # Salt thin exists and is up-to-date - fall through and use it
- salt_call_path = os.path.join(OPTIONS.saltdir, "salt-call")
- if not os.path.isfile(salt_call_path):
- sys.stderr.write('ERROR: thin is missing "{0}"\n'.format(salt_call_path))
- need_deployment()
+ salt_call_path = os.path.join(OPTIONS.saltdir, "salt-call")
+ if not os.path.isfile(salt_call_path):
+ sys.stderr.write('ERROR: thin is missing "{0}"\n'.format(salt_call_path))
+ need_deployment()
with open(os.path.join(OPTIONS.saltdir, "minion"), "w") as config:
config.write(OPTIONS.config + "\n")
@@ -351,8 +367,8 @@ def main(argv): # pylint: disable=W0613
argv_prepared = ARGS
salt_argv = [
- get_executable(),
- salt_call_path,
+ sys.executable if venv_salt_call is not None else get_executable(),
+ venv_salt_call if venv_salt_call is not None else salt_call_path,
"--retcode-passthrough",
"--local",
"--metadata",
diff --git a/salt/loader/__init__.py b/salt/loader/__init__.py
index 72a5e54401..32f8a7702c 100644
--- a/salt/loader/__init__.py
+++ b/salt/loader/__init__.py
@@ -9,6 +9,7 @@ import inspect
import logging
import os
import re
+import threading
import time
import types
@@ -31,7 +32,7 @@ from salt.exceptions import LoaderError
from salt.template import check_render_pipe_str
from salt.utils import entrypoints
-from .lazy import SALT_BASE_PATH, FilterDictWrapper, LazyLoader
+from .lazy import SALT_BASE_PATH, FilterDictWrapper, LazyLoader as _LazyLoader
log = logging.getLogger(__name__)
@@ -81,6 +82,18 @@ SALT_INTERNAL_LOADERS_PATHS = (
str(SALT_BASE_PATH / "wheel"),
)
+LOAD_LOCK = threading.Lock()
+
+
+def LazyLoader(*args, **kwargs):
+ # This wrapper is used to prevent deadlocks with importlib (bsc#1182851)
+ # LOAD_LOCK is also used directly in salt.client.ssh.SSH
+ try:
+ LOAD_LOCK.acquire()
+ return _LazyLoader(*args, **kwargs)
+ finally:
+ LOAD_LOCK.release()
+
def static_loader(
opts,
@@ -725,7 +738,7 @@ def fileserver(opts, backends, loaded_base_name=None):
)
-def roster(opts, runner=None, utils=None, whitelist=None, loaded_base_name=None):
+def roster(opts, runner=None, utils=None, whitelist=None, loaded_base_name=None, context=None):
"""
Returns the roster modules
@@ -736,12 +749,15 @@ def roster(opts, runner=None, utils=None, whitelist=None, loaded_base_name=None)
:param str loaded_base_name: The imported modules namespace when imported
by the salt loader.
"""
+ if context is None:
+ context = {}
+
return LazyLoader(
_module_dirs(opts, "roster"),
opts,
tag="roster",
whitelist=whitelist,
- pack={"__runner__": runner, "__utils__": utils},
+ pack={"__runner__": runner, "__utils__": utils, "__context__": context},
extra_module_dirs=utils.module_dirs if utils else None,
loaded_base_name=loaded_base_name,
)
@@ -933,7 +949,14 @@ def render(
)
rend = FilterDictWrapper(ret, ".render")
- if not check_render_pipe_str(
+ def _check_render_pipe_str(pipestr, renderers, blacklist, whitelist):
+ try:
+ LOAD_LOCK.acquire()
+ return check_render_pipe_str(pipestr, renderers, blacklist, whitelist)
+ finally:
+ LOAD_LOCK.release()
+
+ if not _check_render_pipe_str(
opts["renderer"], rend, opts["renderer_blacklist"], opts["renderer_whitelist"]
):
err = (
diff --git a/salt/netapi/__init__.py b/salt/netapi/__init__.py
index a89c1a19af..8a28c48460 100644
--- a/salt/netapi/__init__.py
+++ b/salt/netapi/__init__.py
@@ -79,6 +79,7 @@ class NetapiClient:
self.loadauth = salt.auth.LoadAuth(apiopts)
self.key = salt.daemons.masterapi.access_keys(apiopts)
self.ckminions = salt.utils.minions.CkMinions(apiopts)
+ self.context = {}
def _is_master_running(self):
"""
@@ -245,7 +246,7 @@ class NetapiClient:
with salt.client.ssh.client.SSHClient(
mopts=self.opts, disable_custom_roster=True
) as client:
- return client.cmd_sync(kwargs)
+ return client.cmd_sync(kwargs, context=self.context)
def runner(self, fun, timeout=None, full_return=False, **kwargs):
"""
diff --git a/salt/roster/__init__.py b/salt/roster/__init__.py
index fc7339d785..ea23d550d7 100644
--- a/salt/roster/__init__.py
+++ b/salt/roster/__init__.py
@@ -59,7 +59,7 @@ class Roster:
minion aware
"""
- def __init__(self, opts, backends="flat"):
+ def __init__(self, opts, backends="flat", context=None):
self.opts = opts
if isinstance(backends, list):
self.backends = backends
@@ -71,7 +71,9 @@ class Roster:
self.backends = ["flat"]
utils = salt.loader.utils(self.opts)
runner = salt.loader.runner(self.opts, utils=utils)
- self.rosters = salt.loader.roster(self.opts, runner=runner, utils=utils)
+ self.rosters = salt.loader.roster(
+ self.opts, runner=runner, utils=utils, context=context
+ )
def _gen_back(self):
"""
diff --git a/tests/unit/test_loader.py b/tests/unit/test_loader.py
index cf33903320..1b616375b3 100644
--- a/tests/unit/test_loader.py
+++ b/tests/unit/test_loader.py
@@ -1697,7 +1697,7 @@ class LazyLoaderRefreshFileMappingTest(TestCase):
cls.funcs = salt.loader.minion_mods(cls.opts, utils=cls.utils, proxy=cls.proxy)
def setUp(self):
- class LazyLoaderMock(salt.loader.LazyLoader):
+ class LazyLoaderMock(salt.loader._LazyLoader):
pass
self.LOADER_CLASS = LazyLoaderMock
--
2.39.2

View File

@ -0,0 +1,41 @@
From bad9e783e1a6923d85bdb1477a2e9766887a511e Mon Sep 17 00:00:00 2001
From: Victor Zhestkov <35733135+vzhestkov@users.noreply.github.com>
Date: Thu, 18 Feb 2021 14:49:38 +0300
Subject: [PATCH] Add sleep on exception handling on minion connection
attempt to the master (bsc#1174855) (#321)
* Async batch implementation fix
* Add sleep on exception handling on minion connection attempt to the master (bsc#1174855)
---
salt/minion.py | 6 ++++++
1 file changed, 6 insertions(+)
diff --git a/salt/minion.py b/salt/minion.py
index 2f905e4a4f..c3b65f16c3 100644
--- a/salt/minion.py
+++ b/salt/minion.py
@@ -1123,6 +1123,9 @@ class MinionManager(MinionBase):
last = 0 # never have we signed in
auth_wait = minion.opts["acceptance_wait_time"]
failed = False
+ retry_wait = 1
+ retry_wait_inc = 1
+ max_retry_wait = 20
while True:
try:
if minion.opts.get("beacons_before_connect", False):
@@ -1161,6 +1164,9 @@ class MinionManager(MinionBase):
minion.opts["master"],
exc_info=True,
)
+ yield salt.ext.tornado.gen.sleep(retry_wait)
+ if retry_wait < max_retry_wait:
+ retry_wait += retry_wait_inc
# Multi Master Tune In
def tune_in(self):
--
2.39.2

View File

@ -0,0 +1,26 @@
From 94e702e83c05814296ea8987a722b71e99117360 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Pablo=20Su=C3=A1rez=20Hern=C3=A1ndez?=
<psuarezhernandez@suse.com>
Date: Wed, 22 May 2019 13:00:46 +0100
Subject: [PATCH] Add standalone configuration file for enabling package
formulas
---
conf/suse/standalone-formulas-configuration.conf | 4 ++++
1 file changed, 4 insertions(+)
create mode 100644 conf/suse/standalone-formulas-configuration.conf
diff --git a/conf/suse/standalone-formulas-configuration.conf b/conf/suse/standalone-formulas-configuration.conf
new file mode 100644
index 0000000000..94d05fb2ee
--- /dev/null
+++ b/conf/suse/standalone-formulas-configuration.conf
@@ -0,0 +1,4 @@
+file_roots:
+ base:
+ - /usr/share/salt-formulas/states
+ - /srv/salt
--
2.39.2

View File

@ -0,0 +1,369 @@
From 2e103365c50fe42a72de3e9d57c3fdbee47454aa Mon Sep 17 00:00:00 2001
From: Michael Calmer <mc@suse.de>
Date: Fri, 8 Jul 2022 10:15:37 +0200
Subject: [PATCH] add support for gpgautoimport (#539)
* add support for gpgautoimport to refresh_db in the zypperpkg module
* call refresh_db function from mod_repo
* call refresh_db with kwargs where possible
* ignore no repos defined exit code
* fix zypperpkg test after adding more success return codes
---
salt/modules/zypperpkg.py | 47 +++++++---
tests/unit/modules/test_zypperpkg.py | 124 +++++++++++++++++++++++----
2 files changed, 140 insertions(+), 31 deletions(-)
diff --git a/salt/modules/zypperpkg.py b/salt/modules/zypperpkg.py
index 318c871b37..051f8f72c7 100644
--- a/salt/modules/zypperpkg.py
+++ b/salt/modules/zypperpkg.py
@@ -623,7 +623,7 @@ def list_upgrades(refresh=True, root=None, **kwargs):
salt '*' pkg.list_upgrades
"""
if refresh:
- refresh_db(root)
+ refresh_db(root, **kwargs)
ret = dict()
cmd = ["list-updates"]
@@ -737,7 +737,7 @@ def info_available(*names, **kwargs):
# Refresh db before extracting the latest package
if kwargs.get("refresh", True):
- refresh_db(root)
+ refresh_db(root, **kwargs)
pkg_info = []
batch = names[:]
@@ -1439,7 +1439,6 @@ def mod_repo(repo, **kwargs):
cmd_opt.append(kwargs.get("name"))
if kwargs.get("gpgautoimport") is True:
- global_cmd_opt.append("--gpg-auto-import-keys")
call_refresh = True
if cmd_opt:
@@ -1451,8 +1450,8 @@ def mod_repo(repo, **kwargs):
# when used with "zypper ar --refresh" or "zypper mr --refresh"
# --gpg-auto-import-keys is not doing anything
# so we need to specifically refresh here with --gpg-auto-import-keys
- refresh_opts = global_cmd_opt + ["refresh"] + [repo]
- __zypper__(root=root).xml.call(*refresh_opts)
+ kwargs.update({"repos": repo})
+ refresh_db(root=root, **kwargs)
elif not added and not cmd_opt:
comment = "Specified arguments did not result in modification of repo"
@@ -1463,7 +1462,7 @@ def mod_repo(repo, **kwargs):
return repo
-def refresh_db(force=None, root=None):
+def refresh_db(force=None, root=None, **kwargs):
"""
Trigger a repository refresh by calling ``zypper refresh``. Refresh will run
with ``--force`` if the "force=True" flag is passed on the CLI or
@@ -1474,6 +1473,17 @@ def refresh_db(force=None, root=None):
{'<database name>': Bool}
+ gpgautoimport : False
+ If set to True, automatically trust and import public GPG key for
+ the repository.
+
+ .. versionadded:: 3005
+
+ repos
+ Refresh just the specified repos
+
+ .. versionadded:: 3005
+
root
operate on a different root directory.
@@ -1494,11 +1504,22 @@ def refresh_db(force=None, root=None):
salt.utils.pkg.clear_rtag(__opts__)
ret = {}
refresh_opts = ["refresh"]
+ global_opts = []
if force is None:
force = __pillar__.get("zypper", {}).get("refreshdb_force", True)
if force:
refresh_opts.append("--force")
- out = __zypper__(root=root).refreshable.call(*refresh_opts)
+ repos = kwargs.get("repos", [])
+ refresh_opts.extend([repos] if not isinstance(repos, list) else repos)
+
+ if kwargs.get("gpgautoimport", False):
+ global_opts.append("--gpg-auto-import-keys")
+
+ # We do the actual call to zypper refresh.
+ # We ignore retcode 6 which is returned when there are no repositories defined.
+ out = __zypper__(root=root).refreshable.call(
+ *global_opts, *refresh_opts, success_retcodes=[0, 6]
+ )
for line in out.splitlines():
if not line:
@@ -1683,7 +1704,7 @@ def install(
'arch': '<new-arch>'}}}
"""
if refresh:
- refresh_db(root)
+ refresh_db(root, **kwargs)
try:
pkg_params, pkg_type = __salt__["pkg_resource.parse_targets"](
@@ -1980,7 +2001,7 @@ def upgrade(
cmd_update.insert(0, "--no-gpg-checks")
if refresh:
- refresh_db(root)
+ refresh_db(root, **kwargs)
if dryrun:
cmd_update.append("--dry-run")
@@ -2808,7 +2829,7 @@ def search(criteria, refresh=False, **kwargs):
root = kwargs.get("root", None)
if refresh:
- refresh_db(root)
+ refresh_db(root, **kwargs)
cmd = ["search"]
if kwargs.get("match") == "exact":
@@ -2959,7 +2980,7 @@ def download(*packages, **kwargs):
refresh = kwargs.get("refresh", False)
if refresh:
- refresh_db(root)
+ refresh_db(root, **kwargs)
pkg_ret = {}
for dld_result in (
@@ -3111,7 +3132,7 @@ def list_patches(refresh=False, root=None, **kwargs):
salt '*' pkg.list_patches
"""
if refresh:
- refresh_db(root)
+ refresh_db(root, **kwargs)
return _get_patches(root=root)
@@ -3205,7 +3226,7 @@ def resolve_capabilities(pkgs, refresh=False, root=None, **kwargs):
salt '*' pkg.resolve_capabilities resolve_capabilities=True w3m_ssl
"""
if refresh:
- refresh_db(root)
+ refresh_db(root, **kwargs)
ret = list()
for pkg in pkgs:
diff --git a/tests/unit/modules/test_zypperpkg.py b/tests/unit/modules/test_zypperpkg.py
index e85c93da3b..f5b6d74b6f 100644
--- a/tests/unit/modules/test_zypperpkg.py
+++ b/tests/unit/modules/test_zypperpkg.py
@@ -377,7 +377,12 @@ class ZypperTestCase(TestCase, LoaderModuleMockMixin):
run_out = {"stderr": "", "stdout": "\n".join(ref_out), "retcode": 0}
zypper_mock = MagicMock(return_value=run_out)
- call_kwargs = {"output_loglevel": "trace", "python_shell": False, "env": {}}
+ call_kwargs = {
+ "output_loglevel": "trace",
+ "python_shell": False,
+ "env": {},
+ "success_retcodes": [0, 6],
+ }
with patch.dict(zypper.__salt__, {"cmd.run_all": zypper_mock}):
with patch.object(salt.utils.pkg, "clear_rtag", Mock()):
result = zypper.refresh_db()
@@ -395,6 +400,73 @@ class ZypperTestCase(TestCase, LoaderModuleMockMixin):
zypper_mock.assert_called_with(
["zypper", "--non-interactive", "refresh", "--force"], **call_kwargs
)
+ zypper.refresh_db(gpgautoimport=True)
+ zypper_mock.assert_called_with(
+ [
+ "zypper",
+ "--non-interactive",
+ "--gpg-auto-import-keys",
+ "refresh",
+ "--force",
+ ],
+ **call_kwargs
+ )
+ zypper.refresh_db(gpgautoimport=True, force=True)
+ zypper_mock.assert_called_with(
+ [
+ "zypper",
+ "--non-interactive",
+ "--gpg-auto-import-keys",
+ "refresh",
+ "--force",
+ ],
+ **call_kwargs
+ )
+ zypper.refresh_db(gpgautoimport=True, force=False)
+ zypper_mock.assert_called_with(
+ [
+ "zypper",
+ "--non-interactive",
+ "--gpg-auto-import-keys",
+ "refresh",
+ ],
+ **call_kwargs
+ )
+ zypper.refresh_db(
+ gpgautoimport=True,
+ refresh=True,
+ repos="mock-repo-name",
+ root=None,
+ url="http://repo.url/some/path",
+ )
+ zypper_mock.assert_called_with(
+ [
+ "zypper",
+ "--non-interactive",
+ "--gpg-auto-import-keys",
+ "refresh",
+ "--force",
+ "mock-repo-name",
+ ],
+ **call_kwargs
+ )
+ zypper.refresh_db(
+ gpgautoimport=True,
+ repos="mock-repo-name",
+ root=None,
+ url="http://repo.url/some/path",
+ )
+ zypper_mock.assert_called_with(
+ [
+ "zypper",
+ "--non-interactive",
+ "--gpg-auto-import-keys",
+ "refresh",
+ "--force",
+ "mock-repo-name",
+ ],
+ **call_kwargs
+ )
def test_info_installed(self):
"""
@@ -2082,18 +2154,23 @@ Repository 'DUMMY' not found by its alias, number, or URI.
url = self.new_repo_config["url"]
name = self.new_repo_config["name"]
- with zypper_patcher:
+ with zypper_patcher, patch.object(zypper, "refresh_db", Mock()) as refreshmock:
zypper.mod_repo(name, **{"url": url, "gpgautoimport": True})
self.assertEqual(
zypper.__zypper__(root=None).xml.call.call_args_list,
[
call("ar", url, name),
- call("--gpg-auto-import-keys", "refresh", name),
],
)
self.assertTrue(
zypper.__zypper__(root=None).refreshable.xml.call.call_count == 0
)
+ refreshmock.assert_called_once_with(
+ gpgautoimport=True,
+ repos=name,
+ root=None,
+ url="http://repo.url/some/path",
+ )
def test_repo_noadd_nomod_ref(self):
"""
@@ -2112,15 +2189,17 @@ Repository 'DUMMY' not found by its alias, number, or URI.
"salt.modules.zypperpkg", **self.zypper_patcher_config
)
- with zypper_patcher:
+ with zypper_patcher, patch.object(zypper, "refresh_db", Mock()) as refreshmock:
zypper.mod_repo(name, **{"url": url, "gpgautoimport": True})
- self.assertEqual(
- zypper.__zypper__(root=None).xml.call.call_args_list,
- [call("--gpg-auto-import-keys", "refresh", name)],
- )
self.assertTrue(
zypper.__zypper__(root=None).refreshable.xml.call.call_count == 0
)
+ refreshmock.assert_called_once_with(
+ gpgautoimport=True,
+ repos=name,
+ root=None,
+ url="http://repo.url/some/path",
+ )
def test_repo_add_mod_ref(self):
"""
@@ -2133,10 +2212,10 @@ Repository 'DUMMY' not found by its alias, number, or URI.
zypper_patcher = patch.multiple(
"salt.modules.zypperpkg", **self.zypper_patcher_config
)
-
url = self.new_repo_config["url"]
name = self.new_repo_config["name"]
- with zypper_patcher:
+
+ with zypper_patcher, patch.object(zypper, "refresh_db", Mock()) as refreshmock:
zypper.mod_repo(
name, **{"url": url, "refresh": True, "gpgautoimport": True}
)
@@ -2144,11 +2223,17 @@ Repository 'DUMMY' not found by its alias, number, or URI.
zypper.__zypper__(root=None).xml.call.call_args_list,
[
call("ar", url, name),
- call("--gpg-auto-import-keys", "refresh", name),
],
)
zypper.__zypper__(root=None).refreshable.xml.call.assert_called_once_with(
- "--gpg-auto-import-keys", "mr", "--refresh", name
+ "mr", "--refresh", name
+ )
+ refreshmock.assert_called_once_with(
+ gpgautoimport=True,
+ refresh=True,
+ repos=name,
+ root=None,
+ url="http://repo.url/some/path",
)
def test_repo_noadd_mod_ref(self):
@@ -2168,16 +2253,19 @@ Repository 'DUMMY' not found by its alias, number, or URI.
"salt.modules.zypperpkg", **self.zypper_patcher_config
)
- with zypper_patcher:
+ with zypper_patcher, patch.object(zypper, "refresh_db", Mock()) as refreshmock:
zypper.mod_repo(
name, **{"url": url, "refresh": True, "gpgautoimport": True}
)
- self.assertEqual(
- zypper.__zypper__(root=None).xml.call.call_args_list,
- [call("--gpg-auto-import-keys", "refresh", name)],
- )
zypper.__zypper__(root=None).refreshable.xml.call.assert_called_once_with(
- "--gpg-auto-import-keys", "mr", "--refresh", name
+ "mr", "--refresh", name
+ )
+ refreshmock.assert_called_once_with(
+ gpgautoimport=True,
+ refresh=True,
+ repos=name,
+ root=None,
+ url="http://repo.url/some/path",
)
def test_wildcard_to_query_match_all(self):
--
2.39.2

View File

@ -0,0 +1,97 @@
From ae4e1d1cc15b3c510bdd774a1dfeff67c522324a Mon Sep 17 00:00:00 2001
From: Marek Czernek <marek.czernek@suse.com>
Date: Tue, 17 Oct 2023 13:05:00 +0200
Subject: [PATCH] Allow all primitive grain types for autosign_grains
(#607)
* Allow all primitive grain types for autosign_grains
Signed-off-by: Marek Czernek <marek.czernek@suse.com>
* blacken daemons/masterapi.py and its test_auto_key
Signed-off-by: Marek Czernek <marek.czernek@suse.com>
---------
Signed-off-by: Marek Czernek <marek.czernek@suse.com>
Co-authored-by: Alexander Graul <agraul@suse.com>
---
changelog/61416.fixed.md | 1 +
changelog/63708.fixed.md | 1 +
salt/daemons/masterapi.py | 2 +-
.../pytests/unit/daemons/masterapi/test_auto_key.py | 13 +++++++------
4 files changed, 10 insertions(+), 7 deletions(-)
create mode 100644 changelog/61416.fixed.md
create mode 100644 changelog/63708.fixed.md
diff --git a/changelog/61416.fixed.md b/changelog/61416.fixed.md
new file mode 100644
index 0000000000..3203a0a1c6
--- /dev/null
+++ b/changelog/61416.fixed.md
@@ -0,0 +1 @@
+Allow all primitive grain types for autosign_grains
diff --git a/changelog/63708.fixed.md b/changelog/63708.fixed.md
new file mode 100644
index 0000000000..3203a0a1c6
--- /dev/null
+++ b/changelog/63708.fixed.md
@@ -0,0 +1 @@
+Allow all primitive grain types for autosign_grains
diff --git a/salt/daemons/masterapi.py b/salt/daemons/masterapi.py
index 3716c63d99..54aca64a76 100644
--- a/salt/daemons/masterapi.py
+++ b/salt/daemons/masterapi.py
@@ -366,7 +366,7 @@ class AutoKey:
line = salt.utils.stringutils.to_unicode(line).strip()
if line.startswith("#"):
continue
- if autosign_grains[grain] == line:
+ if str(autosign_grains[grain]) == line:
return True
return False
diff --git a/tests/pytests/unit/daemons/masterapi/test_auto_key.py b/tests/pytests/unit/daemons/masterapi/test_auto_key.py
index b3657b7f1b..54c3f22d2a 100644
--- a/tests/pytests/unit/daemons/masterapi/test_auto_key.py
+++ b/tests/pytests/unit/daemons/masterapi/test_auto_key.py
@@ -17,11 +17,11 @@ def gen_permissions(owner="", group="", others=""):
"""
ret = 0
for c in owner:
- ret |= getattr(stat, "S_I{}USR".format(c.upper()), 0)
+ ret |= getattr(stat, f"S_I{c.upper()}USR", 0)
for c in group:
- ret |= getattr(stat, "S_I{}GRP".format(c.upper()), 0)
+ ret |= getattr(stat, f"S_I{c.upper()}GRP", 0)
for c in others:
- ret |= getattr(stat, "S_I{}OTH".format(c.upper()), 0)
+ ret |= getattr(stat, f"S_I{c.upper()}OTH", 0)
return ret
@@ -256,16 +256,17 @@ def test_check_autosign_grains_no_autosign_grains_dir(auto_key):
_test_check_autosign_grains(test_func, auto_key, autosign_grains_dir=None)
-def test_check_autosign_grains_accept(auto_key):
+@pytest.mark.parametrize("grain_value", ["test_value", 123, True])
+def test_check_autosign_grains_accept(grain_value, auto_key):
"""
Asserts that autosigning from grains passes when a matching grain value is in an
autosign_grain file.
"""
def test_func(*args):
- assert auto_key.check_autosign_grains({"test_grain": "test_value"}) is True
+ assert auto_key.check_autosign_grains({"test_grain": grain_value}) is True
- file_content = "#test_ignore\ntest_value"
+ file_content = f"#test_ignore\n{grain_value}"
_test_check_autosign_grains(test_func, auto_key, file_content=file_content)
--
2.42.0

View File

@ -0,0 +1,164 @@
From 8ae54e8a0e12193507f1936f363c3438b4a006ee Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Yeray=20Guti=C3=A9rrez=20Cedr=C3=A9s?=
<yeray.gutierrez@suse.com>
Date: Tue, 23 Jan 2024 15:33:28 +0000
Subject: [PATCH] Allow kwargs for fileserver roots update
(bsc#1218482) (#618)
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit
* Allow kwargs for fileserver roots update (bsc#1218482)
* Prevent exceptions with fileserver.update when called via state
* Fix wrong logic and enhance tests around fileserver.update
* Remove test which is not longer valid
---------
Co-authored-by: Pablo Suárez Hernández <psuarezhernandez@suse.com>
---
changelog/65819.fixed.md | 1 +
salt/fileserver/roots.py | 8 ++--
salt/runners/fileserver.py | 6 +++
tests/integration/runners/test_fileserver.py | 40 ++++++++++++++++++--
tests/pytests/unit/fileserver/test_roots.py | 2 +-
5 files changed, 47 insertions(+), 10 deletions(-)
create mode 100644 changelog/65819.fixed.md
diff --git a/changelog/65819.fixed.md b/changelog/65819.fixed.md
new file mode 100644
index 0000000000..432f5c791c
--- /dev/null
+++ b/changelog/65819.fixed.md
@@ -0,0 +1 @@
+Prevent exceptions with fileserver.update when called via state
diff --git a/salt/fileserver/roots.py b/salt/fileserver/roots.py
index 4880cbab9b..a02b597c6f 100644
--- a/salt/fileserver/roots.py
+++ b/salt/fileserver/roots.py
@@ -193,9 +193,7 @@ def update():
os.makedirs(mtime_map_path_dir)
with salt.utils.files.fopen(mtime_map_path, "wb") as fp_:
for file_path, mtime in new_mtime_map.items():
- fp_.write(
- salt.utils.stringutils.to_bytes("{}:{}\n".format(file_path, mtime))
- )
+ fp_.write(salt.utils.stringutils.to_bytes(f"{file_path}:{mtime}\n"))
if __opts__.get("fileserver_events", False):
# if there is a change, fire an event
@@ -326,11 +324,11 @@ def _file_lists(load, form):
return []
list_cache = os.path.join(
list_cachedir,
- "{}.p".format(salt.utils.files.safe_filename_leaf(actual_saltenv)),
+ f"{salt.utils.files.safe_filename_leaf(actual_saltenv)}.p",
)
w_lock = os.path.join(
list_cachedir,
- ".{}.w".format(salt.utils.files.safe_filename_leaf(actual_saltenv)),
+ f".{salt.utils.files.safe_filename_leaf(actual_saltenv)}.w",
)
cache_match, refresh_cache, save_cache = salt.fileserver.check_file_list_cache(
__opts__, form, list_cache, w_lock
diff --git a/salt/runners/fileserver.py b/salt/runners/fileserver.py
index d75d7de0cf..1ed05b68ca 100644
--- a/salt/runners/fileserver.py
+++ b/salt/runners/fileserver.py
@@ -350,6 +350,12 @@ def update(backend=None, **kwargs):
salt-run fileserver.update backend=git remotes=myrepo,yourrepo
"""
fileserver = salt.fileserver.Fileserver(__opts__)
+
+ # Remove possible '__pub_user' in kwargs as it is not expected
+ # on "update" function for the different fileserver backends.
+ if "__pub_user" in kwargs:
+ del kwargs["__pub_user"]
+
fileserver.update(back=backend, **kwargs)
return True
diff --git a/tests/integration/runners/test_fileserver.py b/tests/integration/runners/test_fileserver.py
index ae8ab766aa..62f0da0c4a 100644
--- a/tests/integration/runners/test_fileserver.py
+++ b/tests/integration/runners/test_fileserver.py
@@ -202,15 +202,31 @@ class FileserverTest(ShellCase):
fileserver.update
"""
ret = self.run_run_plus(fun="fileserver.update")
- self.assertTrue(ret["return"])
+ self.assertTrue(ret["return"] is True)
# Backend submitted as a string
ret = self.run_run_plus(fun="fileserver.update", backend="roots")
- self.assertTrue(ret["return"])
+ self.assertTrue(ret["return"] is True)
# Backend submitted as a list
ret = self.run_run_plus(fun="fileserver.update", backend=["roots"])
- self.assertTrue(ret["return"])
+ self.assertTrue(ret["return"] is True)
+
+ # Possible '__pub_user' is removed from kwargs
+ ret = self.run_run_plus(
+ fun="fileserver.update", backend=["roots"], __pub_user="foo"
+ )
+ self.assertTrue(ret["return"] is True)
+
+ # Unknown arguments
+ ret = self.run_run_plus(
+ fun="fileserver.update", backend=["roots"], unknown_arg="foo"
+ )
+ self.assertIn(
+ "Passed invalid arguments: update() got an unexpected keyword argument"
+ " 'unknown_arg'",
+ ret["return"],
+ )
# Other arguments are passed to backend
def mock_gitfs_update(remotes=None):
@@ -225,7 +241,23 @@ class FileserverTest(ShellCase):
ret = self.run_run_plus(
fun="fileserver.update", backend="gitfs", remotes="myrepo,yourrepo"
)
- self.assertTrue(ret["return"])
+ self.assertTrue(ret["return"] is True)
+ mock_backend_func.assert_called_once_with(remotes="myrepo,yourrepo")
+
+ # Possible '__pub_user' arguments are removed from kwargs
+ mock_backend_func = create_autospec(mock_gitfs_update)
+ mock_return_value = {
+ "gitfs.envs": None, # This is needed to activate the backend
+ "gitfs.update": mock_backend_func,
+ }
+ with patch("salt.loader.fileserver", MagicMock(return_value=mock_return_value)):
+ ret = self.run_run_plus(
+ fun="fileserver.update",
+ backend="gitfs",
+ remotes="myrepo,yourrepo",
+ __pub_user="foo",
+ )
+ self.assertTrue(ret["return"] is True)
mock_backend_func.assert_called_once_with(remotes="myrepo,yourrepo")
# Unknown arguments are passed to backend
diff --git a/tests/pytests/unit/fileserver/test_roots.py b/tests/pytests/unit/fileserver/test_roots.py
index a8a80eea17..96bceb0fd3 100644
--- a/tests/pytests/unit/fileserver/test_roots.py
+++ b/tests/pytests/unit/fileserver/test_roots.py
@@ -236,7 +236,7 @@ def test_update_mtime_map():
# between Python releases.
lines_written = sorted(mtime_map_mock.write_calls())
expected = sorted(
- salt.utils.stringutils.to_bytes("{key}:{val}\n".format(key=key, val=val))
+ salt.utils.stringutils.to_bytes(f"{key}:{val}\n")
for key, val in new_mtime_map.items()
)
assert lines_written == expected, lines_written
--
2.43.0

View File

@ -0,0 +1,841 @@
From a36d6524e530eca32966f46597c88dbfd4b90e78 Mon Sep 17 00:00:00 2001
From: Martin Seidl <mseidl@suse.de>
Date: Tue, 27 Oct 2020 16:12:29 +0100
Subject: [PATCH] Allow vendor change option with zypper
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit
Fix novendorchange option (#284)
* Fixed novendorchange handling in zypperpkg
* refactor handling of novendorchange and fix tests
add patch support for allow vendor change option with zypper
Revert "add patch support for allow vendor change option with zypper"
This reverts commit cee4cc182b4740c912861c712dea7bc44eb70ffb.
Allow vendor change option with zypper (#313)
* add patch support for allow vendor change option with zypper
* adjust unit tests vendor change refactor, dropping cli arg
* Fix pr issues
Co-authored-by: Pablo Suárez Hernández <psuarezhernandez@suse.com>
* Fix unit test for allow vendor change on upgrade
* Add unit test with unsupported zypper version
Co-authored-by: Pablo Suárez Hernández <psuarezhernandez@suse.com>
Move vendor change logic to zypper class (#355)
* move vendor change logic to zypper class
* fix thing in zypperkg
* refactor unit tests
* Fix for syntax error
* Fix mocking issue in unit test
* fix issues with pr
* Fix for zypperpkg unit test after refactor of vendorchangeflags
Co-authored-by: Pablo Suárez Hernández <psuarezhernandez@suse.com>
* fix docs for vendor change options
* Fix doc strings, and clean up tests
Co-authored-by: Jochen Breuer <jbreuer@suse.de>
Co-authored-by: Pablo Suárez Hernández <psuarezhernandez@suse.com>
---
salt/modules/zypperpkg.py | 105 ++++--
tests/unit/modules/test_zypperpkg.py | 532 ++++++++++++++++++++++++++-
2 files changed, 612 insertions(+), 25 deletions(-)
diff --git a/salt/modules/zypperpkg.py b/salt/modules/zypperpkg.py
index 4bb10f445a..2da470bea3 100644
--- a/salt/modules/zypperpkg.py
+++ b/salt/modules/zypperpkg.py
@@ -36,6 +36,8 @@ import salt.utils.stringutils
import salt.utils.systemd
import salt.utils.versions
from salt.exceptions import CommandExecutionError, MinionError, SaltInvocationError
+
+# pylint: disable=import-error,redefined-builtin,no-name-in-module
from salt.utils.versions import LooseVersion
if salt.utils.files.is_fcntl_available():
@@ -140,6 +142,13 @@ class _Zypper:
self.__systemd_scope = False
self.__root = None
+ # Dist upgrade vendor change support (SLE12+)
+ self.dup_avc = False
+ # Install/Patch/Upgrade vendor change support (SLE15+)
+ self.inst_avc = False
+ # Flag if allow vendor change should be allowed
+ self.avc = False
+
# Call status
self.__called = False
@@ -184,6 +193,8 @@ class _Zypper:
self.__no_raise = True
elif item == "refreshable":
self.__refresh = True
+ elif item == "allow_vendor_change":
+ return self.__allow_vendor_change
elif item == "call":
return self.__call
else:
@@ -224,6 +235,33 @@ class _Zypper:
def pid(self):
return self.__call_result.get("pid", "")
+ def __allow_vendor_change(self, allowvendorchange, novendorchange):
+ if allowvendorchange or not novendorchange:
+ self.refresh_zypper_flags()
+ if self.dup_avc or self.inst_avc:
+ log.info("Enabling vendor change")
+ self.avc = True
+ else:
+ log.warning(
+ "Enabling/Disabling vendor changes is not supported on this Zypper version"
+ )
+ return self
+
+ def refresh_zypper_flags(self):
+ try:
+ zypp_version = version("zypper")
+ # zypper version 1.11.34 in SLE12 update supports vendor change for only dist upgrade
+ if version_cmp(zypp_version, "1.11.34") >= 0:
+ # zypper version supports vendor change for dist upgrade
+ self.dup_avc = True
+ # zypper version 1.14.8 in SLE15 update supports vendor change in install/patch/upgrading
+ if version_cmp(zypp_version, "1.14.8") >= 0:
+ self.inst_avc = True
+ else:
+ log.error("Failed to compare Zypper version")
+ except Exception as ex:
+ log.error("Unable to get Zypper version: {}".format(ex))
+
def _is_error(self):
"""
Is this is an error code?
@@ -362,6 +400,15 @@ class _Zypper:
if self.__systemd_scope:
cmd.extend(["systemd-run", "--scope"])
cmd.extend(self.__cmd)
+
+ if self.avc:
+ for i in ["install", "upgrade", "dist-upgrade"]:
+ if i in cmd:
+ if i == "install" and self.inst_avc:
+ cmd.insert(cmd.index(i) + 1, "--allow-vendor-change")
+ elif i in ["upgrade", "dist-upgrade"] and self.dup_avc:
+ cmd.insert(cmd.index(i) + 1, "--allow-vendor-change")
+
log.debug("Calling Zypper: %s", " ".join(cmd))
self.__call_result = __salt__["cmd.run_all"](cmd, **kwargs)
if self._check_result():
@@ -1490,6 +1537,8 @@ def install(
no_recommends=False,
root=None,
inclusion_detection=False,
+ novendorchange=True,
+ allowvendorchange=False,
**kwargs
):
"""
@@ -1537,6 +1586,13 @@ def install(
skip_verify
Skip the GPG verification check (e.g., ``--no-gpg-checks``)
+ novendorchange
+ DEPRECATED(use allowvendorchange): If set to True, do not allow vendor changes. Default: True
+
+ allowvendorchange
+ If set to True, vendor change is allowed. Default: False
+ If both allowvendorchange and novendorchange are passed, only allowvendorchange is used.
+
version
Can be either a version number, or the combination of a comparison
operator (<, >, <=, >=, =) and a version number (ex. '>1.2.3-4').
@@ -1702,6 +1758,7 @@ def install(
cmd_install.append(
kwargs.get("resolve_capabilities") and "--capability" or "--name"
)
+ # Install / patching / upgrade with vendor change support is only in SLE 15+ opensuse Leap 15+
if not refresh:
cmd_install.insert(0, "--no-refresh")
@@ -1738,6 +1795,7 @@ def install(
systemd_scope=systemd_scope,
root=root,
)
+ .allow_vendor_change(allowvendorchange, novendorchange)
.call(*cmd)
.splitlines()
):
@@ -1750,7 +1808,9 @@ def install(
while downgrades:
cmd = cmd_install + ["--force"] + downgrades[:500]
downgrades = downgrades[500:]
- __zypper__(no_repo_failure=ignore_repo_failure, root=root).call(*cmd)
+ __zypper__(no_repo_failure=ignore_repo_failure, root=root).allow_vendor_change(
+ allowvendorchange, novendorchange
+ ).call(*cmd)
_clean_cache()
new = (
@@ -1783,7 +1843,8 @@ def upgrade(
dryrun=False,
dist_upgrade=False,
fromrepo=None,
- novendorchange=False,
+ novendorchange=True,
+ allowvendorchange=False,
skip_verify=False,
no_recommends=False,
root=None,
@@ -1844,7 +1905,11 @@ def upgrade(
Specify a list of package repositories to upgrade from. Default: None
novendorchange
- If set to True, no allow vendor changes. Default: False
+ DEPRECATED(use allowvendorchange): If set to True, do not allow vendor changes. Default: True
+
+ allowvendorchange
+ If set to True, vendor change is allowed. Default: False
+ If both allowvendorchange and novendorchange are passed, only allowvendorchange is used.
skip_verify
Skip the GPG verification check (e.g., ``--no-gpg-checks``)
@@ -1927,28 +1992,18 @@ def upgrade(
cmd_update.extend(["--from" if dist_upgrade else "--repo", repo])
log.info("Targeting repos: %s", fromrepo)
- if dist_upgrade:
- if novendorchange:
- # TODO: Grains validation should be moved to Zypper class
- if __grains__["osrelease_info"][0] > 11:
- cmd_update.append("--no-allow-vendor-change")
- log.info("Disabling vendor changes")
- else:
- log.warning(
- "Disabling vendor changes is not supported on this Zypper version"
- )
+ if no_recommends:
+ cmd_update.append("--no-recommends")
+ log.info("Disabling recommendations")
- if no_recommends:
- cmd_update.append("--no-recommends")
- log.info("Disabling recommendations")
+ if dryrun:
+ # Creates a solver test case for debugging.
+ log.info("Executing debugsolver and performing a dry-run dist-upgrade")
+ __zypper__(systemd_scope=_systemd_scope(), root=root).allow_vendor_change(
+ allowvendorchange, novendorchange
+ ).noraise.call(*cmd_update + ["--debug-solver"])
- if dryrun:
- # Creates a solver test case for debugging.
- log.info("Executing debugsolver and performing a dry-run dist-upgrade")
- __zypper__(systemd_scope=_systemd_scope(), root=root).noraise.call(
- *cmd_update + ["--debug-solver"]
- )
- else:
+ if not dist_upgrade:
if name or pkgs:
try:
(pkg_params, _) = __salt__["pkg_resource.parse_targets"](
@@ -1962,7 +2017,9 @@ def upgrade(
old = list_pkgs(root=root, attr=diff_attr)
- __zypper__(systemd_scope=_systemd_scope(), root=root).noraise.call(*cmd_update)
+ __zypper__(systemd_scope=_systemd_scope(), root=root).allow_vendor_change(
+ allowvendorchange, novendorchange
+ ).noraise.call(*cmd_update)
_clean_cache()
new = list_pkgs(root=root, attr=diff_attr)
ret = salt.utils.data.compare_dicts(old, new)
diff --git a/tests/unit/modules/test_zypperpkg.py b/tests/unit/modules/test_zypperpkg.py
index 5e4c967520..e85c93da3b 100644
--- a/tests/unit/modules/test_zypperpkg.py
+++ b/tests/unit/modules/test_zypperpkg.py
@@ -137,6 +137,7 @@ class ZypperTestCase(TestCase, LoaderModuleMockMixin):
stdout_xml_snippet = '<?xml version="1.0"?><test foo="bar"/>'
sniffer = RunSniffer(stdout=stdout_xml_snippet)
+ zypper.__zypper__._reset()
with patch.dict("salt.modules.zypperpkg.__salt__", {"cmd.run_all": sniffer}):
self.assertEqual(zypper.__zypper__.call("foo"), stdout_xml_snippet)
self.assertEqual(len(sniffer.calls), 1)
@@ -628,13 +629,495 @@ class ZypperTestCase(TestCase, LoaderModuleMockMixin):
{"vim": "7.4.326-2.62", "fakepkg": ""},
)
+ def test_upgrade_without_vendor_change(self):
+ """
+ Dist-upgrade without vendor change option.
+ """
+ with patch(
+ "salt.modules.zypperpkg.refresh_db", MagicMock(return_value=True)
+ ), patch(
+ "salt.modules.zypperpkg._systemd_scope", MagicMock(return_value=False)
+ ):
+ with patch(
+ "salt.modules.zypperpkg.__zypper__.noraise.call", MagicMock()
+ ) as zypper_mock:
+ with patch(
+ "salt.modules.zypperpkg.list_pkgs",
+ MagicMock(side_effect=[{"vim": "1.1"}, {"vim": "1.2"}]),
+ ):
+ ret = zypper.upgrade(dist_upgrade=True)
+ self.assertDictEqual(ret, {"vim": {"old": "1.1", "new": "1.2"}})
+ zypper_mock.assert_any_call(
+ "dist-upgrade", "--auto-agree-with-licenses",
+ )
+
+ def test_refresh_zypper_flags(self):
+ zypper.__zypper__._reset()
+ with patch(
+ "salt.modules.zypperpkg.version", MagicMock(return_value="0.5")
+ ), patch.dict(
+ zypper.__salt__, {"lowpkg.version_cmp": MagicMock(side_effect=[-1, -1])}
+ ):
+ zypper.__zypper__.refresh_zypper_flags()
+ assert zypper.__zypper__.inst_avc == False
+ assert zypper.__zypper__.dup_avc == False
+ with patch(
+ "salt.modules.zypperpkg.version", MagicMock(return_value="1.11.34")
+ ), patch.dict(
+ zypper.__salt__, {"lowpkg.version_cmp": MagicMock(side_effect=[0, -1])}
+ ):
+ zypper.__zypper__.refresh_zypper_flags()
+ assert zypper.__zypper__.inst_avc == False
+ assert zypper.__zypper__.dup_avc == True
+ with patch(
+ "salt.modules.zypperpkg.version", MagicMock(return_value="1.14.8")
+ ), patch.dict(
+ zypper.__salt__, {"lowpkg.version_cmp": MagicMock(side_effect=[0, 0])}
+ ):
+ zypper.__zypper__.refresh_zypper_flags()
+ assert zypper.__zypper__.inst_avc == True
+ assert zypper.__zypper__.dup_avc == True
+
+ @patch("salt.modules.zypperpkg.__zypper__.refresh_zypper_flags", MagicMock())
+ def test_allow_vendor_change_function(self):
+ zypper.__zypper__._reset()
+ zypper.__zypper__.inst_avc = True
+ zypper.__zypper__.dup_avc = True
+ zypper.__zypper__.avc = False
+ zypper.__zypper__.allow_vendor_change(False, False)
+ assert zypper.__zypper__.avc == True
+ zypper.__zypper__.avc = False
+ zypper.__zypper__.allow_vendor_change(True, False)
+ assert zypper.__zypper__.avc == True
+ zypper.__zypper__.avc = False
+ zypper.__zypper__.allow_vendor_change(False, True)
+ assert zypper.__zypper__.avc == False
+ zypper.__zypper__.avc = False
+ zypper.__zypper__.allow_vendor_change(True, True)
+ assert zypper.__zypper__.avc == True
+
+ zypper.__zypper__._reset()
+ zypper.__zypper__.inst_avc = False
+ zypper.__zypper__.dup_avc = True
+ zypper.__zypper__.avc = False
+ zypper.__zypper__.allow_vendor_change(False, False)
+ assert zypper.__zypper__.avc == True
+ zypper.__zypper__.avc = False
+ zypper.__zypper__.allow_vendor_change(True, False)
+ assert zypper.__zypper__.avc == True
+ zypper.__zypper__.avc = False
+ zypper.__zypper__.allow_vendor_change(False, True)
+ assert zypper.__zypper__.avc == False
+ zypper.__zypper__.avc = False
+ zypper.__zypper__.allow_vendor_change(True, True)
+ assert zypper.__zypper__.avc == True
+
+ zypper.__zypper__._reset()
+ zypper.__zypper__.inst_avc = False
+ zypper.__zypper__.dup_avc = False
+ zypper.__zypper__.avc = False
+ zypper.__zypper__.allow_vendor_change(False, False)
+ assert zypper.__zypper__.avc == False
+ zypper.__zypper__.avc = False
+ zypper.__zypper__.allow_vendor_change(True, False)
+ assert zypper.__zypper__.avc == False
+ zypper.__zypper__.avc = False
+ zypper.__zypper__.allow_vendor_change(False, True)
+ assert zypper.__zypper__.avc == False
+ zypper.__zypper__.avc = False
+ zypper.__zypper__.allow_vendor_change(True, True)
+ assert zypper.__zypper__.avc == False
+
+ @patch(
+ "salt.utils.environment.get_module_environment",
+ MagicMock(return_value={"SALT_RUNNING": "1"}),
+ )
+ def test_zypper_call_dist_upgrade_with_avc_true(self):
+ cmd_run_mock = MagicMock(return_value={"retcode": 0, "stdout": None})
+ zypper.__zypper__._reset()
+ with patch.dict(zypper.__salt__, {"cmd.run_all": cmd_run_mock}), patch(
+ "salt.modules.zypperpkg.__zypper__.refresh_zypper_flags", MagicMock()
+ ), patch("salt.modules.zypperpkg.__zypper__._reset", MagicMock()):
+ zypper.__zypper__.dup_avc = True
+ zypper.__zypper__.avc = True
+ zypper.__zypper__.call("dist-upgrade")
+ cmd_run_mock.assert_any_call(
+ [
+ "zypper",
+ "--non-interactive",
+ "--no-refresh",
+ "dist-upgrade",
+ "--allow-vendor-change",
+ ],
+ output_loglevel="trace",
+ python_shell=False,
+ env={"SALT_RUNNING": "1"},
+ )
+
+ @patch(
+ "salt.utils.environment.get_module_environment",
+ MagicMock(return_value={"SALT_RUNNING": "1"}),
+ )
+ def test_zypper_call_dist_upgrade_with_avc_false(self):
+ cmd_run_mock = MagicMock(return_value={"retcode": 0, "stdout": None})
+ zypper.__zypper__._reset()
+ with patch.dict(zypper.__salt__, {"cmd.run_all": cmd_run_mock}), patch(
+ "salt.modules.zypperpkg.__zypper__.refresh_zypper_flags", MagicMock()
+ ), patch("salt.modules.zypperpkg.__zypper__._reset", MagicMock()):
+ zypper.__zypper__.dup_avc = False
+ zypper.__zypper__.avc = False
+ zypper.__zypper__.call("dist-upgrade")
+ cmd_run_mock.assert_any_call(
+ ["zypper", "--non-interactive", "--no-refresh", "dist-upgrade",],
+ output_loglevel="trace",
+ python_shell=False,
+ env={"SALT_RUNNING": "1"},
+ )
+
+ @patch(
+ "salt.utils.environment.get_module_environment",
+ MagicMock(return_value={"SALT_RUNNING": "1"}),
+ )
+ def test_zypper_call_install_with_avc_true(self):
+ cmd_run_mock = MagicMock(return_value={"retcode": 0, "stdout": None})
+ zypper.__zypper__._reset()
+ with patch.dict(zypper.__salt__, {"cmd.run_all": cmd_run_mock}), patch(
+ "salt.modules.zypperpkg.__zypper__.refresh_zypper_flags", MagicMock()
+ ), patch("salt.modules.zypperpkg.__zypper__._reset", MagicMock()):
+ zypper.__zypper__.inst_avc = True
+ zypper.__zypper__.avc = True
+ zypper.__zypper__.call("install")
+ cmd_run_mock.assert_any_call(
+ [
+ "zypper",
+ "--non-interactive",
+ "--no-refresh",
+ "install",
+ "--allow-vendor-change",
+ ],
+ output_loglevel="trace",
+ python_shell=False,
+ env={"SALT_RUNNING": "1"},
+ )
+
+ @patch(
+ "salt.utils.environment.get_module_environment",
+ MagicMock(return_value={"SALT_RUNNING": "1"}),
+ )
+ def test_zypper_call_install_with_avc_false(self):
+ cmd_run_mock = MagicMock(return_value={"retcode": 0, "stdout": None})
+ zypper.__zypper__._reset()
+ with patch.dict(zypper.__salt__, {"cmd.run_all": cmd_run_mock}), patch(
+ "salt.modules.zypperpkg.__zypper__.refresh_zypper_flags", MagicMock()
+ ), patch("salt.modules.zypperpkg.__zypper__._reset", MagicMock()):
+ zypper.__zypper__.inst_avc = False
+ zypper.__zypper__.dup_avc = True
+ zypper.__zypper__.avc = True
+ zypper.__zypper__.call("install")
+ cmd_run_mock.assert_any_call(
+ ["zypper", "--non-interactive", "--no-refresh", "install",],
+ output_loglevel="trace",
+ python_shell=False,
+ env={"SALT_RUNNING": "1"},
+ )
+
+ def test_upgrade_with_novendorchange_true(self):
+ """
+ Dist-upgrade without vendor change option.
+ """
+ zypper.__zypper__._reset()
+ with patch(
+ "salt.modules.zypperpkg.refresh_db", MagicMock(return_value=True)
+ ), patch(
+ "salt.modules.zypperpkg.__zypper__.refresh_zypper_flags", MagicMock()
+ ) as refresh_flags_mock, patch(
+ "salt.modules.zypperpkg._systemd_scope", MagicMock(return_value=False)
+ ):
+ with patch(
+ "salt.modules.zypperpkg.__zypper__.noraise.call", MagicMock()
+ ) as zypper_mock:
+ with patch(
+ "salt.modules.zypperpkg.list_pkgs",
+ MagicMock(side_effect=[{"vim": "1.1"}, {"vim": "1.2"}]),
+ ):
+ ret = zypper.upgrade(dist_upgrade=True, novendorchange=True)
+ refresh_flags_mock.assert_not_called()
+ zypper_mock.assert_any_call(
+ "dist-upgrade", "--auto-agree-with-licenses",
+ )
+
+ def test_upgrade_with_novendorchange_false(self):
+ """
+ Perform dist-upgrade with novendorchange set to False.
+ """
+ zypper.__zypper__._reset()
+ with patch(
+ "salt.modules.zypperpkg.refresh_db", MagicMock(return_value=True)
+ ), patch(
+ "salt.modules.zypperpkg.__zypper__.refresh_zypper_flags", MagicMock()
+ ), patch(
+ "salt.modules.zypperpkg._systemd_scope", MagicMock(return_value=False)
+ ):
+ with patch(
+ "salt.modules.zypperpkg.__zypper__.noraise.call", MagicMock()
+ ) as zypper_mock:
+ with patch(
+ "salt.modules.zypperpkg.list_pkgs",
+ MagicMock(side_effect=[{"vim": "1.1"}, {"vim": "1.1"}]),
+ ):
+ zypper.__zypper__.inst_avc = True
+ zypper.__zypper__.dup_avc = True
+ with patch.dict(
+ zypper.__salt__,
+ {
+ "pkg_resource.version": MagicMock(return_value="1.15"),
+ "lowpkg.version_cmp": MagicMock(return_value=1),
+ },
+ ):
+ ret = zypper.upgrade(
+ dist_upgrade=True,
+ dryrun=True,
+ fromrepo=["Dummy", "Dummy2"],
+ novendorchange=False,
+ )
+ assert zypper.__zypper__.avc == True
+
+ def test_upgrade_with_allowvendorchange_true(self):
+ """
+ Perform dist-upgrade with allowvendorchange set to True.
+ """
+ zypper.__zypper__._reset()
+ with patch(
+ "salt.modules.zypperpkg.refresh_db", MagicMock(return_value=True)
+ ), patch(
+ "salt.modules.zypperpkg.__zypper__.refresh_zypper_flags", MagicMock()
+ ), patch(
+ "salt.modules.zypperpkg._systemd_scope", MagicMock(return_value=False)
+ ):
+ with patch(
+ "salt.modules.zypperpkg.__zypper__.noraise.call", MagicMock()
+ ) as zypper_mock:
+ with patch(
+ "salt.modules.zypperpkg.list_pkgs",
+ MagicMock(side_effect=[{"vim": "1.1"}, {"vim": "1.1"}]),
+ ):
+ with patch.dict(
+ zypper.__salt__,
+ {
+ "pkg_resource.version": MagicMock(return_value="1.15"),
+ "lowpkg.version_cmp": MagicMock(return_value=1),
+ },
+ ):
+
+ zypper.__zypper__.inst_avc = True
+ zypper.__zypper__.dup_avc = True
+ ret = zypper.upgrade(
+ dist_upgrade=True,
+ dryrun=True,
+ fromrepo=["Dummy", "Dummy2"],
+ allowvendorchange=True,
+ )
+ assert zypper.__zypper__.avc == True
+
+ def test_upgrade_with_allowvendorchange_false(self):
+ """
+ Perform dist-upgrade with allowvendorchange set to False.
+ """
+ zypper.__zypper__._reset()
+ with patch(
+ "salt.modules.zypperpkg.refresh_db", MagicMock(return_value=True)
+ ), patch(
+ "salt.modules.zypperpkg.__zypper__.refresh_zypper_flags", MagicMock()
+ ), patch(
+ "salt.modules.zypperpkg._systemd_scope", MagicMock(return_value=False)
+ ):
+ with patch(
+ "salt.modules.zypperpkg.__zypper__.noraise.call", MagicMock()
+ ) as zypper_mock:
+ with patch(
+ "salt.modules.zypperpkg.list_pkgs",
+ MagicMock(side_effect=[{"vim": "1.1"}, {"vim": "1.1"}]),
+ ):
+ with patch.dict(
+ zypper.__salt__,
+ {
+ "pkg_resource.version": MagicMock(return_value="1.15"),
+ "lowpkg.version_cmp": MagicMock(return_value=1),
+ },
+ ):
+
+ zypper.__zypper__.inst_avc = True
+ zypper.__zypper__.dup_avc = True
+ ret = zypper.upgrade(
+ dist_upgrade=True,
+ dryrun=True,
+ fromrepo=["Dummy", "Dummy2"],
+ allowvendorchange=False,
+ )
+ assert zypper.__zypper__.avc == False
+
+ def test_upgrade_old_zypper(self):
+ zypper.__zypper__._reset()
+ with patch(
+ "salt.modules.zypperpkg.refresh_db", MagicMock(return_value=True)
+ ), patch(
+ "salt.modules.zypperpkg.__zypper__.refresh_zypper_flags", MagicMock()
+ ) as refresh_flags_mock, patch(
+ "salt.modules.zypperpkg._systemd_scope", MagicMock(return_value=False)
+ ):
+ with patch(
+ "salt.modules.zypperpkg.__zypper__.noraise.call", MagicMock()
+ ) as zypper_mock:
+ with patch(
+ "salt.modules.zypperpkg.list_pkgs",
+ MagicMock(side_effect=[{"vim": "1.1"}, {"vim": "1.1"}]),
+ ):
+ with patch.dict(
+ zypper.__salt__,
+ {
+ "pkg_resource.version": MagicMock(return_value="1.11"),
+ "lowpkg.version_cmp": MagicMock(return_value=-1),
+ },
+ ):
+ zypper.__zypper__.inst_avc = False
+ zypper.__zypper__.dup_avc = False
+ ret = zypper.upgrade(
+ dist_upgrade=True,
+ dryrun=True,
+ fromrepo=["Dummy", "Dummy2"],
+ novendorchange=False,
+ )
+ zypper.__zypper__.avc = False
+
+ def test_upgrade_success(self):
+ """
+ Test system upgrade and dist-upgrade success.
+
+ :return:
+ """
+ with patch(
+ "salt.modules.zypperpkg.refresh_db", MagicMock(return_value=True)
+ ), patch(
+ "salt.modules.zypperpkg._systemd_scope", MagicMock(return_value=False)
+ ):
+ with patch(
+ "salt.modules.zypperpkg.__zypper__.noraise.call", MagicMock()
+ ) as zypper_mock:
+ with patch(
+ "salt.modules.zypperpkg.list_pkgs",
+ MagicMock(side_effect=[{"vim": "1.1"}, {"vim": "1.2"}]),
+ ):
+ ret = zypper.upgrade()
+ self.assertDictEqual(ret, {"vim": {"old": "1.1", "new": "1.2"}})
+ zypper_mock.assert_any_call("update", "--auto-agree-with-licenses")
+
+ with patch(
+ "salt.modules.zypperpkg.list_pkgs",
+ MagicMock(
+ side_effect=[
+ {"kernel-default": "1.1"},
+ {"kernel-default": "1.1,1.2"},
+ ]
+ ),
+ ):
+ ret = zypper.upgrade()
+ self.assertDictEqual(
+ ret, {"kernel-default": {"old": "1.1", "new": "1.1,1.2"}}
+ )
+ zypper_mock.assert_any_call("update", "--auto-agree-with-licenses")
+
+ with patch(
+ "salt.modules.zypperpkg.list_pkgs",
+ MagicMock(side_effect=[{"vim": "1.1"}, {"vim": "1.1,1.2"}]),
+ ):
+ ret = zypper.upgrade()
+ self.assertDictEqual(ret, {"vim": {"old": "1.1", "new": "1.1,1.2"}})
+ zypper_mock.assert_any_call("update", "--auto-agree-with-licenses")
+
+ with patch(
+ "salt.modules.zypperpkg.list_pkgs",
+ MagicMock(side_effect=[{"vim": "1.1"}, {"vim": "1.1"}]),
+ ):
+ ret = zypper.upgrade(dist_upgrade=True, dryrun=True)
+ zypper_mock.assert_any_call(
+ "dist-upgrade", "--auto-agree-with-licenses", "--dry-run"
+ )
+ zypper_mock.assert_any_call(
+ "dist-upgrade",
+ "--auto-agree-with-licenses",
+ "--dry-run",
+ "--debug-solver",
+ )
+
+ with patch(
+ "salt.modules.zypperpkg.list_pkgs",
+ MagicMock(side_effect=[{"vim": "1.1"}, {"vim": "1.1"}]),
+ ):
+ ret = zypper.upgrade(
+ dist_upgrade=False, fromrepo=["Dummy", "Dummy2"], dryrun=False
+ )
+ zypper_mock.assert_any_call(
+ "update",
+ "--auto-agree-with-licenses",
+ "--repo",
+ "Dummy",
+ "--repo",
+ "Dummy2",
+ )
+
+ with patch(
+ "salt.modules.zypperpkg.list_pkgs",
+ MagicMock(side_effect=[{"vim": "1.1"}, {"vim": "1.1"}]),
+ ):
+ ret = zypper.upgrade(
+ dist_upgrade=True,
+ dryrun=True,
+ fromrepo=["Dummy", "Dummy2"],
+ novendorchange=True,
+ )
+ zypper_mock.assert_any_call(
+ "dist-upgrade",
+ "--auto-agree-with-licenses",
+ "--dry-run",
+ "--from",
+ "Dummy",
+ "--from",
+ "Dummy2",
+ )
+ zypper_mock.assert_any_call(
+ "dist-upgrade",
+ "--auto-agree-with-licenses",
+ "--dry-run",
+ "--from",
+ "Dummy",
+ "--from",
+ "Dummy2",
+ "--debug-solver",
+ )
+
+ with patch(
+ "salt.modules.zypperpkg.list_pkgs",
+ MagicMock(side_effect=[{"vim": "1.1"}, {"vim": "1.1"}]),
+ ):
+ ret = zypper.upgrade(
+ dist_upgrade=False, fromrepo=["Dummy", "Dummy2"], dryrun=False
+ )
+ zypper_mock.assert_any_call(
+ "update",
+ "--auto-agree-with-licenses",
+ "--repo",
+ "Dummy",
+ "--repo",
+ "Dummy2",
+ )
+
def test_upgrade_kernel(self):
"""
Test kernel package upgrade success.
:return:
"""
- with patch.dict(zypper.__grains__, {"osrelease_info": [12, 1]}), patch(
+ with patch(
"salt.modules.zypperpkg.refresh_db", MagicMock(return_value=True)
), patch(
"salt.modules.zypperpkg._systemd_scope", MagicMock(return_value=False)
@@ -672,6 +1155,53 @@ class ZypperTestCase(TestCase, LoaderModuleMockMixin):
},
)
+ def test_upgrade_failure(self):
+ """
+ Test system upgrade failure.
+
+ :return:
+ """
+ zypper_out = """
+Loading repository data...
+Reading installed packages...
+Computing distribution upgrade...
+Use 'zypper repos' to get the list of defined repositories.
+Repository 'DUMMY' not found by its alias, number, or URI.
+"""
+
+ class FailingZypperDummy:
+ def __init__(self):
+ self.stdout = zypper_out
+ self.stderr = ""
+ self.pid = 1234
+ self.exit_code = 555
+ self.noraise = MagicMock()
+ self.allow_vendor_change = self
+ self.SUCCESS_EXIT_CODES = [0]
+
+ def __call__(self, *args, **kwargs):
+ return self
+
+ with patch(
+ "salt.modules.zypperpkg.__zypper__", FailingZypperDummy()
+ ) as zypper_mock, patch(
+ "salt.modules.zypperpkg.refresh_db", MagicMock(return_value=True)
+ ), patch(
+ "salt.modules.zypperpkg._systemd_scope", MagicMock(return_value=False)
+ ):
+ zypper_mock.noraise.call = MagicMock()
+ with patch(
+ "salt.modules.zypperpkg.list_pkgs",
+ MagicMock(side_effect=[{"vim": "1.1"}, {"vim": "1.1"}]),
+ ):
+ with self.assertRaises(CommandExecutionError) as cmd_exc:
+ ret = zypper.upgrade(dist_upgrade=True, fromrepo=["DUMMY"])
+ self.assertEqual(cmd_exc.exception.info["changes"], {})
+ self.assertEqual(cmd_exc.exception.info["result"]["stdout"], zypper_out)
+ zypper_mock.noraise.call.assert_called_with(
+ "dist-upgrade", "--auto-agree-with-licenses", "--from", "DUMMY",
+ )
+
def test_upgrade_available(self):
"""
Test whether or not an upgrade is available for a given package.
--
2.39.2

File diff suppressed because it is too large Load Diff

View File

@ -0,0 +1,47 @@
From 8e9f2587aea52c1d0a5c07d5f9bb77a23ae4d4a6 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Pablo=20Su=C3=A1rez=20Hern=C3=A1ndez?=
<psuarezhernandez@suse.com>
Date: Tue, 23 May 2023 10:40:02 +0100
Subject: [PATCH] Avoid conflicts with dependencies versions
(bsc#1211612) (#581)
This commit fixes the Salt requirements file that are used to
generate the "requires.txt" file that is included in Salt egginfo
in order to be consistent with the installed packages
of Salt dependencies.
This prevents issues when resolving and validating Salt dependencies
with "pkg_resources" Python module.
---
requirements/base.txt | 2 +-
requirements/zeromq.txt | 2 +-
2 files changed, 2 insertions(+), 2 deletions(-)
diff --git a/requirements/base.txt b/requirements/base.txt
index c19d8804a2..437aa01d31 100644
--- a/requirements/base.txt
+++ b/requirements/base.txt
@@ -6,7 +6,7 @@ MarkupSafe
requests>=1.0.0
distro>=1.0.1
psutil>=5.0.0
-packaging>=21.3
+packaging>=17.1
looseversion
# We need contextvars for salt-ssh
contextvars
diff --git a/requirements/zeromq.txt b/requirements/zeromq.txt
index 1e9a815c1b..23d1ef25dc 100644
--- a/requirements/zeromq.txt
+++ b/requirements/zeromq.txt
@@ -1,5 +1,5 @@
-r base.txt
-r crypto.txt
-pyzmq>=20.0.0
+pyzmq>=17.1.2
pyzmq==25.0.2 ; sys_platform == "win32"
--
2.39.2

View File

@ -0,0 +1,26 @@
From 4d8c88d6e467c22ea74738743de5be6577f81085 Mon Sep 17 00:00:00 2001
From: Hubert Mantel <mantel@suse.de>
Date: Mon, 27 Nov 2017 13:55:13 +0100
Subject: [PATCH] avoid excessive syslogging by watchdog cronjob (#58)
---
pkg/old/suse/salt-minion | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/pkg/old/suse/salt-minion b/pkg/old/suse/salt-minion
index 2e418094ed..73a91ebd62 100755
--- a/pkg/old/suse/salt-minion
+++ b/pkg/old/suse/salt-minion
@@ -55,7 +55,7 @@ WATCHDOG_CRON="/etc/cron.d/salt-minion"
set_watchdog() {
if [ ! -f $WATCHDOG_CRON ]; then
- echo -e '* * * * * root /usr/bin/salt-daemon-watcher --with-init\n' > $WATCHDOG_CRON
+ echo -e '-* * * * * root /usr/bin/salt-daemon-watcher --with-init\n' > $WATCHDOG_CRON
# Kick the watcher for 1 minute immediately, because cron will wake up only afterwards
/usr/bin/salt-daemon-watcher --with-init & disown
fi
--
2.39.2

View File

@ -0,0 +1,112 @@
From 2ca37fe7d2a03ad86ed738f2636fe240b9f4467e Mon Sep 17 00:00:00 2001
From: Victor Zhestkov <35733135+vzhestkov@users.noreply.github.com>
Date: Tue, 6 Oct 2020 12:36:41 +0300
Subject: [PATCH] bsc#1176024: Fix file/directory user and group
ownership containing UTF-8 characters (#275)
* Fix check_perm typos of file module
* Fix UTF8 support for user/group ownership operations with file module and state
* Fix UTF8 support for user/group ownership operations with file module and state
Co-authored-by: Victor Zhestkov <vzhestkov@vz-thinkpad.vzhestkov.net>
---
salt/modules/file.py | 20 ++++++++++----------
salt/states/file.py | 12 ++++++++++--
2 files changed, 20 insertions(+), 12 deletions(-)
diff --git a/salt/modules/file.py b/salt/modules/file.py
index 69d7992f5a..4612d65511 100644
--- a/salt/modules/file.py
+++ b/salt/modules/file.py
@@ -245,7 +245,7 @@ def group_to_gid(group):
try:
if isinstance(group, int):
return group
- return grp.getgrnam(group).gr_gid
+ return grp.getgrnam(salt.utils.stringutils.to_str(group)).gr_gid
except KeyError:
return ""
@@ -336,7 +336,7 @@ def user_to_uid(user):
try:
if isinstance(user, int):
return user
- return pwd.getpwnam(user).pw_uid
+ return pwd.getpwnam(salt.utils.stringutils.to_str(user)).pw_uid
except KeyError:
return ""
@@ -5133,8 +5133,8 @@ def check_perms(
salt.utils.platform.is_windows() and not user_to_uid(user) == cur["uid"]
) or (
not salt.utils.platform.is_windows()
- and not user == cur["user"]
- and not user == cur["uid"]
+ and not salt.utils.stringutils.to_str(user) == cur["user"]
+ and not salt.utils.stringutils.to_str(user) == cur["uid"]
):
perms["cuser"] = user
@@ -5143,8 +5143,8 @@ def check_perms(
salt.utils.platform.is_windows() and not group_to_gid(group) == cur["gid"]
) or (
not salt.utils.platform.is_windows()
- and not group == cur["group"]
- and not group == cur["gid"]
+ and not salt.utils.stringutils.to_str(group) == cur["group"]
+ and not salt.utils.stringutils.to_str(group) == cur["gid"]
):
perms["cgroup"] = group
@@ -5188,8 +5188,8 @@ def check_perms(
salt.utils.platform.is_windows() and not user_to_uid(user) == post["uid"]
) or (
not salt.utils.platform.is_windows()
- and not user == post["user"]
- and not user == post["uid"]
+ and not salt.utils.stringutils.to_str(user) == post["user"]
+ and not salt.utils.stringutils.to_str(user) == post["uid"]
):
if __opts__["test"] is True:
ret["changes"]["user"] = user
@@ -5204,8 +5204,8 @@ def check_perms(
salt.utils.platform.is_windows() and not group_to_gid(group) == post["gid"]
) or (
not salt.utils.platform.is_windows()
- and not group == post["group"]
- and not group == post["gid"]
+ and not salt.utils.stringutils.to_str(group) == post["group"]
+ and not salt.utils.stringutils.to_str(group) == post["gid"]
):
if __opts__["test"] is True:
ret["changes"]["group"] = group
diff --git a/salt/states/file.py b/salt/states/file.py
index 9f32151b8b..024e5e34ce 100644
--- a/salt/states/file.py
+++ b/salt/states/file.py
@@ -864,9 +864,17 @@ def _check_dir_meta(name, user, group, mode, follow_symlinks=False):
if not stats:
changes["directory"] = "new"
return changes
- if user is not None and user != stats["user"] and user != stats.get("uid"):
+ if (
+ user is not None
+ and salt.utils.stringutils.to_str(user) != stats["user"]
+ and user != stats.get("uid")
+ ):
changes["user"] = user
- if group is not None and group != stats["group"] and group != stats.get("gid"):
+ if (
+ group is not None
+ and salt.utils.stringutils.to_str(group) != stats["group"]
+ and group != stats.get("gid")
+ ):
changes["group"] = group
# Normalize the dir mode
smode = salt.utils.files.normalize_mode(stats["mode"])
--
2.39.2

View File

@ -0,0 +1,30 @@
From b7a554e2dec3351c91c237497fe37cbc30d664bd Mon Sep 17 00:00:00 2001
From: Victor Zhestkov <Victor.Zhestkov@suse.com>
Date: Thu, 1 Sep 2022 14:42:24 +0300
Subject: [PATCH] Change the delimeters to prevent possible tracebacks on
some packages with dpkg_lowpkg
* Use another separator on query to dpkg-query
* Fix the test test_dpkg_lowpkg::test_info
---
salt/modules/dpkg_lowpkg.py | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/salt/modules/dpkg_lowpkg.py b/salt/modules/dpkg_lowpkg.py
index 4d716c8772..78990492cf 100644
--- a/salt/modules/dpkg_lowpkg.py
+++ b/salt/modules/dpkg_lowpkg.py
@@ -347,7 +347,7 @@ def _get_pkg_info(*packages, **kwargs):
if build_date:
pkg_data["build_date"] = build_date
pkg_data["build_date_time_t"] = build_date_t
- pkg_data["description"] = pkg_descr.split(":", 1)[-1]
+ pkg_data["description"] = pkg_descr
ret.append(pkg_data)
return ret
--
2.39.2

View File

@ -0,0 +1,37 @@
From fcb43735942ca1b796f656d5647e49a93f770bb2 Mon Sep 17 00:00:00 2001
From: Alexander Graul <agraul@suse.com>
Date: Tue, 10 Jan 2023 15:04:01 +0100
Subject: [PATCH] Control the collection of lvm grains via config
lvm grain collection can take a long time on systems with a lot of
volumes and volume groups. On one server we measured ~3 minutes, which
is way too long for grains.
This change is backwards-compatible, leaving the lvm grain collection
enabled by default. Users with a lot of lvm volumes/volume groups can
disable these grains in the minion config by setting
enable_lvm_grains: False
---
salt/grains/lvm.py | 4 ++++
1 file changed, 4 insertions(+)
diff --git a/salt/grains/lvm.py b/salt/grains/lvm.py
index 586b187ddb..f5c406cb44 100644
--- a/salt/grains/lvm.py
+++ b/salt/grains/lvm.py
@@ -17,6 +17,10 @@ __salt__ = {
log = logging.getLogger(__name__)
+def __virtual__():
+ return __opts__.get("enable_lvm_grains", True)
+
+
def lvm():
"""
Return list of LVM devices
--
2.39.2

View File

@ -0,0 +1,351 @@
From 2fbc5b580661b094cf79cc5da0860745b72088e4 Mon Sep 17 00:00:00 2001
From: Alexander Graul <agraul@suse.com>
Date: Tue, 25 Jan 2022 17:08:57 +0100
Subject: [PATCH] Debian info_installed compatibility (#50453)
Remove unused variable
Get unit ticks installation time
Pass on unix ticks installation date time
Implement function to figure out package build time
Unify arch attribute
Add 'attr' support.
Use attr parameter in aptpkg
Add 'all_versions' output structure backward compatibility
Fix docstring
Add UT for generic test of function 'info'
Add UT for 'info' function with the parameter 'attr'
Add UT for info_installed's 'attr' param
Fix docstring
Add returned type check
Add UT for info_installed with 'all_versions=True' output structure
Refactor UT for 'owner' function
Refactor UT: move to decorators, add more checks
Schedule TODO for next refactoring of UT 'show' function
Refactor UT: get rid of old assertion way, flatten tests
Refactor UT: move to native assertions, cleanup noise, flatten complexity for better visibility what is tested
Lintfix: too many empty lines
Adjust architecture getter according to the lowpkg info
Fix wrong Git merge: missing function signature
Reintroducing reverted changes
Reintroducing changes from commit e20362f6f053eaa4144583604e6aac3d62838419
that got partially reverted by this commit:
https://github.com/openSUSE/salt/commit/d0ef24d113bdaaa29f180031b5da384cffe08c64#diff-820e6ce667fe3afddbc1b9cf1682fdef
---
salt/modules/aptpkg.py | 24 ++++-
salt/modules/dpkg_lowpkg.py | 110 ++++++++++++++++++----
tests/pytests/unit/modules/test_aptpkg.py | 52 ++++++++++
3 files changed, 167 insertions(+), 19 deletions(-)
diff --git a/salt/modules/aptpkg.py b/salt/modules/aptpkg.py
index 8e89744b5e..938e37cc9e 100644
--- a/salt/modules/aptpkg.py
+++ b/salt/modules/aptpkg.py
@@ -3440,6 +3440,15 @@ def info_installed(*names, **kwargs):
.. versionadded:: 2016.11.3
+ attr
+ Comma-separated package attributes. If no 'attr' is specified, all available attributes returned.
+
+ Valid attributes are:
+ version, vendor, release, build_date, build_date_time_t, install_date, install_date_time_t,
+ build_host, group, source_rpm, arch, epoch, size, license, signature, packager, url, summary, description.
+
+ .. versionadded:: Neon
+
CLI Example:
.. code-block:: bash
@@ -3450,11 +3459,19 @@ def info_installed(*names, **kwargs):
"""
kwargs = salt.utils.args.clean_kwargs(**kwargs)
failhard = kwargs.pop("failhard", True)
+ kwargs.pop("errors", None) # Only for compatibility with RPM
+ attr = kwargs.pop("attr", None) # Package attributes to return
+ all_versions = kwargs.pop(
+ "all_versions", False
+ ) # This is for backward compatible structure only
+
if kwargs:
salt.utils.args.invalid_kwargs(kwargs)
ret = dict()
- for pkg_name, pkg_nfo in __salt__["lowpkg.info"](*names, failhard=failhard).items():
+ for pkg_name, pkg_nfo in __salt__["lowpkg.info"](
+ *names, failhard=failhard, attr=attr
+ ).items():
t_nfo = dict()
if pkg_nfo.get("status", "ii")[1] != "i":
continue # return only packages that are really installed
@@ -3475,7 +3492,10 @@ def info_installed(*names, **kwargs):
else:
t_nfo[key] = value
- ret[pkg_name] = t_nfo
+ if all_versions:
+ ret.setdefault(pkg_name, []).append(t_nfo)
+ else:
+ ret[pkg_name] = t_nfo
return ret
diff --git a/salt/modules/dpkg_lowpkg.py b/salt/modules/dpkg_lowpkg.py
index eefd852c51..4d716c8772 100644
--- a/salt/modules/dpkg_lowpkg.py
+++ b/salt/modules/dpkg_lowpkg.py
@@ -234,6 +234,44 @@ def file_dict(*packages, **kwargs):
return {"errors": errors, "packages": ret}
+def _get_pkg_build_time(name):
+ """
+ Get package build time, if possible.
+
+ :param name:
+ :return:
+ """
+ iso_time = iso_time_t = None
+ changelog_dir = os.path.join("/usr/share/doc", name)
+ if os.path.exists(changelog_dir):
+ for fname in os.listdir(changelog_dir):
+ try:
+ iso_time_t = int(os.path.getmtime(os.path.join(changelog_dir, fname)))
+ iso_time = (
+ datetime.datetime.utcfromtimestamp(iso_time_t).isoformat() + "Z"
+ )
+ break
+ except OSError:
+ pass
+
+ # Packager doesn't care about Debian standards, therefore Plan B: brute-force it.
+ if not iso_time:
+ for pkg_f_path in __salt__["cmd.run"](
+ "dpkg-query -L {}".format(name)
+ ).splitlines():
+ if "changelog" in pkg_f_path.lower() and os.path.exists(pkg_f_path):
+ try:
+ iso_time_t = int(os.path.getmtime(pkg_f_path))
+ iso_time = (
+ datetime.datetime.utcfromtimestamp(iso_time_t).isoformat() + "Z"
+ )
+ break
+ except OSError:
+ pass
+
+ return iso_time, iso_time_t
+
+
def _get_pkg_info(*packages, **kwargs):
"""
Return list of package information. If 'packages' parameter is empty,
@@ -257,7 +295,7 @@ def _get_pkg_info(*packages, **kwargs):
cmd = (
"dpkg-query -W -f='package:" + bin_var + "\\n"
"revision:${binary:Revision}\\n"
- "architecture:${Architecture}\\n"
+ "arch:${Architecture}\\n"
"maintainer:${Maintainer}\\n"
"summary:${Summary}\\n"
"source:${source:Package}\\n"
@@ -299,10 +337,17 @@ def _get_pkg_info(*packages, **kwargs):
key, value = pkg_info_line.split(":", 1)
if value:
pkg_data[key] = value
- install_date = _get_pkg_install_time(pkg_data.get("package"))
- if install_date:
- pkg_data["install_date"] = install_date
- pkg_data["description"] = pkg_descr
+ install_date, install_date_t = _get_pkg_install_time(
+ pkg_data.get("package"), pkg_data.get("arch")
+ )
+ if install_date:
+ pkg_data["install_date"] = install_date
+ pkg_data["install_date_time_t"] = install_date_t # Unix ticks
+ build_date, build_date_t = _get_pkg_build_time(pkg_data.get("package"))
+ if build_date:
+ pkg_data["build_date"] = build_date
+ pkg_data["build_date_time_t"] = build_date_t
+ pkg_data["description"] = pkg_descr.split(":", 1)[-1]
ret.append(pkg_data)
return ret
@@ -327,24 +372,34 @@ def _get_pkg_license(pkg):
return ", ".join(sorted(licenses))
-def _get_pkg_install_time(pkg):
+def _get_pkg_install_time(pkg, arch):
"""
Return package install time, based on the /var/lib/dpkg/info/<package>.list
:return:
"""
- iso_time = None
+ iso_time = iso_time_t = None
+ loc_root = "/var/lib/dpkg/info"
if pkg is not None:
- location = "/var/lib/dpkg/info/{}.list".format(pkg)
- if os.path.exists(location):
- iso_time = (
- datetime.datetime.utcfromtimestamp(
- int(os.path.getmtime(location))
- ).isoformat()
- + "Z"
- )
+ locations = []
+ if arch is not None and arch != "all":
+ locations.append(os.path.join(loc_root, "{}:{}.list".format(pkg, arch)))
- return iso_time
+ locations.append(os.path.join(loc_root, "{}.list".format(pkg)))
+ for location in locations:
+ try:
+ iso_time_t = int(os.path.getmtime(location))
+ iso_time = (
+ datetime.datetime.utcfromtimestamp(iso_time_t).isoformat() + "Z"
+ )
+ break
+ except OSError:
+ pass
+
+ if iso_time is None:
+ log.debug('Unable to get package installation time for package "%s".', pkg)
+
+ return iso_time, iso_time_t
def _get_pkg_ds_avail():
@@ -394,6 +449,15 @@ def info(*packages, **kwargs):
.. versionadded:: 2016.11.3
+ attr
+ Comma-separated package attributes. If no 'attr' is specified, all available attributes returned.
+
+ Valid attributes are:
+ version, vendor, release, build_date, build_date_time_t, install_date, install_date_time_t,
+ build_host, group, source_rpm, arch, epoch, size, license, signature, packager, url, summary, description.
+
+ .. versionadded:: Neon
+
CLI Example:
.. code-block:: bash
@@ -408,6 +472,10 @@ def info(*packages, **kwargs):
kwargs = salt.utils.args.clean_kwargs(**kwargs)
failhard = kwargs.pop("failhard", True)
+ attr = kwargs.pop("attr", None) or None
+ if attr:
+ attr = attr.split(",")
+
if kwargs:
salt.utils.args.invalid_kwargs(kwargs)
@@ -435,6 +503,14 @@ def info(*packages, **kwargs):
lic = _get_pkg_license(pkg["package"])
if lic:
pkg["license"] = lic
- ret[pkg["package"]] = pkg
+
+ # Remove keys that aren't in attrs
+ pkg_name = pkg["package"]
+ if attr:
+ for k in list(pkg.keys())[:]:
+ if k not in attr:
+ del pkg[k]
+
+ ret[pkg_name] = pkg
return ret
diff --git a/tests/pytests/unit/modules/test_aptpkg.py b/tests/pytests/unit/modules/test_aptpkg.py
index b69402578a..4226957eeb 100644
--- a/tests/pytests/unit/modules/test_aptpkg.py
+++ b/tests/pytests/unit/modules/test_aptpkg.py
@@ -360,6 +360,58 @@ def test_info_installed(lowpkg_info_var):
assert len(aptpkg.info_installed()) == 1
+def test_info_installed_attr(lowpkg_info_var):
+ """
+ Test info_installed 'attr'.
+ This doesn't test 'attr' behaviour per se, since the underlying function is in dpkg.
+ The test should simply not raise exceptions for invalid parameter.
+
+ :return:
+ """
+ expected_pkg = {
+ "url": "http://www.gnu.org/software/wget/",
+ "packager": "Ubuntu Developers <ubuntu-devel-discuss@lists.ubuntu.com>",
+ "name": "wget",
+ "install_date": "2016-08-30T22:20:15Z",
+ "description": "retrieves files from the web",
+ "version": "1.15-1ubuntu1.14.04.2",
+ "architecture": "amd64",
+ "group": "web",
+ "source": "wget",
+ }
+ mock = MagicMock(return_value=lowpkg_info_var)
+ with patch.dict(aptpkg.__salt__, {"lowpkg.info": mock}):
+ ret = aptpkg.info_installed("wget", attr="foo,bar")
+ assert ret["wget"] == expected_pkg
+
+
+def test_info_installed_all_versions(lowpkg_info_var):
+ """
+ Test info_installed 'all_versions'.
+ Since Debian won't return same name packages with the different names,
+ this should just return different structure, backward compatible with
+ the RPM equivalents.
+
+ :return:
+ """
+ expected_pkg = {
+ "url": "http://www.gnu.org/software/wget/",
+ "packager": "Ubuntu Developers <ubuntu-devel-discuss@lists.ubuntu.com>",
+ "name": "wget",
+ "install_date": "2016-08-30T22:20:15Z",
+ "description": "retrieves files from the web",
+ "version": "1.15-1ubuntu1.14.04.2",
+ "architecture": "amd64",
+ "group": "web",
+ "source": "wget",
+ }
+ mock = MagicMock(return_value=lowpkg_info_var)
+ with patch.dict(aptpkg.__salt__, {"lowpkg.info": mock}):
+ ret = aptpkg.info_installed("wget", all_versions=True)
+ assert isinstance(ret, dict)
+ assert ret["wget"] == [expected_pkg]
+
+
def test_owner():
"""
Test - Return the name of the package that owns the file.
--
2.39.2

View File

@ -0,0 +1,80 @@
From 45b97042766e15a4336b141b40a03d68156771bc Mon Sep 17 00:00:00 2001
From: Marek Czernek <marek.czernek@suse.com>
Date: Thu, 14 Mar 2024 16:16:02 +0100
Subject: [PATCH] Decode oscap byte stream to string (bsc#1219001)
---
salt/modules/openscap.py | 5 +++--
tests/unit/modules/test_openscap.py | 10 +++++-----
2 files changed, 8 insertions(+), 7 deletions(-)
diff --git a/salt/modules/openscap.py b/salt/modules/openscap.py
index 216fd89eef..89712ae722 100644
--- a/salt/modules/openscap.py
+++ b/salt/modules/openscap.py
@@ -152,10 +152,11 @@ def xccdf_eval(xccdffile, ovalfiles=None, **kwargs):
if success:
tempdir = tempfile.mkdtemp()
proc = Popen(cmd_opts, stdout=PIPE, stderr=PIPE, cwd=tempdir)
- (stdoutdata, error) = proc.communicate()
+ (_, error) = proc.communicate()
+ error = error.decode('ascii', errors='ignore')
success = _OSCAP_EXIT_CODES_MAP.get(proc.returncode, False)
if proc.returncode < 0:
- error += "\nKilled by signal {}\n".format(proc.returncode).encode('ascii')
+ error += "\nKilled by signal {}\n".format(proc.returncode)
returncode = proc.returncode
if success:
__salt__["cp.push_dir"](tempdir)
diff --git a/tests/unit/modules/test_openscap.py b/tests/unit/modules/test_openscap.py
index 301c1869ec..6fbdfed7cf 100644
--- a/tests/unit/modules/test_openscap.py
+++ b/tests/unit/modules/test_openscap.py
@@ -218,7 +218,7 @@ class OpenscapTestCase(TestCase):
"salt.modules.openscap.Popen",
MagicMock(
return_value=Mock(
- **{"returncode": 0, "communicate.return_value": ("", "")}
+ **{"returncode": 0, "communicate.return_value": (bytes(0), bytes(0))}
)
),
):
@@ -269,7 +269,7 @@ class OpenscapTestCase(TestCase):
"salt.modules.openscap.Popen",
MagicMock(
return_value=Mock(
- **{"returncode": 0, "communicate.return_value": ("", "")}
+ **{"returncode": 0, "communicate.return_value": (bytes(0), bytes(0))}
)
),
):
@@ -323,7 +323,7 @@ class OpenscapTestCase(TestCase):
"salt.modules.openscap.Popen",
MagicMock(
return_value=Mock(
- **{"returncode": 2, "communicate.return_value": ("", "some error")}
+ **{"returncode": 2, "communicate.return_value": (bytes(0), bytes("some error", "UTF-8"))}
)
),
):
@@ -374,7 +374,7 @@ class OpenscapTestCase(TestCase):
"salt.modules.openscap.Popen",
MagicMock(
return_value=Mock(
- **{"returncode": 2, "communicate.return_value": ("", "some error")}
+ **{"returncode": 2, "communicate.return_value": (bytes(0), bytes("some error", "UTF-8"))}
)
),
):
@@ -423,7 +423,7 @@ class OpenscapTestCase(TestCase):
return_value=Mock(
**{
"returncode": 1,
- "communicate.return_value": ("", "evaluation error"),
+ "communicate.return_value": (bytes(0), bytes("evaluation error", "UTF-8")),
}
)
),
--
2.43.0

View File

@ -0,0 +1,39 @@
From f02e97df14e4927efbb5ddd3a2bbc5a650330b9e Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Pablo=20Su=C3=A1rez=20Hern=C3=A1ndez?=
<psuarezhernandez@suse.com>
Date: Fri, 26 May 2023 16:50:51 +0100
Subject: [PATCH] Define __virtualname__ for transactional_update module
(#582)
This prevent problems with LazyLoader when importing this module,
which was wrongly exposing functions for this module under "state.*"
---
salt/modules/transactional_update.py | 4 +++-
1 file changed, 3 insertions(+), 1 deletion(-)
diff --git a/salt/modules/transactional_update.py b/salt/modules/transactional_update.py
index 6493966782..658ebccc6b 100644
--- a/salt/modules/transactional_update.py
+++ b/salt/modules/transactional_update.py
@@ -285,6 +285,8 @@ from salt.modules.state import _check_queue, _prior_running_states, _wait, runni
__func_alias__ = {"apply_": "apply"}
+__virtualname__ = "transactional_update"
+
log = logging.getLogger(__name__)
@@ -300,7 +302,7 @@ def __virtual__():
_prior_running_states, globals()
)
running = salt.utils.functools.namespaced_function(running, globals())
- return True
+ return __virtualname__
else:
return (False, "Module transactional_update requires a transactional system")
--
2.39.2

View File

@ -0,0 +1,101 @@
From 9942c488b1e74f2c6f187fcef3556fe53382bb4c Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Pablo=20Su=C3=A1rez=20Hern=C3=A1ndez?=
<psuarezhernandez@suse.com>
Date: Mon, 13 Nov 2023 15:04:14 +0000
Subject: [PATCH] Dereference symlinks to set proper __cli opt
(bsc#1215963) (#611)
* Dereference symlinks to set proper __cli
* Add changelog entry
* Add unit tests to check path is expanded
---------
Co-authored-by: vzhestkov <vzhestkov@suse.com>
---
changelog/65435.fixed.md | 1 +
salt/config/__init__.py | 8 ++++++--
tests/pytests/unit/config/test_master_config.py | 13 +++++++++++++
tests/pytests/unit/config/test_minion_config.py | 13 +++++++++++++
4 files changed, 33 insertions(+), 2 deletions(-)
create mode 100644 changelog/65435.fixed.md
create mode 100644 tests/pytests/unit/config/test_master_config.py
create mode 100644 tests/pytests/unit/config/test_minion_config.py
diff --git a/changelog/65435.fixed.md b/changelog/65435.fixed.md
new file mode 100644
index 0000000000..5fa532891d
--- /dev/null
+++ b/changelog/65435.fixed.md
@@ -0,0 +1 @@
+Dereference symlinks to set proper __cli opt
diff --git a/salt/config/__init__.py b/salt/config/__init__.py
index 43182f3f92..d8258a4dbc 100644
--- a/salt/config/__init__.py
+++ b/salt/config/__init__.py
@@ -3747,7 +3747,9 @@ def apply_minion_config(
)
opts["fileserver_backend"][idx] = new_val
- opts["__cli"] = salt.utils.stringutils.to_unicode(os.path.basename(sys.argv[0]))
+ opts["__cli"] = salt.utils.stringutils.to_unicode(
+ os.path.basename(salt.utils.path.expand(sys.argv[0]))
+ )
# No ID provided. Will getfqdn save us?
using_ip_for_id = False
@@ -3949,7 +3951,9 @@ def apply_master_config(overrides=None, defaults=None):
)
opts["keep_acl_in_token"] = True
- opts["__cli"] = salt.utils.stringutils.to_unicode(os.path.basename(sys.argv[0]))
+ opts["__cli"] = salt.utils.stringutils.to_unicode(
+ os.path.basename(salt.utils.path.expand(sys.argv[0]))
+ )
if "environment" in opts:
if opts["saltenv"] is not None:
diff --git a/tests/pytests/unit/config/test_master_config.py b/tests/pytests/unit/config/test_master_config.py
new file mode 100644
index 0000000000..c9de8a7892
--- /dev/null
+++ b/tests/pytests/unit/config/test_master_config.py
@@ -0,0 +1,13 @@
+import salt.config
+from tests.support.mock import MagicMock, patch
+
+
+def test___cli_path_is_expanded():
+ defaults = salt.config.DEFAULT_MASTER_OPTS.copy()
+ overrides = {}
+ with patch(
+ "salt.utils.path.expand", MagicMock(return_value="/path/to/testcli")
+ ) as expand_mock:
+ opts = salt.config.apply_master_config(overrides, defaults)
+ assert expand_mock.called
+ assert opts["__cli"] == "testcli"
diff --git a/tests/pytests/unit/config/test_minion_config.py b/tests/pytests/unit/config/test_minion_config.py
new file mode 100644
index 0000000000..34aa84daa7
--- /dev/null
+++ b/tests/pytests/unit/config/test_minion_config.py
@@ -0,0 +1,13 @@
+import salt.config
+from tests.support.mock import MagicMock, patch
+
+
+def test___cli_path_is_expanded():
+ defaults = salt.config.DEFAULT_MINION_OPTS.copy()
+ overrides = {}
+ with patch(
+ "salt.utils.path.expand", MagicMock(return_value="/path/to/testcli")
+ ) as expand_mock:
+ opts = salt.config.apply_minion_config(overrides, defaults)
+ assert expand_mock.called
+ assert opts["__cli"] == "testcli"
--
2.42.0

View File

@ -0,0 +1,188 @@
From 05fbd376090c5d7f997c510db0abb62be54d6d40 Mon Sep 17 00:00:00 2001
From: Johannes Hahn <johannes.hahn@suse.com>
Date: Tue, 20 Feb 2024 15:38:08 +0100
Subject: [PATCH] Discover both *.yml and *.yaml playbooks (bsc#1211888)
Allow for 'playbook_extension' to be either a string or a tuple and
change the default behavior to discover both.
---
changelog/66048.changed.md | 1 +
salt/modules/ansiblegate.py | 46 +++++++++----------
.../pytests/unit/modules/test_ansiblegate.py | 3 ++
.../example_playbooks/playbook1.yaml | 5 ++
4 files changed, 30 insertions(+), 25 deletions(-)
create mode 100644 changelog/66048.changed.md
create mode 100644 tests/unit/files/playbooks/example_playbooks/playbook1.yaml
diff --git a/changelog/66048.changed.md b/changelog/66048.changed.md
new file mode 100644
index 0000000000..b042e0d313
--- /dev/null
+++ b/changelog/66048.changed.md
@@ -0,0 +1 @@
+Ansiblegate discover_playbooks was changed to find playbooks as either *.yml or *.yaml files
diff --git a/salt/modules/ansiblegate.py b/salt/modules/ansiblegate.py
index 2f60a7444f..920c374e5a 100644
--- a/salt/modules/ansiblegate.py
+++ b/salt/modules/ansiblegate.py
@@ -111,7 +111,7 @@ def __virtual__():
if proc.returncode != 0:
return (
False,
- "Failed to get the listing of ansible modules:\n{}".format(proc.stderr),
+ f"Failed to get the listing of ansible modules:\n{proc.stderr}",
)
module_funcs = dir(sys.modules[__name__])
@@ -240,7 +240,7 @@ def call(module, *args, **kwargs):
_kwargs = {k: v for (k, v) in kwargs.items() if not k.startswith("__pub")}
for key, value in _kwargs.items():
- module_args.append("{}={}".format(key, salt.utils.json.dumps(value)))
+ module_args.append(f"{key}={salt.utils.json.dumps(value)}")
with NamedTemporaryFile(mode="w") as inventory:
@@ -367,15 +367,15 @@ def playbooks(
if diff:
command.append("--diff")
if isinstance(extra_vars, dict):
- command.append("--extra-vars='{}'".format(json.dumps(extra_vars)))
+ command.append(f"--extra-vars='{json.dumps(extra_vars)}'")
elif isinstance(extra_vars, str) and extra_vars.startswith("@"):
- command.append("--extra-vars={}".format(extra_vars))
+ command.append(f"--extra-vars={extra_vars}")
if flush_cache:
command.append("--flush-cache")
if inventory:
- command.append("--inventory={}".format(inventory))
+ command.append(f"--inventory={inventory}")
if limit:
- command.append("--limit={}".format(limit))
+ command.append(f"--limit={limit}")
if list_hosts:
command.append("--list-hosts")
if list_tags:
@@ -383,25 +383,25 @@ def playbooks(
if list_tasks:
command.append("--list-tasks")
if module_path:
- command.append("--module-path={}".format(module_path))
+ command.append(f"--module-path={module_path}")
if skip_tags:
- command.append("--skip-tags={}".format(skip_tags))
+ command.append(f"--skip-tags={skip_tags}")
if start_at_task:
- command.append("--start-at-task={}".format(start_at_task))
+ command.append(f"--start-at-task={start_at_task}")
if syntax_check:
command.append("--syntax-check")
if tags:
- command.append("--tags={}".format(tags))
+ command.append(f"--tags={tags}")
if playbook_kwargs:
for key, value in playbook_kwargs.items():
key = key.replace("_", "-")
if value is True:
- command.append("--{}".format(key))
+ command.append(f"--{key}")
elif isinstance(value, str):
- command.append("--{}={}".format(key, value))
+ command.append(f"--{key}={value}")
elif isinstance(value, dict):
- command.append("--{}={}".format(key, json.dumps(value)))
- command.append("--forks={}".format(forks))
+ command.append(f"--{key}={json.dumps(value)}")
+ command.append(f"--forks={forks}")
cmd_kwargs = {
"env": {
"ANSIBLE_STDOUT_CALLBACK": "json",
@@ -502,7 +502,7 @@ def discover_playbooks(
List of paths to discover playbooks from.
:param playbook_extension:
- File extension of playbooks file to search for. Default: "yml"
+ File extension(s) of playbook files to search for, can be a string or tuple of strings. Default: (".yml", ".yaml")
:param hosts_filename:
Filename of custom playbook inventory to search for. Default: "hosts"
@@ -533,19 +533,17 @@ def discover_playbooks(
)
if not playbook_extension:
- playbook_extension = "yml"
+ playbook_extension = (".yml", ".yaml")
if not hosts_filename:
hosts_filename = "hosts"
if path:
if not os.path.isabs(path):
raise CommandExecutionError(
- "The given path is not an absolute path: {}".format(path)
+ f"The given path is not an absolute path: {path}"
)
if not os.path.isdir(path):
- raise CommandExecutionError(
- "The given path is not a directory: {}".format(path)
- )
+ raise CommandExecutionError(f"The given path is not a directory: {path}")
return {
path: _explore_path(path, playbook_extension, hosts_filename, syntax_check)
}
@@ -573,7 +571,7 @@ def _explore_path(path, playbook_extension, hosts_filename, syntax_check):
# Check files in the given path
for _f in os.listdir(path):
_path = os.path.join(path, _f)
- if os.path.isfile(_path) and _path.endswith("." + playbook_extension):
+ if os.path.isfile(_path) and _path.endswith(playbook_extension):
ret[_f] = {"fullpath": _path}
# Check for custom inventory file
if os.path.isfile(os.path.join(path, hosts_filename)):
@@ -584,9 +582,7 @@ def _explore_path(path, playbook_extension, hosts_filename, syntax_check):
# Check files in the 1st level of subdirectories
for _f2 in os.listdir(_path):
_path2 = os.path.join(_path, _f2)
- if os.path.isfile(_path2) and _path2.endswith(
- "." + playbook_extension
- ):
+ if os.path.isfile(_path2) and _path2.endswith(playbook_extension):
ret[os.path.join(_f, _f2)] = {"fullpath": _path2}
# Check for custom inventory file
if os.path.isfile(os.path.join(_path, hosts_filename)):
@@ -599,7 +595,7 @@ def _explore_path(path, playbook_extension, hosts_filename, syntax_check):
)
except Exception as exc:
raise CommandExecutionError(
- "There was an exception while discovering playbooks: {}".format(exc)
+ f"There was an exception while discovering playbooks: {exc}"
)
# Run syntax check validation
diff --git a/tests/pytests/unit/modules/test_ansiblegate.py b/tests/pytests/unit/modules/test_ansiblegate.py
index 6201809c22..272da721bf 100644
--- a/tests/pytests/unit/modules/test_ansiblegate.py
+++ b/tests/pytests/unit/modules/test_ansiblegate.py
@@ -198,6 +198,9 @@ def test_ansible_discover_playbooks_single_path():
assert ret[playbooks_dir]["playbook1.yml"] == {
"fullpath": os.path.join(playbooks_dir, "playbook1.yml")
}
+ assert ret[playbooks_dir]["playbook1.yaml"] == {
+ "fullpath": os.path.join(playbooks_dir, "playbook1.yaml")
+ }
assert ret[playbooks_dir]["example-playbook2/site.yml"] == {
"fullpath": os.path.join(playbooks_dir, "example-playbook2/site.yml"),
"custom_inventory": os.path.join(playbooks_dir, "example-playbook2/hosts"),
diff --git a/tests/unit/files/playbooks/example_playbooks/playbook1.yaml b/tests/unit/files/playbooks/example_playbooks/playbook1.yaml
new file mode 100644
index 0000000000..e258a101e1
--- /dev/null
+++ b/tests/unit/files/playbooks/example_playbooks/playbook1.yaml
@@ -0,0 +1,5 @@
+---
+- hosts: all
+ gather_facts: false
+ tasks:
+ - ping:
--
2.43.1

View File

@ -0,0 +1,130 @@
From c2a35c0c0aac093d0cc35181c1fda0162e22ac4c Mon Sep 17 00:00:00 2001
From: Victor Zhestkov <35733135+vzhestkov@users.noreply.github.com>
Date: Mon, 8 Nov 2021 18:09:53 +0300
Subject: [PATCH] dnfnotify pkgset plugin implementation - 3002.2 (#450)
* dnfnotify pkgset plugin implementation
* Fix failing check
* Add error reporting if not possible to save cookie
* Try to create dir if not exists
* Show the exception message instead of file name
* Fix isort
---
scripts/suse/dnf/plugins/README.md | 21 +++++++++
scripts/suse/dnf/plugins/dnfnotify.conf | 2 +
scripts/suse/dnf/plugins/dnfnotify.py | 60 +++++++++++++++++++++++++
3 files changed, 83 insertions(+)
create mode 100644 scripts/suse/dnf/plugins/README.md
create mode 100644 scripts/suse/dnf/plugins/dnfnotify.conf
create mode 100644 scripts/suse/dnf/plugins/dnfnotify.py
diff --git a/scripts/suse/dnf/plugins/README.md b/scripts/suse/dnf/plugins/README.md
new file mode 100644
index 0000000000..b19428608e
--- /dev/null
+++ b/scripts/suse/dnf/plugins/README.md
@@ -0,0 +1,21 @@
+## What it is
+
+Plugin which provides a notification mechanism to Salt, if DNF is
+used outside of it.
+
+## Installation
+
+Configuration files are going to:
+
+ `/etc/dnf/plugins/[name].conf`
+
+Plugin itself goes to:
+
+ `%{python_sitelib}/dnf-plugins/[name].py`
+ The path to the dnf-plugins directory is Python version dependent.
+
+## Permissions
+
+User: root
+Group: root
+Mode: 644
diff --git a/scripts/suse/dnf/plugins/dnfnotify.conf b/scripts/suse/dnf/plugins/dnfnotify.conf
new file mode 100644
index 0000000000..e7002aa3e9
--- /dev/null
+++ b/scripts/suse/dnf/plugins/dnfnotify.conf
@@ -0,0 +1,2 @@
+[main]
+enabled = 1
diff --git a/scripts/suse/dnf/plugins/dnfnotify.py b/scripts/suse/dnf/plugins/dnfnotify.py
new file mode 100644
index 0000000000..6e9df85f71
--- /dev/null
+++ b/scripts/suse/dnf/plugins/dnfnotify.py
@@ -0,0 +1,60 @@
+import hashlib
+import os
+
+import dnf
+from dnfpluginscore import _, logger
+
+
+class DnfNotifyPlugin(dnf.Plugin):
+ def __init__(self, base, cli):
+ super().__init__(base, cli)
+ self.base = base
+ self.cookie_file = "/var/cache/salt/minion/rpmdb.cookie"
+ if os.path.exists("/var/lib/rpm/rpmdb.sqlite"):
+ self.rpmdb_file = "/var/lib/rpm/rpmdb.sqlite"
+ else:
+ self.rpmdb_file = "/var/lib/rpm/Packages"
+
+ def transaction(self):
+ if "SALT_RUNNING" not in os.environ:
+ try:
+ ck_dir = os.path.dirname(self.cookie_file)
+ if not os.path.exists(ck_dir):
+ os.makedirs(ck_dir)
+ with open(self.cookie_file, "w") as ck_fh:
+ ck_fh.write(
+ "{chksum} {mtime}\n".format(
+ chksum=self._get_checksum(), mtime=self._get_mtime()
+ )
+ )
+ except OSError as e:
+ logger.error(_("Unable to save cookie file: %s"), e)
+
+ def _get_mtime(self):
+ """
+ Get the modified time of the RPM Database.
+
+ Returns:
+ Unix ticks
+ """
+ return (
+ os.path.exists(self.rpmdb_file)
+ and int(os.path.getmtime(self.rpmdb_file))
+ or 0
+ )
+
+ def _get_checksum(self):
+ """
+ Get the checksum of the RPM Database.
+
+ Returns:
+ hexdigest
+ """
+ digest = hashlib.sha256()
+ with open(self.rpmdb_file, "rb") as rpm_db_fh:
+ while True:
+ buff = rpm_db_fh.read(0x1000)
+ if not buff:
+ break
+ digest.update(buff)
+ return digest.hexdigest()
--
2.39.2

View File

@ -0,0 +1,254 @@
From 4021f938ed1b64acd47ccaefc111197a1118ee4f Mon Sep 17 00:00:00 2001
From: Victor Zhestkov <vzhestkov@suse.com>
Date: Wed, 15 May 2024 11:48:46 +0200
Subject: [PATCH] Do not call the async wrapper calls with the separate
thread
* Do not run method with the distinct thread
* Move test_asynchronous.py to pytests
---
salt/utils/asynchronous.py | 25 +----
tests/pytests/unit/utils/test_asynchronous.py | 92 +++++++++++++++++++
tests/unit/utils/test_asynchronous.py | 81 ----------------
3 files changed, 94 insertions(+), 104 deletions(-)
create mode 100644 tests/pytests/unit/utils/test_asynchronous.py
delete mode 100644 tests/unit/utils/test_asynchronous.py
diff --git a/salt/utils/asynchronous.py b/salt/utils/asynchronous.py
index 88596a4a20..55a50cbcbf 100644
--- a/salt/utils/asynchronous.py
+++ b/salt/utils/asynchronous.py
@@ -2,11 +2,8 @@
Helpers/utils for working with tornado asynchronous stuff
"""
-
import contextlib
import logging
-import sys
-import threading
import salt.ext.tornado.concurrent
import salt.ext.tornado.ioloop
@@ -111,30 +108,12 @@ class SyncWrapper:
def _wrap(self, key):
def wrap(*args, **kwargs):
- results = []
- thread = threading.Thread(
- target=self._target,
- args=(key, args, kwargs, results, self.io_loop),
+ return self.io_loop.run_sync(
+ lambda: getattr(self.obj, key)(*args, **kwargs)
)
- thread.start()
- thread.join()
- if results[0]:
- return results[1]
- else:
- exc_info = results[1]
- raise exc_info[1].with_traceback(exc_info[2])
return wrap
- def _target(self, key, args, kwargs, results, io_loop):
- try:
- result = io_loop.run_sync(lambda: getattr(self.obj, key)(*args, **kwargs))
- results.append(True)
- results.append(result)
- except Exception: # pylint: disable=broad-except
- results.append(False)
- results.append(sys.exc_info())
-
def __enter__(self):
return self
diff --git a/tests/pytests/unit/utils/test_asynchronous.py b/tests/pytests/unit/utils/test_asynchronous.py
new file mode 100644
index 0000000000..2b5613e2bf
--- /dev/null
+++ b/tests/pytests/unit/utils/test_asynchronous.py
@@ -0,0 +1,92 @@
+import tornado.gen
+import tornado.ioloop
+
+import salt.utils.asynchronous as asynchronous
+
+
+class HelperA:
+
+ async_methods = [
+ "sleep",
+ ]
+
+ def __init__(self, io_loop=None):
+ pass
+
+ @tornado.gen.coroutine
+ def sleep(self):
+ yield tornado.gen.sleep(0.1)
+ raise tornado.gen.Return(True)
+
+
+class HelperB:
+
+ async_methods = [
+ "sleep",
+ ]
+
+ def __init__(self, a=None, io_loop=None):
+ if a is None:
+ a = asynchronous.SyncWrapper(HelperA)
+ self.a = a
+
+ @tornado.gen.coroutine
+ def sleep(self):
+ yield tornado.gen.sleep(0.1)
+ self.a.sleep()
+ raise tornado.gen.Return(False)
+
+
+def test_helpers():
+ """
+ Test that the helper classes do what we expect within a regular asynchronous env
+ """
+ io_loop = tornado.ioloop.IOLoop(make_current=False)
+ ret = io_loop.run_sync(lambda: HelperA().sleep())
+ assert ret is True
+
+ ret = io_loop.run_sync(lambda: HelperB().sleep())
+ assert ret is False
+
+
+def test_basic_wrap():
+ """
+ Test that we can wrap an asynchronous caller.
+ """
+ sync = asynchronous.SyncWrapper(HelperA)
+ ret = sync.sleep()
+ assert ret is True
+
+
+def test_basic_wrap_series():
+ """
+ Test that we can wrap an asynchronous caller and call the method in series.
+ """
+ sync = asynchronous.SyncWrapper(HelperA)
+ ret = sync.sleep()
+ assert ret is True
+ ret = sync.sleep()
+ assert ret is True
+
+
+def test_double():
+ """
+ Test when the asynchronous wrapper object itself creates a wrap of another thing
+
+ This works fine since the second wrap is based on the first's IOLoop so we
+ don't have to worry about complex start/stop mechanics
+ """
+ sync = asynchronous.SyncWrapper(HelperB)
+ ret = sync.sleep()
+ assert ret is False
+
+
+def test_double_sameloop():
+ """
+ Test asynchronous wrappers initiated from the same IOLoop, to ensure that
+ we don't wire up both to the same IOLoop (since it causes MANY problems).
+ """
+ a = asynchronous.SyncWrapper(HelperA)
+ sync = asynchronous.SyncWrapper(HelperB, (a,))
+ ret = sync.sleep()
+ assert ret is False
diff --git a/tests/unit/utils/test_asynchronous.py b/tests/unit/utils/test_asynchronous.py
deleted file mode 100644
index e5bd974cb6..0000000000
--- a/tests/unit/utils/test_asynchronous.py
+++ /dev/null
@@ -1,81 +0,0 @@
-import salt.ext.tornado.gen
-import salt.ext.tornado.testing
-import salt.utils.asynchronous as asynchronous
-from salt.ext.tornado.testing import AsyncTestCase
-
-
-class HelperA:
-
- async_methods = [
- "sleep",
- ]
-
- def __init__(self, io_loop=None):
- pass
-
- @salt.ext.tornado.gen.coroutine
- def sleep(self):
- yield salt.ext.tornado.gen.sleep(0.1)
- raise salt.ext.tornado.gen.Return(True)
-
-
-class HelperB:
-
- async_methods = [
- "sleep",
- ]
-
- def __init__(self, a=None, io_loop=None):
- if a is None:
- a = asynchronous.SyncWrapper(HelperA)
- self.a = a
-
- @salt.ext.tornado.gen.coroutine
- def sleep(self):
- yield salt.ext.tornado.gen.sleep(0.1)
- self.a.sleep()
- raise salt.ext.tornado.gen.Return(False)
-
-
-class TestSyncWrapper(AsyncTestCase):
- @salt.ext.tornado.testing.gen_test
- def test_helpers(self):
- """
- Test that the helper classes do what we expect within a regular asynchronous env
- """
- ha = HelperA()
- ret = yield ha.sleep()
- self.assertTrue(ret)
-
- hb = HelperB()
- ret = yield hb.sleep()
- self.assertFalse(ret)
-
- def test_basic_wrap(self):
- """
- Test that we can wrap an asynchronous caller.
- """
- sync = asynchronous.SyncWrapper(HelperA)
- ret = sync.sleep()
- self.assertTrue(ret)
-
- def test_double(self):
- """
- Test when the asynchronous wrapper object itself creates a wrap of another thing
-
- This works fine since the second wrap is based on the first's IOLoop so we
- don't have to worry about complex start/stop mechanics
- """
- sync = asynchronous.SyncWrapper(HelperB)
- ret = sync.sleep()
- self.assertFalse(ret)
-
- def test_double_sameloop(self):
- """
- Test asynchronous wrappers initiated from the same IOLoop, to ensure that
- we don't wire up both to the same IOLoop (since it causes MANY problems).
- """
- a = asynchronous.SyncWrapper(HelperA)
- sync = asynchronous.SyncWrapper(HelperB, (a,))
- ret = sync.sleep()
- self.assertFalse(ret)
--
2.45.0

View File

@ -0,0 +1,155 @@
From da544d7ab09899717e57a02321928ceaf3c6465c Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Pablo=20Su=C3=A1rez=20Hern=C3=A1ndez?=
<psuarezhernandez@suse.com>
Date: Tue, 22 Aug 2023 11:43:46 +0100
Subject: [PATCH] Do not fail on bad message pack message (bsc#1213441,
CVE-2023-20897) (#595)
* Do not fail on bad message pack message
Fix unit test after backporting to openSUSE/release/3006.0
* Better error message when inconsistent decoded payload
---------
Co-authored-by: Daniel A. Wozniak <dwozniak@vmware.com>
---
salt/channel/server.py | 10 +++
salt/transport/zeromq.py | 6 +-
tests/pytests/unit/transport/test_zeromq.py | 69 +++++++++++++++++++++
3 files changed, 84 insertions(+), 1 deletion(-)
diff --git a/salt/channel/server.py b/salt/channel/server.py
index a2117f2934..b6d51fef08 100644
--- a/salt/channel/server.py
+++ b/salt/channel/server.py
@@ -22,6 +22,7 @@ import salt.utils.minions
import salt.utils.platform
import salt.utils.stringutils
import salt.utils.verify
+from salt.exceptions import SaltDeserializationError
from salt.utils.cache import CacheCli
try:
@@ -252,6 +253,15 @@ class ReqServerChannel:
return False
def _decode_payload(self, payload):
+ # Sometimes msgpack deserialization of random bytes could be successful,
+ # so we need to ensure the payload is in good shape before processing it here.
+ if (
+ not isinstance(payload, dict)
+ or "enc" not in payload
+ or "load" not in payload
+ ):
+ raise SaltDeserializationError("bad load received on socket!")
+
# we need to decrypt it
if payload["enc"] == "aes":
try:
diff --git a/salt/transport/zeromq.py b/salt/transport/zeromq.py
index 3ec7f7726c..7cc6b9987f 100644
--- a/salt/transport/zeromq.py
+++ b/salt/transport/zeromq.py
@@ -428,7 +428,11 @@ class RequestServer(salt.transport.base.DaemonizedRequestServer):
@salt.ext.tornado.gen.coroutine
def handle_message(self, stream, payload):
- payload = self.decode_payload(payload)
+ try:
+ payload = self.decode_payload(payload)
+ except salt.exceptions.SaltDeserializationError:
+ self.stream.send(self.encode_payload({"msg": "bad load"}))
+ return
# XXX: Is header really needed?
reply = yield self.message_handler(payload)
self.stream.send(self.encode_payload(reply))
diff --git a/tests/pytests/unit/transport/test_zeromq.py b/tests/pytests/unit/transport/test_zeromq.py
index 10bb4917b8..c7cbc53864 100644
--- a/tests/pytests/unit/transport/test_zeromq.py
+++ b/tests/pytests/unit/transport/test_zeromq.py
@@ -11,6 +11,7 @@ import threading
import time
import uuid
+import msgpack
import pytest
import salt.channel.client
@@ -1404,3 +1405,71 @@ async def test_req_chan_auth_v2_new_minion_without_master_pub(pki_dir, io_loop):
assert "sig" in ret
ret = client.auth.handle_signin_response(signin_payload, ret)
assert ret == "retry"
+
+
+async def test_req_server_garbage_request(io_loop):
+ """
+ Validate invalid msgpack messages will not raise exceptions in the
+ RequestServers's message handler.
+ """
+ opts = salt.config.master_config("")
+ request_server = salt.transport.zeromq.RequestServer(opts)
+
+ def message_handler(payload):
+ return payload
+
+ request_server.post_fork(message_handler, io_loop)
+
+ byts = msgpack.dumps({"foo": "bar"})
+ badbyts = byts[:3] + b"^M" + byts[3:]
+
+ valid_response = msgpack.dumps({"msg": "bad load"})
+
+ with MagicMock() as stream:
+ request_server.stream = stream
+
+ try:
+ await request_server.handle_message(stream, badbyts)
+ except Exception as exc: # pylint: disable=broad-except
+ pytest.fail("Exception was raised {}".format(exc))
+
+ request_server.stream.send.assert_called_once_with(valid_response)
+
+
+async def test_req_chan_bad_payload_to_decode(pki_dir, io_loop):
+ opts = {
+ "master_uri": "tcp://127.0.0.1:4506",
+ "interface": "127.0.0.1",
+ "ret_port": 4506,
+ "ipv6": False,
+ "sock_dir": ".",
+ "pki_dir": str(pki_dir.joinpath("minion")),
+ "id": "minion",
+ "__role": "minion",
+ "keysize": 4096,
+ "max_minions": 0,
+ "auto_accept": False,
+ "open_mode": False,
+ "key_pass": None,
+ "publish_port": 4505,
+ "auth_mode": 1,
+ "acceptance_wait_time": 3,
+ "acceptance_wait_time_max": 3,
+ }
+ SMaster.secrets["aes"] = {
+ "secret": multiprocessing.Array(
+ ctypes.c_char,
+ salt.utils.stringutils.to_bytes(salt.crypt.Crypticle.generate_key_string()),
+ ),
+ "reload": salt.crypt.Crypticle.generate_key_string,
+ }
+ master_opts = dict(opts, pki_dir=str(pki_dir.joinpath("master")))
+ master_opts["master_sign_pubkey"] = False
+ server = salt.channel.server.ReqServerChannel.factory(master_opts)
+
+ with pytest.raises(salt.exceptions.SaltDeserializationError):
+ server._decode_payload(None)
+ with pytest.raises(salt.exceptions.SaltDeserializationError):
+ server._decode_payload({})
+ with pytest.raises(salt.exceptions.SaltDeserializationError):
+ server._decode_payload(12345)
--
2.41.0

View File

@ -0,0 +1,46 @@
From 4060d4cd24ac0fbcf83c1521553921d76c070a57 Mon Sep 17 00:00:00 2001
From: Bo Maryniuk <bo@suse.de>
Date: Fri, 21 Sep 2018 17:31:39 +0200
Subject: [PATCH] Do not load pip state if there is no 3rd party
dependencies
Safe import 3rd party dependency
---
salt/modules/pip.py | 13 ++++++++++++-
1 file changed, 12 insertions(+), 1 deletion(-)
diff --git a/salt/modules/pip.py b/salt/modules/pip.py
index c4de0c2984..a60bdca0bb 100644
--- a/salt/modules/pip.py
+++ b/salt/modules/pip.py
@@ -96,6 +96,12 @@ import salt.utils.url
import salt.utils.versions
from salt.exceptions import CommandExecutionError, CommandNotFoundError
+try:
+ import pkg_resources
+except ImportError:
+ pkg_resources = None
+
+
# This needs to be named logger so we don't shadow it in pip.install
logger = logging.getLogger(__name__) # pylint: disable=invalid-name
@@ -114,7 +120,12 @@ def __virtual__():
entire filesystem. If it's not installed in a conventional location, the
user is required to provide the location of pip each time it is used.
"""
- return "pip"
+ if pkg_resources is None:
+ ret = False, 'Package dependency "pkg_resource" is missing'
+ else:
+ ret = "pip"
+
+ return ret
def _pip_bin_env(cwd, bin_env):
--
2.39.2

View File

@ -0,0 +1,39 @@
From da6adc6984f21c0d93afff0b0ff55d0eb0ee3e9f Mon Sep 17 00:00:00 2001
From: Alexander Graul <agraul@suse.com>
Date: Tue, 17 Aug 2021 11:52:00 +0200
Subject: [PATCH] Don't use shell="/sbin/nologin" in requisites
Using shell="/sbin/nologin" in an onlyif/unless requisite does not
really make sense since the condition can't be run. shell=/sbin/nologin
is also a common argument, e.g. for user.present.
Fixes: bsc#1188259
---
salt/state.py | 9 +++++++--
1 file changed, 7 insertions(+), 2 deletions(-)
diff --git a/salt/state.py b/salt/state.py
index cb434a91e7..cda84a0fcb 100644
--- a/salt/state.py
+++ b/salt/state.py
@@ -986,9 +986,14 @@ class State:
cmd_opts[run_cmd_arg] = low_data.get(run_cmd_arg)
if "shell" in low_data and "shell" not in cmd_opts_exclude:
- cmd_opts["shell"] = low_data["shell"]
+ shell = low_data["shell"]
elif "shell" in self.opts["grains"]:
- cmd_opts["shell"] = self.opts["grains"].get("shell")
+ shell = self.opts["grains"].get("shell")
+ else:
+ shell = None
+ # /sbin/nologin always causes the onlyif / unless cmd to fail
+ if shell is not None and shell != "/sbin/nologin":
+ cmd_opts["shell"] = shell
if "onlyif" in low_data:
_ret = self._run_check_onlyif(low_data, cmd_opts)
--
2.39.2

View File

@ -0,0 +1,34 @@
From e7ef0b5a46cc69a9237033d8dc4dbc60c0802a20 Mon Sep 17 00:00:00 2001
From: Alexander Graul <agraul@suse.com>
Date: Mon, 31 Jan 2022 10:24:26 +0100
Subject: [PATCH] Drop serial from event.unpack in cli.batch_async
---
salt/cli/batch_async.py | 3 +--
1 file changed, 1 insertion(+), 2 deletions(-)
diff --git a/salt/cli/batch_async.py b/salt/cli/batch_async.py
index 09aa85258b..1012ce37cc 100644
--- a/salt/cli/batch_async.py
+++ b/salt/cli/batch_async.py
@@ -9,7 +9,6 @@ import logging
import salt.client
import salt.ext.tornado
-import tornado
from salt.cli.batch import batch_get_eauth, batch_get_opts, get_bnum
log = logging.getLogger(__name__)
@@ -109,7 +108,7 @@ class BatchAsync:
if not self.event:
return
try:
- mtag, data = self.event.unpack(raw, self.event.serial)
+ mtag, data = self.event.unpack(raw)
for (pattern, op) in self.patterns:
if mtag.startswith(pattern[:-1]):
minion = data["id"]
--
2.39.2

File diff suppressed because it is too large Load Diff

View File

@ -0,0 +1,346 @@
From 5303cc612bcbdb1ec45ede397ca1e2ca12ba3bd3 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Pablo=20Su=C3=A1rez=20Hern=C3=A1ndez?=
<psuarezhernandez@suse.com>
Date: Fri, 1 Dec 2023 10:59:30 +0000
Subject: [PATCH] Enable "KeepAlive" probes for Salt SSH executions
(bsc#1211649) (#610)
* Enable KeepAlive probes for Salt SSH connections (bsc#1211649)
* Add tests for Salt SSH keepalive options
* Add changelog file
* Make changes suggested by pre-commit
---
changelog/65488.added.md | 1 +
salt/client/ssh/__init__.py | 32 +++++++++---
salt/client/ssh/client.py | 13 ++++-
salt/client/ssh/shell.py | 12 +++++
salt/config/__init__.py | 6 +++
salt/utils/parsers.py | 19 +++++++
tests/pytests/unit/client/ssh/test_single.py | 55 ++++++++++++++++++++
tests/pytests/unit/client/ssh/test_ssh.py | 3 ++
8 files changed, 133 insertions(+), 8 deletions(-)
create mode 100644 changelog/65488.added.md
diff --git a/changelog/65488.added.md b/changelog/65488.added.md
new file mode 100644
index 0000000000..78476cec11
--- /dev/null
+++ b/changelog/65488.added.md
@@ -0,0 +1 @@
+Enable "KeepAlive" probes for Salt SSH executions
diff --git a/salt/client/ssh/__init__.py b/salt/client/ssh/__init__.py
index 1e143f9e30..1d8426b7c2 100644
--- a/salt/client/ssh/__init__.py
+++ b/salt/client/ssh/__init__.py
@@ -50,8 +50,8 @@ import salt.utils.thin
import salt.utils.url
import salt.utils.verify
from salt._logging import LOG_LEVELS
-from salt._logging.mixins import MultiprocessingStateMixin
from salt._logging.impl import LOG_LOCK
+from salt._logging.mixins import MultiprocessingStateMixin
from salt.template import compile_template
from salt.utils.process import Process
from salt.utils.zeromq import zmq
@@ -307,6 +307,18 @@ class SSH(MultiprocessingStateMixin):
"ssh_timeout", salt.config.DEFAULT_MASTER_OPTS["ssh_timeout"]
)
+ self.opts.get("timeout", salt.config.DEFAULT_MASTER_OPTS["timeout"]),
+ "keepalive": self.opts.get(
+ "ssh_keepalive",
+ salt.config.DEFAULT_MASTER_OPTS["ssh_keepalive"],
+ ),
+ "keepalive_interval": self.opts.get(
+ "ssh_keepalive_interval",
+ salt.config.DEFAULT_MASTER_OPTS["ssh_keepalive_interval"],
+ ),
+ "keepalive_count_max": self.opts.get(
+ "ssh_keepalive_count_max",
+ salt.config.DEFAULT_MASTER_OPTS["ssh_keepalive_count_max"],
+ ),
"sudo": self.opts.get(
"ssh_sudo", salt.config.DEFAULT_MASTER_OPTS["ssh_sudo"]
),
@@ -557,7 +569,7 @@ class SSH(MultiprocessingStateMixin):
mods=self.mods,
fsclient=self.fsclient,
thin=self.thin,
- **target
+ **target,
)
if salt.utils.path.which("ssh-copy-id"):
# we have ssh-copy-id, use it!
@@ -573,7 +585,7 @@ class SSH(MultiprocessingStateMixin):
mods=self.mods,
fsclient=self.fsclient,
thin=self.thin,
- **target
+ **target,
)
stdout, stderr, retcode = single.cmd_block()
try:
@@ -601,7 +613,7 @@ class SSH(MultiprocessingStateMixin):
fsclient=self.fsclient,
thin=self.thin,
mine=mine,
- **target
+ **target,
)
ret = {"id": single.id}
stdout, stderr, retcode = single.run()
@@ -1022,7 +1034,10 @@ class Single:
remote_port_forwards=None,
winrm=False,
ssh_options=None,
- **kwargs
+ keepalive=True,
+ keepalive_interval=60,
+ keepalive_count_max=3,
+ **kwargs,
):
# Get mine setting and mine_functions if defined in kwargs (from roster)
self.mine = mine
@@ -1081,6 +1096,9 @@ class Single:
"priv": priv,
"priv_passwd": priv_passwd,
"timeout": timeout,
+ "keepalive": keepalive,
+ "keepalive_interval": keepalive_interval,
+ "keepalive_count_max": keepalive_count_max,
"sudo": sudo,
"tty": tty,
"mods": self.mods,
@@ -1302,7 +1320,7 @@ class Single:
self.id,
fsclient=self.fsclient,
minion_opts=self.minion_opts,
- **self.target
+ **self.target,
)
opts_pkg = pre_wrapper["test.opts_pkg"]() # pylint: disable=E1102
@@ -1388,7 +1406,7 @@ class Single:
self.id,
fsclient=self.fsclient,
minion_opts=self.minion_opts,
- **self.target
+ **self.target,
)
wrapper.fsclient.opts["cachedir"] = opts["cachedir"]
self.wfuncs = salt.loader.ssh_wrapper(opts, wrapper, self.context)
diff --git a/salt/client/ssh/client.py b/salt/client/ssh/client.py
index 0b67598fc6..a00f5de423 100644
--- a/salt/client/ssh/client.py
+++ b/salt/client/ssh/client.py
@@ -52,6 +52,9 @@ class SSHClient:
("ssh_priv_passwd", str),
("ssh_identities_only", bool),
("ssh_remote_port_forwards", str),
+ ("ssh_keepalive", bool),
+ ("ssh_keepalive_interval", int),
+ ("ssh_keepalive_count_max", int),
("ssh_options", list),
("ssh_max_procs", int),
("ssh_askpass", bool),
@@ -108,7 +111,15 @@ class SSHClient:
return sane_kwargs
def _prep_ssh(
- self, tgt, fun, arg=(), timeout=None, tgt_type="glob", kwarg=None, context=None, **kwargs
+ self,
+ tgt,
+ fun,
+ arg=(),
+ timeout=None,
+ tgt_type="glob",
+ kwarg=None,
+ context=None,
+ **kwargs
):
"""
Prepare the arguments
diff --git a/salt/client/ssh/shell.py b/salt/client/ssh/shell.py
index bc1ad034df..182e2c19e3 100644
--- a/salt/client/ssh/shell.py
+++ b/salt/client/ssh/shell.py
@@ -85,6 +85,9 @@ class Shell:
remote_port_forwards=None,
winrm=False,
ssh_options=None,
+ keepalive=True,
+ keepalive_interval=None,
+ keepalive_count_max=None,
):
self.opts = opts
# ssh <ipv6>, but scp [<ipv6]:/path
@@ -95,6 +98,9 @@ class Shell:
self.priv = priv
self.priv_passwd = priv_passwd
self.timeout = timeout
+ self.keepalive = keepalive
+ self.keepalive_interval = keepalive_interval
+ self.keepalive_count_max = keepalive_count_max
self.sudo = sudo
self.tty = tty
self.mods = mods
@@ -130,6 +136,9 @@ class Shell:
if self.opts.get("_ssh_version", (0,)) > (4, 9):
options.append("GSSAPIAuthentication=no")
options.append("ConnectTimeout={}".format(self.timeout))
+ if self.keepalive:
+ options.append(f"ServerAliveInterval={self.keepalive_interval}")
+ options.append(f"ServerAliveCountMax={self.keepalive_count_max}")
if self.opts.get("ignore_host_keys"):
options.append("StrictHostKeyChecking=no")
if self.opts.get("no_host_keys"):
@@ -165,6 +174,9 @@ class Shell:
if self.opts["_ssh_version"] > (4, 9):
options.append("GSSAPIAuthentication=no")
options.append("ConnectTimeout={}".format(self.timeout))
+ if self.keepalive:
+ options.append(f"ServerAliveInterval={self.keepalive_interval}")
+ options.append(f"ServerAliveCountMax={self.keepalive_count_max}")
if self.opts.get("ignore_host_keys"):
options.append("StrictHostKeyChecking=no")
if self.opts.get("no_host_keys"):
diff --git a/salt/config/__init__.py b/salt/config/__init__.py
index d8258a4dbc..68f2b0f674 100644
--- a/salt/config/__init__.py
+++ b/salt/config/__init__.py
@@ -822,6 +822,9 @@ VALID_OPTS = immutabletypes.freeze(
"ssh_scan_ports": str,
"ssh_scan_timeout": float,
"ssh_identities_only": bool,
+ "ssh_keepalive": bool,
+ "ssh_keepalive_interval": int,
+ "ssh_keepalive_count_max": int,
"ssh_log_file": str,
"ssh_config_file": str,
"ssh_merge_pillar": bool,
@@ -1592,6 +1595,9 @@ DEFAULT_MASTER_OPTS = immutabletypes.freeze(
"ssh_scan_ports": "22",
"ssh_scan_timeout": 0.01,
"ssh_identities_only": False,
+ "ssh_keepalive": True,
+ "ssh_keepalive_interval": 60,
+ "ssh_keepalive_count_max": 3,
"ssh_log_file": os.path.join(salt.syspaths.LOGS_DIR, "ssh"),
"ssh_config_file": os.path.join(salt.syspaths.HOME_DIR, ".ssh", "config"),
"cluster_mode": False,
diff --git a/salt/utils/parsers.py b/salt/utils/parsers.py
index dc125de7d7..6c7f9f2f66 100644
--- a/salt/utils/parsers.py
+++ b/salt/utils/parsers.py
@@ -3383,6 +3383,25 @@ class SaltSSHOptionParser(
"-R parameters."
),
)
+ ssh_group.add_option(
+ "--disable-keepalive",
+ default=True,
+ action="store_false",
+ dest="ssh_keepalive",
+ help=(
+ "Disable KeepAlive probes (ServerAliveInterval) for the SSH connection."
+ ),
+ )
+ ssh_group.add_option(
+ "--keepalive-interval",
+ dest="ssh_keepalive_interval",
+ help=("Define the value for ServerAliveInterval option."),
+ )
+ ssh_group.add_option(
+ "--keepalive-count-max",
+ dest="ssh_keepalive_count_max",
+ help=("Define the value for ServerAliveCountMax option."),
+ )
ssh_group.add_option(
"--ssh-option",
dest="ssh_options",
diff --git a/tests/pytests/unit/client/ssh/test_single.py b/tests/pytests/unit/client/ssh/test_single.py
index c88a1c2127..8d87da8700 100644
--- a/tests/pytests/unit/client/ssh/test_single.py
+++ b/tests/pytests/unit/client/ssh/test_single.py
@@ -63,6 +63,61 @@ def test_single_opts(opts, target):
**target,
)
+ assert single.shell._ssh_opts() == ""
+ expected_cmd = (
+ "ssh login1 "
+ "-o KbdInteractiveAuthentication=no -o "
+ "PasswordAuthentication=yes -o ConnectTimeout=65 -o ServerAliveInterval=60 "
+ "-o ServerAliveCountMax=3 -o Port=22 "
+ "-o IdentityFile=/etc/salt/pki/master/ssh/salt-ssh.rsa "
+ "-o User=root date +%s"
+ )
+ assert single.shell._cmd_str("date +%s") == expected_cmd
+
+
+def test_single_opts_custom_keepalive_options(opts, target):
+ """Sanity check for ssh.Single options with custom keepalive"""
+
+ single = ssh.Single(
+ opts,
+ opts["argv"],
+ "localhost",
+ mods={},
+ fsclient=None,
+ thin=salt.utils.thin.thin_path(opts["cachedir"]),
+ mine=False,
+ keepalive_interval=15,
+ keepalive_count_max=5,
+ **target,
+ )
+
+ assert single.shell._ssh_opts() == ""
+ expected_cmd = (
+ "ssh login1 "
+ "-o KbdInteractiveAuthentication=no -o "
+ "PasswordAuthentication=yes -o ConnectTimeout=65 -o ServerAliveInterval=15 "
+ "-o ServerAliveCountMax=5 -o Port=22 "
+ "-o IdentityFile=/etc/salt/pki/master/ssh/salt-ssh.rsa "
+ "-o User=root date +%s"
+ )
+ assert single.shell._cmd_str("date +%s") == expected_cmd
+
+
+def test_single_opts_disable_keepalive(opts, target):
+ """Sanity check for ssh.Single options with custom keepalive"""
+
+ single = ssh.Single(
+ opts,
+ opts["argv"],
+ "localhost",
+ mods={},
+ fsclient=None,
+ thin=salt.utils.thin.thin_path(opts["cachedir"]),
+ mine=False,
+ keepalive=False,
+ **target,
+ )
+
assert single.shell._ssh_opts() == ""
expected_cmd = (
"ssh login1 "
diff --git a/tests/pytests/unit/client/ssh/test_ssh.py b/tests/pytests/unit/client/ssh/test_ssh.py
index cece16026c..23223ba8ec 100644
--- a/tests/pytests/unit/client/ssh/test_ssh.py
+++ b/tests/pytests/unit/client/ssh/test_ssh.py
@@ -78,6 +78,9 @@ def roster():
("ssh_scan_ports", "test", True),
("ssh_scan_timeout", 1.0, True),
("ssh_timeout", 1, False),
+ ("ssh_keepalive", True, True),
+ ("ssh_keepalive_interval", 30, True),
+ ("ssh_keepalive_count_max", 3, True),
("ssh_log_file", "/tmp/test", True),
("raw_shell", True, True),
("refresh_cache", True, True),
--
2.42.0

View File

@ -0,0 +1,68 @@
From e9d52cb97d619a76355c5aa1d03b733c125c0f22 Mon Sep 17 00:00:00 2001
From: Maximilian Meister <mmeister@suse.de>
Date: Thu, 3 May 2018 15:52:23 +0200
Subject: [PATCH] enable passing a unix_socket for mysql returners
(bsc#1091371)
quick fix for:
https://bugzilla.suse.com/show_bug.cgi?id=1091371
the upstream patch will go through some bigger refactoring of
the mysql drivers to be cleaner
this patch should only be temporary and can be dropped again once
the refactor is done upstream
Signed-off-by: Maximilian Meister <mmeister@suse.de>
---
salt/returners/mysql.py | 5 +++++
1 file changed, 5 insertions(+)
diff --git a/salt/returners/mysql.py b/salt/returners/mysql.py
index 67b44004ac..a220f11465 100644
--- a/salt/returners/mysql.py
+++ b/salt/returners/mysql.py
@@ -17,6 +17,7 @@ config. These are the defaults:
mysql.pass: 'salt'
mysql.db: 'salt'
mysql.port: 3306
+ mysql.unix_socket: '/tmp/mysql.sock'
SSL is optional. The defaults are set to None. If you do not want to use SSL,
either exclude these options or set them to None.
@@ -42,6 +43,7 @@ optional. The following ssl options are simply for illustration purposes:
alternative.mysql.ssl_ca: '/etc/pki/mysql/certs/localhost.pem'
alternative.mysql.ssl_cert: '/etc/pki/mysql/certs/localhost.crt'
alternative.mysql.ssl_key: '/etc/pki/mysql/certs/localhost.key'
+ alternative.mysql.unix_socket: '/tmp/mysql.sock'
Should you wish the returner data to be cleaned out every so often, set
`keep_jobs_seconds` to the number of hours for the jobs to live in the
@@ -197,6 +199,7 @@ def _get_options(ret=None):
"ssl_ca": None,
"ssl_cert": None,
"ssl_key": None,
+ "unix_socket": "/tmp/mysql.sock",
}
attrs = {
@@ -208,6 +211,7 @@ def _get_options(ret=None):
"ssl_ca": "ssl_ca",
"ssl_cert": "ssl_cert",
"ssl_key": "ssl_key",
+ "unix_socket": "unix_socket",
}
_options = salt.returners.get_returner_options(
@@ -266,6 +270,7 @@ def _get_serv(ret=None, commit=False):
db=_options.get("db"),
port=_options.get("port"),
ssl=ssl_options,
+ unix_socket=_options.get("unix_socket"),
)
try:
--
2.39.2

View File

@ -0,0 +1,425 @@
From 17452801e950b3f49a9ec7ef444e3d57862cd9bf Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Pablo=20Su=C3=A1rez=20Hern=C3=A1ndez?=
<psuarezhernandez@suse.com>
Date: Wed, 7 Jul 2021 15:41:48 +0100
Subject: [PATCH] Enhance openscap module: add "xccdf_eval" call (#386)
* Enhance openscap module: add xccdf_eval call
* Allow 'tailoring_file' and 'tailoring_id' parameters
* Fix wrong reference to subprocess.PIPE in openscap unit tests
* Add changes suggested by pre-commit
Co-authored-by: Michael Calmer <mc@suse.de>
Fix error handling in openscap module (bsc#1188647) (#409)
---
changelog/59756.added | 1 +
salt/modules/openscap.py | 116 +++++++++++++-
tests/unit/modules/test_openscap.py | 234 ++++++++++++++++++++++++++++
3 files changed, 350 insertions(+), 1 deletion(-)
create mode 100644 changelog/59756.added
diff --git a/changelog/59756.added b/changelog/59756.added
new file mode 100644
index 0000000000..a59fb21eef
--- /dev/null
+++ b/changelog/59756.added
@@ -0,0 +1 @@
+adding new call for openscap xccdf eval supporting new parameters
diff --git a/salt/modules/openscap.py b/salt/modules/openscap.py
index 770c8e7c04..216fd89eef 100644
--- a/salt/modules/openscap.py
+++ b/salt/modules/openscap.py
@@ -4,6 +4,7 @@ Module for OpenSCAP Management
"""
+import os.path
import shlex
import shutil
import tempfile
@@ -55,6 +56,117 @@ _OSCAP_EXIT_CODES_MAP = {
}
+def xccdf_eval(xccdffile, ovalfiles=None, **kwargs):
+ """
+ Run ``oscap xccdf eval`` commands on minions.
+ It uses cp.push_dir to upload the generated files to the salt master
+ in the master's minion files cachedir
+ (defaults to ``/var/cache/salt/master/minions/minion-id/files``)
+
+ It needs ``file_recv`` set to ``True`` in the master configuration file.
+
+ xccdffile
+ the path to the xccdf file to evaluate
+
+ ovalfiles
+ additional oval definition files
+
+ profile
+ the name of Profile to be evaluated
+
+ rule
+ the name of a single rule to be evaluated
+
+ oval_results
+ save OVAL results as well (True or False)
+
+ results
+ write XCCDF Results into given file
+
+ report
+ write HTML report into given file
+
+ fetch_remote_resources
+ download remote content referenced by XCCDF (True or False)
+
+ tailoring_file
+ use given XCCDF Tailoring file
+
+ tailoring_id
+ use given DS component as XCCDF Tailoring file
+
+ remediate
+ automatically execute XCCDF fix elements for failed rules.
+ Use of this option is always at your own risk. (True or False)
+
+ CLI Example:
+
+ .. code-block:: bash
+
+ salt '*' openscap.xccdf_eval /usr/share/openscap/scap-yast2sec-xccdf.xml profile=Default
+
+ """
+ success = True
+ error = None
+ upload_dir = None
+ returncode = None
+ if not ovalfiles:
+ ovalfiles = []
+
+ cmd_opts = ["oscap", "xccdf", "eval"]
+ if kwargs.get("oval_results"):
+ cmd_opts.append("--oval-results")
+ if "results" in kwargs:
+ cmd_opts.append("--results")
+ cmd_opts.append(kwargs["results"])
+ if "report" in kwargs:
+ cmd_opts.append("--report")
+ cmd_opts.append(kwargs["report"])
+ if "profile" in kwargs:
+ cmd_opts.append("--profile")
+ cmd_opts.append(kwargs["profile"])
+ if "rule" in kwargs:
+ cmd_opts.append("--rule")
+ cmd_opts.append(kwargs["rule"])
+ if "tailoring_file" in kwargs:
+ cmd_opts.append("--tailoring-file")
+ cmd_opts.append(kwargs["tailoring_file"])
+ if "tailoring_id" in kwargs:
+ cmd_opts.append("--tailoring-id")
+ cmd_opts.append(kwargs["tailoring_id"])
+ if kwargs.get("fetch_remote_resources"):
+ cmd_opts.append("--fetch-remote-resources")
+ if kwargs.get("remediate"):
+ cmd_opts.append("--remediate")
+ cmd_opts.append(xccdffile)
+ cmd_opts.extend(ovalfiles)
+
+ if not os.path.exists(xccdffile):
+ success = False
+ error = "XCCDF File '{}' does not exist".format(xccdffile)
+ for ofile in ovalfiles:
+ if success and not os.path.exists(ofile):
+ success = False
+ error = "Oval File '{}' does not exist".format(ofile)
+
+ if success:
+ tempdir = tempfile.mkdtemp()
+ proc = Popen(cmd_opts, stdout=PIPE, stderr=PIPE, cwd=tempdir)
+ (stdoutdata, error) = proc.communicate()
+ success = _OSCAP_EXIT_CODES_MAP.get(proc.returncode, False)
+ if proc.returncode < 0:
+ error += "\nKilled by signal {}\n".format(proc.returncode).encode('ascii')
+ returncode = proc.returncode
+ if success:
+ __salt__["cp.push_dir"](tempdir)
+ upload_dir = tempdir
+ shutil.rmtree(tempdir, ignore_errors=True)
+
+ return dict(
+ success=success, upload_dir=upload_dir, error=error, returncode=returncode
+ )
+
+
def xccdf(params):
"""
Run ``oscap xccdf`` commands on minions.
@@ -92,7 +204,9 @@ def xccdf(params):
tempdir = tempfile.mkdtemp()
proc = Popen(shlex.split(cmd), stdout=PIPE, stderr=PIPE, cwd=tempdir)
(stdoutdata, error) = proc.communicate()
- success = _OSCAP_EXIT_CODES_MAP[proc.returncode]
+ success = _OSCAP_EXIT_CODES_MAP.get(proc.returncode, False)
+ if proc.returncode < 0:
+ error += "\nKilled by signal {}\n".format(proc.returncode).encode('ascii')
returncode = proc.returncode
if success:
__salt__["cp.push_dir"](tempdir)
diff --git a/tests/unit/modules/test_openscap.py b/tests/unit/modules/test_openscap.py
index 045c37f7c9..301c1869ec 100644
--- a/tests/unit/modules/test_openscap.py
+++ b/tests/unit/modules/test_openscap.py
@@ -21,6 +21,7 @@ class OpenscapTestCase(TestCase):
"salt.modules.openscap.tempfile.mkdtemp",
Mock(return_value=self.random_temp_dir),
),
+ patch("salt.modules.openscap.os.path.exists", Mock(return_value=True)),
]
for patcher in patchers:
self.apply_patch(patcher)
@@ -211,3 +212,236 @@ class OpenscapTestCase(TestCase):
"returncode": None,
},
)
+
+ def test_new_openscap_xccdf_eval_success(self):
+ with patch(
+ "salt.modules.openscap.Popen",
+ MagicMock(
+ return_value=Mock(
+ **{"returncode": 0, "communicate.return_value": ("", "")}
+ )
+ ),
+ ):
+ response = openscap.xccdf_eval(
+ self.policy_file,
+ profile="Default",
+ oval_results=True,
+ results="results.xml",
+ report="report.html",
+ )
+
+ self.assertEqual(openscap.tempfile.mkdtemp.call_count, 1)
+ expected_cmd = [
+ "oscap",
+ "xccdf",
+ "eval",
+ "--oval-results",
+ "--results",
+ "results.xml",
+ "--report",
+ "report.html",
+ "--profile",
+ "Default",
+ self.policy_file,
+ ]
+ openscap.Popen.assert_called_once_with(
+ expected_cmd,
+ cwd=openscap.tempfile.mkdtemp.return_value,
+ stderr=subprocess.PIPE,
+ stdout=subprocess.PIPE,
+ )
+ openscap.__salt__["cp.push_dir"].assert_called_once_with(
+ self.random_temp_dir
+ )
+ self.assertEqual(openscap.shutil.rmtree.call_count, 1)
+ self.assertEqual(
+ response,
+ {
+ "upload_dir": self.random_temp_dir,
+ "error": "",
+ "success": True,
+ "returncode": 0,
+ },
+ )
+
+ def test_new_openscap_xccdf_eval_success_with_extra_ovalfiles(self):
+ with patch(
+ "salt.modules.openscap.Popen",
+ MagicMock(
+ return_value=Mock(
+ **{"returncode": 0, "communicate.return_value": ("", "")}
+ )
+ ),
+ ):
+ response = openscap.xccdf_eval(
+ self.policy_file,
+ ["/usr/share/xml/another-oval.xml", "/usr/share/xml/oval.xml"],
+ profile="Default",
+ oval_results=True,
+ results="results.xml",
+ report="report.html",
+ )
+
+ self.assertEqual(openscap.tempfile.mkdtemp.call_count, 1)
+ expected_cmd = [
+ "oscap",
+ "xccdf",
+ "eval",
+ "--oval-results",
+ "--results",
+ "results.xml",
+ "--report",
+ "report.html",
+ "--profile",
+ "Default",
+ self.policy_file,
+ "/usr/share/xml/another-oval.xml",
+ "/usr/share/xml/oval.xml",
+ ]
+ openscap.Popen.assert_called_once_with(
+ expected_cmd,
+ cwd=openscap.tempfile.mkdtemp.return_value,
+ stderr=subprocess.PIPE,
+ stdout=subprocess.PIPE,
+ )
+ openscap.__salt__["cp.push_dir"].assert_called_once_with(
+ self.random_temp_dir
+ )
+ self.assertEqual(openscap.shutil.rmtree.call_count, 1)
+ self.assertEqual(
+ response,
+ {
+ "upload_dir": self.random_temp_dir,
+ "error": "",
+ "success": True,
+ "returncode": 0,
+ },
+ )
+
+ def test_new_openscap_xccdf_eval_success_with_failing_rules(self):
+ with patch(
+ "salt.modules.openscap.Popen",
+ MagicMock(
+ return_value=Mock(
+ **{"returncode": 2, "communicate.return_value": ("", "some error")}
+ )
+ ),
+ ):
+ response = openscap.xccdf_eval(
+ self.policy_file,
+ profile="Default",
+ oval_results=True,
+ results="results.xml",
+ report="report.html",
+ )
+
+ self.assertEqual(openscap.tempfile.mkdtemp.call_count, 1)
+ expected_cmd = [
+ "oscap",
+ "xccdf",
+ "eval",
+ "--oval-results",
+ "--results",
+ "results.xml",
+ "--report",
+ "report.html",
+ "--profile",
+ "Default",
+ self.policy_file,
+ ]
+ openscap.Popen.assert_called_once_with(
+ expected_cmd,
+ cwd=openscap.tempfile.mkdtemp.return_value,
+ stderr=subprocess.PIPE,
+ stdout=subprocess.PIPE,
+ )
+ openscap.__salt__["cp.push_dir"].assert_called_once_with(
+ self.random_temp_dir
+ )
+ self.assertEqual(openscap.shutil.rmtree.call_count, 1)
+ self.assertEqual(
+ response,
+ {
+ "upload_dir": self.random_temp_dir,
+ "error": "some error",
+ "success": True,
+ "returncode": 2,
+ },
+ )
+
+ def test_new_openscap_xccdf_eval_success_ignore_unknown_params(self):
+ with patch(
+ "salt.modules.openscap.Popen",
+ MagicMock(
+ return_value=Mock(
+ **{"returncode": 2, "communicate.return_value": ("", "some error")}
+ )
+ ),
+ ):
+ response = openscap.xccdf_eval(
+ "/policy/file",
+ param="Default",
+ profile="Default",
+ oval_results=True,
+ results="results.xml",
+ report="report.html",
+ )
+
+ self.assertEqual(
+ response,
+ {
+ "upload_dir": self.random_temp_dir,
+ "error": "some error",
+ "success": True,
+ "returncode": 2,
+ },
+ )
+ expected_cmd = [
+ "oscap",
+ "xccdf",
+ "eval",
+ "--oval-results",
+ "--results",
+ "results.xml",
+ "--report",
+ "report.html",
+ "--profile",
+ "Default",
+ "/policy/file",
+ ]
+ openscap.Popen.assert_called_once_with(
+ expected_cmd,
+ cwd=openscap.tempfile.mkdtemp.return_value,
+ stderr=subprocess.PIPE,
+ stdout=subprocess.PIPE,
+ )
+
+ def test_new_openscap_xccdf_eval_evaluation_error(self):
+ with patch(
+ "salt.modules.openscap.Popen",
+ MagicMock(
+ return_value=Mock(
+ **{
+ "returncode": 1,
+ "communicate.return_value": ("", "evaluation error"),
+ }
+ )
+ ),
+ ):
+ response = openscap.xccdf_eval(
+ self.policy_file,
+ profile="Default",
+ oval_results=True,
+ results="results.xml",
+ report="report.html",
+ )
+
+ self.assertEqual(
+ response,
+ {
+ "upload_dir": None,
+ "error": "evaluation error",
+ "success": False,
+ "returncode": 1,
+ },
+ )
--
2.39.2

25
fix-bsc-1065792.patch Normal file
View File

@ -0,0 +1,25 @@
From 42a5e5d1a898d7b8bdb56a94decf525204ebccb8 Mon Sep 17 00:00:00 2001
From: Bo Maryniuk <bo@suse.de>
Date: Thu, 14 Dec 2017 16:21:40 +0100
Subject: [PATCH] Fix bsc#1065792
---
salt/states/service.py | 1 +
1 file changed, 1 insertion(+)
diff --git a/salt/states/service.py b/salt/states/service.py
index 93c7c4fb07..0d8a4efa03 100644
--- a/salt/states/service.py
+++ b/salt/states/service.py
@@ -78,6 +78,7 @@ def __virtual__():
Only make these states available if a service provider has been detected or
assigned for this minion
"""
+ __salt__._load_all()
if "service.start" in __salt__:
return __virtualname__
else:
--
2.39.2

View File

@ -0,0 +1,69 @@
From 3403a7391df785be31b6fbe401a8229c2007ac19 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Pablo=20Su=C3=A1rez=20Hern=C3=A1ndez?=
<psuarezhernandez@suse.com>
Date: Mon, 2 Oct 2023 10:44:05 +0100
Subject: [PATCH] Fix calculation of SLS context vars when trailing dots
on targetted sls/state (bsc#1213518) (#598)
* Fix calculation of SLS context vars when trailing dots on targetted state
* Add changelog file
---
changelog/63411.fixed.md | 1 +
salt/utils/templates.py | 5 +++--
tests/unit/utils/test_templates.py | 14 ++++++++++++++
3 files changed, 18 insertions(+), 2 deletions(-)
create mode 100644 changelog/63411.fixed.md
diff --git a/changelog/63411.fixed.md b/changelog/63411.fixed.md
new file mode 100644
index 0000000000..65340e3652
--- /dev/null
+++ b/changelog/63411.fixed.md
@@ -0,0 +1 @@
+Fix calculation of SLS context vars when trailing dots on targetted state
diff --git a/salt/utils/templates.py b/salt/utils/templates.py
index 4a8adf2a14..8639ea703e 100644
--- a/salt/utils/templates.py
+++ b/salt/utils/templates.py
@@ -113,8 +113,9 @@ def generate_sls_context(tmplpath, sls):
sls_context = {}
- # Normalize SLS as path.
- slspath = sls.replace(".", "/")
+ # Normalize SLS as path and remove possible trailing slashes
+ # to prevent matching issues and wrong vars calculation
+ slspath = sls.replace(".", "/").rstrip("/")
if tmplpath:
# Normalize template path
diff --git a/tests/unit/utils/test_templates.py b/tests/unit/utils/test_templates.py
index 4ba2f52d7b..264b4ae801 100644
--- a/tests/unit/utils/test_templates.py
+++ b/tests/unit/utils/test_templates.py
@@ -320,6 +320,20 @@ class WrapRenderTestCase(TestCase):
slspath="foo",
)
+ def test_generate_sls_context__one_level_init_implicit_with_trailing_dot(self):
+ """generate_sls_context - Basic one level with implicit init.sls with trailing dot"""
+ self._test_generated_sls_context(
+ "/tmp/foo/init.sls",
+ "foo.",
+ tplfile="foo/init.sls",
+ tpldir="foo",
+ tpldot="foo",
+ slsdotpath="foo",
+ slscolonpath="foo",
+ sls_path="foo",
+ slspath="foo",
+ )
+
def test_generate_sls_context__one_level_init_explicit(self):
"""generate_sls_context - Basic one level with explicit init.sls"""
self._test_generated_sls_context(
--
2.42.0

File diff suppressed because it is too large Load Diff

View File

@ -0,0 +1,544 @@
From 5710bc3ff3887762182f8326bd74f40d3872a69f Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Pablo=20Su=C3=A1rez=20Hern=C3=A1ndez?=
<psuarezhernandez@suse.com>
Date: Thu, 1 Feb 2024 11:50:16 +0000
Subject: [PATCH] Fix "CVE-2024-22231" and "CVE-2024-22232"
(bsc#1219430, bsc#1219431) (#621)
* Fix CVE-2024-22231 and CVE-2024-22232
* Add changelogs for CVE-2024-22231 and CVE-2024-22232
* Fix linter issue
* Add credit
* Fix wart in patch
* Clean up test fixtures
* Fix test on windows
* Update changelog file name
* Fix fileroots tests
---------
Co-authored-by: Daniel A. Wozniak <dwozniak@vmware.com>
---
changelog/565.security.md | 4 +
salt/fileserver/__init__.py | 9 +-
salt/fileserver/roots.py | 26 +++++
salt/master.py | 15 ++-
tests/pytests/unit/fileserver/test_roots.py | 58 +++++++--
tests/pytests/unit/test_fileserver.py | 123 ++++++++++++++++++++
tests/pytests/unit/test_master.py | 33 ++++++
tests/unit/test_fileserver.py | 79 -------------
8 files changed, 250 insertions(+), 97 deletions(-)
create mode 100644 changelog/565.security.md
create mode 100644 tests/pytests/unit/test_fileserver.py
delete mode 100644 tests/unit/test_fileserver.py
diff --git a/changelog/565.security.md b/changelog/565.security.md
new file mode 100644
index 00000000000..5d7ec8202ba
--- /dev/null
+++ b/changelog/565.security.md
@@ -0,0 +1,4 @@
+CVE-2024-22231 Prevent directory traversal when creating syndic cache directory on the master
+CVE-2024-22232 Prevent directory traversal attacks in the master's serve_file method.
+These vulerablities were discovered and reported by:
+Yudi Zhao(Huawei Nebula Security Lab),Chenwei Jiang(Huawei Nebula Security Lab)
diff --git a/salt/fileserver/__init__.py b/salt/fileserver/__init__.py
index 99f12387f91..4eca98d14a4 100644
--- a/salt/fileserver/__init__.py
+++ b/salt/fileserver/__init__.py
@@ -568,11 +568,6 @@ class Fileserver:
saltenv = salt.utils.stringutils.to_unicode(saltenv)
back = self.backends(back)
kwargs = {}
- fnd = {"path": "", "rel": ""}
- if os.path.isabs(path):
- return fnd
- if "../" in path:
- return fnd
if salt.utils.url.is_escaped(path):
# don't attempt to find URL query arguments in the path
path = salt.utils.url.unescape(path)
@@ -588,6 +583,10 @@ class Fileserver:
args = comp.split("=", 1)
kwargs[args[0]] = args[1]
+ fnd = {"path": "", "rel": ""}
+ if os.path.isabs(path) or "../" in path:
+ return fnd
+
if "env" in kwargs:
# "env" is not supported; Use "saltenv".
kwargs.pop("env")
diff --git a/salt/fileserver/roots.py b/salt/fileserver/roots.py
index a02b597c6f8..e2ea92029c3 100644
--- a/salt/fileserver/roots.py
+++ b/salt/fileserver/roots.py
@@ -27,6 +27,7 @@ import salt.utils.hashutils
import salt.utils.path
import salt.utils.platform
import salt.utils.stringutils
+import salt.utils.verify
import salt.utils.versions
log = logging.getLogger(__name__)
@@ -98,6 +99,11 @@ def find_file(path, saltenv="base", **kwargs):
if saltenv == "__env__":
root = root.replace("__env__", actual_saltenv)
full = os.path.join(root, path)
+
+ # Refuse to serve file that is not under the root.
+ if not salt.utils.verify.clean_path(root, full, subdir=True):
+ continue
+
if os.path.isfile(full) and not salt.fileserver.is_file_ignored(__opts__, full):
fnd["path"] = full
fnd["rel"] = path
@@ -128,6 +134,26 @@ def serve_file(load, fnd):
ret["dest"] = fnd["rel"]
gzip = load.get("gzip", None)
fpath = os.path.normpath(fnd["path"])
+
+ actual_saltenv = saltenv = load["saltenv"]
+ if saltenv not in __opts__["file_roots"]:
+ if "__env__" in __opts__["file_roots"]:
+ log.debug(
+ "salt environment '%s' maps to __env__ file_roots directory", saltenv
+ )
+ saltenv = "__env__"
+ else:
+ return fnd
+ file_in_root = False
+ for root in __opts__["file_roots"][saltenv]:
+ if saltenv == "__env__":
+ root = root.replace("__env__", actual_saltenv)
+ # Refuse to serve file that is not under the root.
+ if salt.utils.verify.clean_path(root, fpath, subdir=True):
+ file_in_root = True
+ if not file_in_root:
+ return ret
+
with salt.utils.files.fopen(fpath, "rb") as fp_:
fp_.seek(load["loc"])
data = fp_.read(__opts__["file_buffer_size"])
diff --git a/salt/master.py b/salt/master.py
index 3d2ba1e29de..425b4121481 100644
--- a/salt/master.py
+++ b/salt/master.py
@@ -1038,7 +1038,10 @@ class MWorker(salt.utils.process.SignalHandlingProcess):
"""
key = payload["enc"]
load = payload["load"]
- ret = {"aes": self._handle_aes, "clear": self._handle_clear}[key](load)
+ if key == "aes":
+ ret = self._handle_aes(load)
+ else:
+ ret = self._handle_clear(load)
raise salt.ext.tornado.gen.Return(ret)
def _post_stats(self, start, cmd):
@@ -1213,7 +1216,7 @@ class AESFuncs(TransportMethods):
"_dir_list",
"_symlink_list",
"_file_envs",
- "_ext_nodes", # To keep compatibility with old Salt minion versions
+ "_ext_nodes", # To keep compatibility with old Salt minion versions
)
def __init__(self, opts, context=None):
@@ -1746,10 +1749,16 @@ class AESFuncs(TransportMethods):
self.mminion.returners[fstr](load["jid"], load["load"])
# Register the syndic
+
+ # We are creating a path using user suplied input. Use the
+ # clean_path to prevent a directory traversal.
+ root = os.path.join(self.opts["cachedir"], "syndics")
syndic_cache_path = os.path.join(
self.opts["cachedir"], "syndics", load["id"]
)
- if not os.path.exists(syndic_cache_path):
+ if salt.utils.verify.clean_path(
+ root, syndic_cache_path
+ ) and not os.path.exists(syndic_cache_path):
path_name = os.path.split(syndic_cache_path)[0]
if not os.path.exists(path_name):
os.makedirs(path_name)
diff --git a/tests/pytests/unit/fileserver/test_roots.py b/tests/pytests/unit/fileserver/test_roots.py
index 96bceb0fd3d..c1660280bc5 100644
--- a/tests/pytests/unit/fileserver/test_roots.py
+++ b/tests/pytests/unit/fileserver/test_roots.py
@@ -5,6 +5,7 @@
import copy
import pathlib
import shutil
+import sys
import textwrap
import pytest
@@ -28,14 +29,14 @@ def unicode_dirname():
return "соль"
-@pytest.fixture(autouse=True)
+@pytest.fixture
def testfile(tmp_path):
fp = tmp_path / "testfile"
fp.write_text("This is a testfile")
return fp
-@pytest.fixture(autouse=True)
+@pytest.fixture
def tmp_state_tree(tmp_path, testfile, unicode_filename, unicode_dirname):
dirname = tmp_path / "roots_tmp_state_tree"
dirname.mkdir(parents=True, exist_ok=True)
@@ -54,11 +55,15 @@ def tmp_state_tree(tmp_path, testfile, unicode_filename, unicode_dirname):
@pytest.fixture
-def configure_loader_modules(tmp_state_tree, temp_salt_master):
- opts = temp_salt_master.config.copy()
+def testfilepath(tmp_state_tree, testfile):
+ return tmp_state_tree / testfile.name
+
+
+@pytest.fixture
+def configure_loader_modules(tmp_state_tree, master_opts):
overrides = {"file_roots": {"base": [str(tmp_state_tree)]}}
- opts.update(overrides)
- return {roots: {"__opts__": opts}}
+ master_opts.update(overrides)
+ return {roots: {"__opts__": master_opts}}
def test_file_list(unicode_filename):
@@ -75,17 +80,17 @@ def test_find_file(tmp_state_tree):
assert full_path_to_file == ret["path"]
-def test_serve_file(testfile):
+def test_serve_file(testfilepath):
with patch.dict(roots.__opts__, {"file_buffer_size": 262144}):
load = {
"saltenv": "base",
- "path": str(testfile),
+ "path": str(testfilepath),
"loc": 0,
}
- fnd = {"path": str(testfile), "rel": "testfile"}
+ fnd = {"path": str(testfilepath), "rel": "testfile"}
ret = roots.serve_file(load, fnd)
- with salt.utils.files.fopen(str(testfile), "rb") as fp_:
+ with salt.utils.files.fopen(str(testfilepath), "rb") as fp_:
data = fp_.read()
assert ret == {"data": data, "dest": "testfile"}
@@ -277,3 +282,36 @@ def test_update_mtime_map_unicode_error(tmp_path):
},
"backend": "roots",
}
+
+
+def test_find_file_not_in_root(tmp_state_tree):
+ """
+ Fileroots should never 'find' a file that is outside of it's root.
+ """
+ badfile = pathlib.Path(tmp_state_tree).parent / "bar"
+ badfile.write_text("Bad file")
+ badpath = f"../bar"
+ ret = roots.find_file(badpath)
+ assert ret == {"path": "", "rel": ""}
+ badpath = f"{tmp_state_tree / '..' / 'bar'}"
+ ret = roots.find_file(badpath)
+ assert ret == {"path": "", "rel": ""}
+
+
+def test_serve_file_not_in_root(tmp_state_tree):
+ """
+ Fileroots should never 'serve' a file that is outside of it's root.
+ """
+ badfile = pathlib.Path(tmp_state_tree).parent / "bar"
+ badfile.write_text("Bad file")
+ badpath = f"../bar"
+ load = {"path": "salt://|..\\bar", "saltenv": "base", "loc": 0}
+ fnd = {
+ "path": f"{tmp_state_tree / '..' / 'bar'}",
+ "rel": f"{pathlib.Path('..') / 'bar'}",
+ }
+ ret = roots.serve_file(load, fnd)
+ if "win" in sys.platform:
+ assert ret == {"data": "", "dest": "..\\bar"}
+ else:
+ assert ret == {"data": "", "dest": "../bar"}
diff --git a/tests/pytests/unit/test_fileserver.py b/tests/pytests/unit/test_fileserver.py
new file mode 100644
index 00000000000..8dd3ea0a27d
--- /dev/null
+++ b/tests/pytests/unit/test_fileserver.py
@@ -0,0 +1,123 @@
+import datetime
+import os
+import time
+
+import salt.fileserver
+import salt.utils.files
+
+
+def test_diff_with_diffent_keys():
+ """
+ Test that different maps are indeed reported different
+ """
+ map1 = {"file1": 1234}
+ map2 = {"file2": 1234}
+ assert salt.fileserver.diff_mtime_map(map1, map2) is True
+
+
+def test_diff_with_diffent_values():
+ """
+ Test that different maps are indeed reported different
+ """
+ map1 = {"file1": 12345}
+ map2 = {"file1": 1234}
+ assert salt.fileserver.diff_mtime_map(map1, map2) is True
+
+
+def test_whitelist():
+ opts = {
+ "fileserver_backend": ["roots", "git", "s3fs", "hgfs", "svn"],
+ "extension_modules": "",
+ }
+ fs = salt.fileserver.Fileserver(opts)
+ assert sorted(fs.servers.whitelist) == sorted(
+ ["git", "gitfs", "hg", "hgfs", "svn", "svnfs", "roots", "s3fs"]
+ ), fs.servers.whitelist
+
+
+def test_future_file_list_cache_file_ignored(tmp_path):
+ opts = {
+ "fileserver_backend": ["roots"],
+ "cachedir": tmp_path,
+ "extension_modules": "",
+ }
+
+ back_cachedir = os.path.join(tmp_path, "file_lists/roots")
+ os.makedirs(os.path.join(back_cachedir))
+
+ # Touch a couple files
+ for filename in ("base.p", "foo.txt"):
+ with salt.utils.files.fopen(os.path.join(back_cachedir, filename), "wb") as _f:
+ if filename == "base.p":
+ _f.write(b"\x80")
+
+ # Set modification time to file list cache file to 1 year in the future
+ now = datetime.datetime.utcnow()
+ future = now + datetime.timedelta(days=365)
+ mod_time = time.mktime(future.timetuple())
+ os.utime(os.path.join(back_cachedir, "base.p"), (mod_time, mod_time))
+
+ list_cache = os.path.join(back_cachedir, "base.p")
+ w_lock = os.path.join(back_cachedir, ".base.w")
+ ret = salt.fileserver.check_file_list_cache(opts, "files", list_cache, w_lock)
+ assert (
+ ret[1] is True
+ ), "Cache file list cache file is not refreshed when future modification time"
+
+
+def test_file_server_url_escape(tmp_path):
+ (tmp_path / "srv").mkdir()
+ (tmp_path / "srv" / "salt").mkdir()
+ (tmp_path / "foo").mkdir()
+ (tmp_path / "foo" / "bar").write_text("Bad file")
+ fileroot = str(tmp_path / "srv" / "salt")
+ badfile = str(tmp_path / "foo" / "bar")
+ opts = {
+ "fileserver_backend": ["roots"],
+ "extension_modules": "",
+ "optimization_order": [
+ 0,
+ ],
+ "file_roots": {
+ "base": [fileroot],
+ },
+ "file_ignore_regex": "",
+ "file_ignore_glob": "",
+ }
+ fs = salt.fileserver.Fileserver(opts)
+ ret = fs.find_file(
+ "salt://|..\\..\\..\\foo/bar",
+ "base",
+ )
+ assert ret == {"path": "", "rel": ""}
+
+
+def test_file_server_serve_url_escape(tmp_path):
+ (tmp_path / "srv").mkdir()
+ (tmp_path / "srv" / "salt").mkdir()
+ (tmp_path / "foo").mkdir()
+ (tmp_path / "foo" / "bar").write_text("Bad file")
+ fileroot = str(tmp_path / "srv" / "salt")
+ badfile = str(tmp_path / "foo" / "bar")
+ opts = {
+ "fileserver_backend": ["roots"],
+ "extension_modules": "",
+ "optimization_order": [
+ 0,
+ ],
+ "file_roots": {
+ "base": [fileroot],
+ },
+ "file_ignore_regex": "",
+ "file_ignore_glob": "",
+ "file_buffer_size": 2048,
+ }
+ fs = salt.fileserver.Fileserver(opts)
+ ret = fs.serve_file(
+ {
+ "path": "salt://|..\\..\\..\\foo/bar",
+ "saltenv": "base",
+ "loc": 0,
+ }
+ )
+ assert ret == {"data": "", "dest": ""}
diff --git a/tests/pytests/unit/test_master.py b/tests/pytests/unit/test_master.py
index 98c796912aa..d338307d1f8 100644
--- a/tests/pytests/unit/test_master.py
+++ b/tests/pytests/unit/test_master.py
@@ -1,3 +1,4 @@
+import pathlib
import time
import pytest
@@ -249,3 +250,35 @@ def test_mworker_pass_context():
loadler_pillars_mock.call_args_list[0][1].get("pack").get("__context__")
== test_context
)
+
+
+def test_syndic_return_cache_dir_creation(encrypted_requests):
+ """master's cachedir for a syndic will be created by AESFuncs._syndic_return method"""
+ cachedir = pathlib.Path(encrypted_requests.opts["cachedir"])
+ assert not (cachedir / "syndics").exists()
+ encrypted_requests._syndic_return(
+ {
+ "id": "mamajama",
+ "jid": "",
+ "return": {},
+ }
+ )
+ assert (cachedir / "syndics").exists()
+ assert (cachedir / "syndics" / "mamajama").exists()
+
+
+def test_syndic_return_cache_dir_creation_traversal(encrypted_requests):
+ """
+ master's AESFuncs._syndic_return method cachdir creation is not vulnerable to a directory traversal
+ """
+ cachedir = pathlib.Path(encrypted_requests.opts["cachedir"])
+ assert not (cachedir / "syndics").exists()
+ encrypted_requests._syndic_return(
+ {
+ "id": "../mamajama",
+ "jid": "",
+ "return": {},
+ }
+ )
+ assert not (cachedir / "syndics").exists()
+ assert not (cachedir / "mamajama").exists()
diff --git a/tests/unit/test_fileserver.py b/tests/unit/test_fileserver.py
deleted file mode 100644
index c290b16b7e4..00000000000
--- a/tests/unit/test_fileserver.py
+++ /dev/null
@@ -1,79 +0,0 @@
-"""
- :codeauthor: Joao Mesquita <jmesquita@sangoma.com>
-"""
-
-
-import datetime
-import os
-import time
-
-import salt.utils.files
-from salt import fileserver
-from tests.support.helpers import with_tempdir
-from tests.support.mixins import LoaderModuleMockMixin
-from tests.support.unit import TestCase
-
-
-class MapDiffTestCase(TestCase):
- def test_diff_with_diffent_keys(self):
- """
- Test that different maps are indeed reported different
- """
- map1 = {"file1": 1234}
- map2 = {"file2": 1234}
- assert fileserver.diff_mtime_map(map1, map2) is True
-
- def test_diff_with_diffent_values(self):
- """
- Test that different maps are indeed reported different
- """
- map1 = {"file1": 12345}
- map2 = {"file1": 1234}
- assert fileserver.diff_mtime_map(map1, map2) is True
-
-
-class VCSBackendWhitelistCase(TestCase, LoaderModuleMockMixin):
- def setup_loader_modules(self):
- return {fileserver: {}}
-
- def test_whitelist(self):
- opts = {
- "fileserver_backend": ["roots", "git", "s3fs", "hgfs", "svn"],
- "extension_modules": "",
- }
- fs = fileserver.Fileserver(opts)
- assert sorted(fs.servers.whitelist) == sorted(
- ["git", "gitfs", "hg", "hgfs", "svn", "svnfs", "roots", "s3fs"]
- ), fs.servers.whitelist
-
- @with_tempdir()
- def test_future_file_list_cache_file_ignored(self, cachedir):
- opts = {
- "fileserver_backend": ["roots"],
- "cachedir": cachedir,
- "extension_modules": "",
- }
-
- back_cachedir = os.path.join(cachedir, "file_lists/roots")
- os.makedirs(os.path.join(back_cachedir))
-
- # Touch a couple files
- for filename in ("base.p", "foo.txt"):
- with salt.utils.files.fopen(
- os.path.join(back_cachedir, filename), "wb"
- ) as _f:
- if filename == "base.p":
- _f.write(b"\x80")
-
- # Set modification time to file list cache file to 1 year in the future
- now = datetime.datetime.utcnow()
- future = now + datetime.timedelta(days=365)
- mod_time = time.mktime(future.timetuple())
- os.utime(os.path.join(back_cachedir, "base.p"), (mod_time, mod_time))
-
- list_cache = os.path.join(back_cachedir, "base.p")
- w_lock = os.path.join(back_cachedir, ".base.w")
- ret = fileserver.check_file_list_cache(opts, "files", list_cache, w_lock)
- assert (
- ret[1] is True
- ), "Cache file list cache file is not refreshed when future modification time"
--
2.43.0

View File

@ -0,0 +1,39 @@
From 7be26299bc7b6ec2065ab13857f088dc500ee882 Mon Sep 17 00:00:00 2001
From: Jochen Breuer <jbreuer@suse.de>
Date: Thu, 6 Sep 2018 17:15:18 +0200
Subject: [PATCH] Fix for SUSE Expanded Support detection
A SUSE ES installation has both, the centos-release and redhat-release
file. Since os_data only used the centos-release file to detect a
CentOS installation, this lead to SUSE ES being detected as CentOS.
This change also adds a check for redhat-release and then marks the
'lsb_distrib_id' as RedHat.
---
salt/grains/core.py | 9 +++++++++
1 file changed, 9 insertions(+)
diff --git a/salt/grains/core.py b/salt/grains/core.py
index 710c57f28f..1199ad274f 100644
--- a/salt/grains/core.py
+++ b/salt/grains/core.py
@@ -2279,6 +2279,15 @@ def _legacy_linux_distribution_data(grains, os_release, lsb_has_error):
log.trace("Parsing distrib info from /etc/centos-release")
# CentOS Linux
grains["lsb_distrib_id"] = "CentOS"
+ # Maybe CentOS Linux; could also be SUSE Expanded Support.
+ # SUSE ES has both, centos-release and redhat-release.
+ if os.path.isfile("/etc/redhat-release"):
+ with salt.utils.files.fopen("/etc/redhat-release") as ifile:
+ for line in ifile:
+ if "red hat enterprise linux server" in line.lower():
+ # This is a SUSE Expanded Support Rhel installation
+ grains["lsb_distrib_id"] = "RedHat"
+ break
with salt.utils.files.fopen("/etc/centos-release") as ifile:
for line in ifile:
# Need to pull out the version and codename
--
2.39.2

File diff suppressed because it is too large Load Diff

52
fix-issue-2068-test.patch Normal file
View File

@ -0,0 +1,52 @@
From b0e713d6946526b894837406c0760c262e4312a1 Mon Sep 17 00:00:00 2001
From: Bo Maryniuk <bo@suse.de>
Date: Wed, 9 Jan 2019 16:08:19 +0100
Subject: [PATCH] Fix issue #2068 test
Skip injecting `__call__` if chunk is not dict.
This also fixes `integration/modules/test_state.py:StateModuleTest.test_exclude` that tests `include` and `exclude` state directives containing the only list of strings.
Minor update: more correct is-dict check.
---
salt/state.py | 9 ++++++---
1 file changed, 6 insertions(+), 3 deletions(-)
diff --git a/salt/state.py b/salt/state.py
index 8352a8defc..cb434a91e7 100644
--- a/salt/state.py
+++ b/salt/state.py
@@ -12,6 +12,7 @@ The data sent to the state calls is as follows:
"""
+import collections
import copy
import datetime
import fnmatch
@@ -3507,16 +3508,18 @@ class State:
"""
for chunk in high:
state = high[chunk]
+ if not isinstance(state, collections.Mapping):
+ continue
for state_ref in state:
needs_default = True
+ if not isinstance(state[state_ref], list):
+ continue
for argset in state[state_ref]:
if isinstance(argset, str):
needs_default = False
break
if needs_default:
- order = state[state_ref].pop(-1)
- state[state_ref].append("__call__")
- state[state_ref].append(order)
+ state[state_ref].insert(-1, "__call__")
def call_high(self, high, orchestration_jid=None):
"""
--
2.39.2

View File

@ -0,0 +1,30 @@
From 5158ebce305d961a2d2e3cb3f889b0cde593c4a0 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Ond=C5=99ej=20Hole=C4=8Dek?= <oholecek@aaannz.eu>
Date: Mon, 10 May 2021 16:23:19 +0200
Subject: [PATCH] Fix missing minion returns in batch mode (#360)
Don't close pub if there are pending events, otherwise events will be lost
resulting in empty minion returns.
Co-authored-by: Denis V. Meltsaykin <dmeltsaykin@mirantis.com>
---
salt/client/__init__.py | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/salt/client/__init__.py b/salt/client/__init__.py
index bcda56c9b4..b2617e4554 100644
--- a/salt/client/__init__.py
+++ b/salt/client/__init__.py
@@ -976,7 +976,7 @@ class LocalClient:
self._clean_up_subscriptions(pub_data["jid"])
finally:
- if not was_listening:
+ if not was_listening and not self.event.pending_events:
self.event.close_pub()
def cmd_full_return(
--
2.39.2

View File

@ -0,0 +1,62 @@
From aaf593d17f51a517e0adb6e9ec1c0d768ab5f855 Mon Sep 17 00:00:00 2001
From: Victor Zhestkov <vzhestkov@suse.com>
Date: Mon, 2 Oct 2023 14:24:27 +0200
Subject: [PATCH] Fix optimization_order opt to prevent test fails
---
tests/pytests/unit/grains/test_core.py | 4 ++--
tests/pytests/unit/loader/test_loader.py | 2 +-
tests/pytests/unit/test_config.py | 2 +-
3 files changed, 4 insertions(+), 4 deletions(-)
diff --git a/tests/pytests/unit/grains/test_core.py b/tests/pytests/unit/grains/test_core.py
index 993c723950..36545287b9 100644
--- a/tests/pytests/unit/grains/test_core.py
+++ b/tests/pytests/unit/grains/test_core.py
@@ -156,7 +156,7 @@ def test_network_grains_secondary_ip(tmp_path):
opts = {
"cachedir": str(cache_dir),
"extension_modules": str(extmods),
- "optimization_order": [0],
+ "optimization_order": [0, 1, 2],
}
with patch("salt.utils.network.interfaces", side_effect=[data]):
grains = salt.loader.grain_funcs(opts)
@@ -243,7 +243,7 @@ def test_network_grains_cache(tmp_path):
opts = {
"cachedir": str(cache_dir),
"extension_modules": str(extmods),
- "optimization_order": [0],
+ "optimization_order": [0, 1, 2],
}
with patch(
"salt.utils.network.interfaces", side_effect=[call_1, call_2]
diff --git a/tests/pytests/unit/loader/test_loader.py b/tests/pytests/unit/loader/test_loader.py
index f4a4b51a58..86348749db 100644
--- a/tests/pytests/unit/loader/test_loader.py
+++ b/tests/pytests/unit/loader/test_loader.py
@@ -57,7 +57,7 @@ def test_raw_mod_functions():
"Ensure functions loaded by raw_mod are LoaderFunc instances"
opts = {
"extension_modules": "",
- "optimization_order": [0],
+ "optimization_order": [0, 1, 2],
}
ret = salt.loader.raw_mod(opts, "grains", "get")
for k, v in ret.items():
diff --git a/tests/pytests/unit/test_config.py b/tests/pytests/unit/test_config.py
index cb343cb75e..76d5605360 100644
--- a/tests/pytests/unit/test_config.py
+++ b/tests/pytests/unit/test_config.py
@@ -16,7 +16,7 @@ def test_call_id_function(tmp_path):
"cachedir": str(cache_dir),
"extension_modules": str(extmods),
"grains": {"osfinger": "meh"},
- "optimization_order": [0],
+ "optimization_order": [0, 1, 2],
}
ret = salt.config.call_id_function(opts)
assert ret == "meh"
--
2.42.0

View File

@ -0,0 +1,50 @@
From 5f6488ab9211927c421e3d87a4ee84fe659ceb8b Mon Sep 17 00:00:00 2001
From: Victor Zhestkov <vzhestkov@suse.com>
Date: Mon, 27 Jun 2022 18:03:49 +0300
Subject: [PATCH] Fix ownership of salt thin directory when using the
Salt Bundle
---
salt/client/ssh/ssh_py_shim.py | 25 ++++++++++++++++++++++++-
1 file changed, 24 insertions(+), 1 deletion(-)
diff --git a/salt/client/ssh/ssh_py_shim.py b/salt/client/ssh/ssh_py_shim.py
index 293ea1b7fa..95171f7aea 100644
--- a/salt/client/ssh/ssh_py_shim.py
+++ b/salt/client/ssh/ssh_py_shim.py
@@ -292,7 +292,30 @@ def main(argv): # pylint: disable=W0613
os.makedirs(OPTIONS.saltdir)
cache_dir = os.path.join(OPTIONS.saltdir, "running_data", "var", "cache")
os.makedirs(os.path.join(cache_dir, "salt"))
- os.symlink("salt", os.path.relpath(os.path.join(cache_dir, "venv-salt-minion")))
+ os.symlink(
+ "salt", os.path.relpath(os.path.join(cache_dir, "venv-salt-minion"))
+ )
+ if os.path.exists(OPTIONS.saltdir) and (
+ "SUDO_UID" in os.environ or "SUDO_GID" in os.environ
+ ):
+ try:
+ sudo_uid = int(os.environ.get("SUDO_UID", -1))
+ except ValueError:
+ sudo_uid = -1
+ try:
+ sudo_gid = int(os.environ.get("SUDO_GID", -1))
+ except ValueError:
+ sudo_gid = -1
+ dstat = os.stat(OPTIONS.saltdir)
+ if (sudo_uid != -1 and dstat.st_uid != sudo_uid) or (
+ sudo_gid != -1 and dstat.st_gid != sudo_gid
+ ):
+ os.chown(OPTIONS.saltdir, sudo_uid, sudo_gid)
+ for dir_path, dir_names, file_names in os.walk(OPTIONS.saltdir):
+ for dir_name in dir_names:
+ os.lchown(os.path.join(dir_path, dir_name), sudo_uid, sudo_gid)
+ for file_name in file_names:
+ os.lchown(os.path.join(dir_path, file_name), sudo_uid, sudo_gid)
if venv_salt_call is None:
# Use Salt thin only if Salt Bundle (venv-salt-minion) is not available
--
2.39.2

File diff suppressed because it is too large Load Diff

View File

@ -0,0 +1,253 @@
From c25c8081ded775f3574b0bc999d809ce14701ba5 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Pablo=20Su=C3=A1rez=20Hern=C3=A1ndez?=
<psuarezhernandez@suse.com>
Date: Thu, 3 Aug 2023 10:07:28 +0100
Subject: [PATCH] Fix regression: multiple values for keyword argument
'saltenv' (bsc#1212844) (#590)
* fix passing wrong keyword arguments to cp.cache_file in pkg.installed with sources
* Drop `**kwargs` usage and be explicit about the supported keyword arguments.
Signed-off-by: Pedro Algarvio <palgarvio@vmware.com>
* Add regression test for https://github.com/saltstack/salt/issues/64118
Signed-off-by: Pedro Algarvio <palgarvio@vmware.com>
* Add changelog file
Signed-off-by: Pedro Algarvio <palgarvio@vmware.com>
---------
Signed-off-by: Pedro Algarvio <palgarvio@vmware.com>
Co-authored-by: Massimiliano Torromeo <massimiliano.torromeo@gmail.com>
Co-authored-by: Pedro Algarvio <palgarvio@vmware.com>
---
changelog/64118.fixed.md | 1 +
salt/modules/win_pkg.py | 25 +++++++-----
salt/states/pkg.py | 4 +-
tests/pytests/unit/modules/test_win_pkg.py | 2 +-
tests/pytests/unit/states/test_pkg.py | 46 +++++++++++++++++++---
5 files changed, 62 insertions(+), 16 deletions(-)
create mode 100644 changelog/64118.fixed.md
diff --git a/changelog/64118.fixed.md b/changelog/64118.fixed.md
new file mode 100644
index 0000000000..e7251827e9
--- /dev/null
+++ b/changelog/64118.fixed.md
@@ -0,0 +1 @@
+Stop passing `**kwargs` and be explicit about the keyword arguments to pass, namely, to `cp.cache_file` call in `salt.states.pkg`
diff --git a/salt/modules/win_pkg.py b/salt/modules/win_pkg.py
index 3aa7c7919a..e80dd19322 100644
--- a/salt/modules/win_pkg.py
+++ b/salt/modules/win_pkg.py
@@ -1298,7 +1298,7 @@ def _repo_process_pkg_sls(filename, short_path_name, ret, successful_verbose):
successful_verbose[short_path_name] = []
-def _get_source_sum(source_hash, file_path, saltenv, **kwargs):
+def _get_source_sum(source_hash, file_path, saltenv, verify_ssl=True):
"""
Extract the hash sum, whether it is in a remote hash file, or just a string.
"""
@@ -1315,7 +1315,7 @@ def _get_source_sum(source_hash, file_path, saltenv, **kwargs):
# The source_hash is a file on a server
try:
cached_hash_file = __salt__["cp.cache_file"](
- source_hash, saltenv, verify_ssl=kwargs.get("verify_ssl", True)
+ source_hash, saltenv=saltenv, verify_ssl=verify_ssl
)
except MinionError as exc:
log.exception("Failed to cache %s", source_hash, exc_info=exc)
@@ -1671,7 +1671,7 @@ def install(name=None, refresh=False, pkgs=None, **kwargs):
try:
cached_file = __salt__["cp.cache_file"](
cache_file,
- saltenv,
+ saltenv=saltenv,
verify_ssl=kwargs.get("verify_ssl", True),
)
except MinionError as exc:
@@ -1686,7 +1686,7 @@ def install(name=None, refresh=False, pkgs=None, **kwargs):
try:
cached_file = __salt__["cp.cache_file"](
cache_file,
- saltenv,
+ saltenv=saltenv,
verify_ssl=kwargs.get("verify_ssl", True),
)
except MinionError as exc:
@@ -1706,7 +1706,9 @@ def install(name=None, refresh=False, pkgs=None, **kwargs):
# It's not cached. Cache it, mate.
try:
cached_pkg = __salt__["cp.cache_file"](
- installer, saltenv, verify_ssl=kwargs.get("verify_ssl", True)
+ installer,
+ saltenv=saltenv,
+ verify_ssl=kwargs.get("verify_ssl", True),
)
except MinionError as exc:
msg = "Failed to cache {}".format(installer)
@@ -1730,7 +1732,7 @@ def install(name=None, refresh=False, pkgs=None, **kwargs):
try:
cached_pkg = __salt__["cp.cache_file"](
installer,
- saltenv,
+ saltenv=saltenv,
verify_ssl=kwargs.get("verify_ssl", True),
)
except MinionError as exc:
@@ -1754,7 +1756,12 @@ def install(name=None, refresh=False, pkgs=None, **kwargs):
# Compare the hash sums
source_hash = pkginfo[version_num].get("source_hash", False)
if source_hash:
- source_sum = _get_source_sum(source_hash, cached_pkg, saltenv, **kwargs)
+ source_sum = _get_source_sum(
+ source_hash,
+ cached_pkg,
+ saltenv=saltenv,
+ verify_ssl=kwargs.get("verify_ssl", True),
+ )
log.debug(
"pkg.install: Source %s hash: %s",
source_sum["hash_type"],
@@ -2126,7 +2133,7 @@ def remove(name=None, pkgs=None, **kwargs):
try:
cached_pkg = __salt__["cp.cache_file"](
uninstaller,
- saltenv,
+ saltenv=saltenv,
verify_ssl=kwargs.get("verify_ssl", True),
)
except MinionError as exc:
@@ -2150,7 +2157,7 @@ def remove(name=None, pkgs=None, **kwargs):
try:
cached_pkg = __salt__["cp.cache_file"](
uninstaller,
- saltenv,
+ saltenv=saltenv,
verify_ssl=kwargs.get("verify_ssl", True),
)
except MinionError as exc:
diff --git a/salt/states/pkg.py b/salt/states/pkg.py
index 12fbc87a1a..a605b23107 100644
--- a/salt/states/pkg.py
+++ b/salt/states/pkg.py
@@ -760,7 +760,9 @@ def _find_install_targets(
err = "Unable to cache {0}: {1}"
try:
cached_path = __salt__["cp.cache_file"](
- version_string, saltenv=kwargs["saltenv"], **kwargs
+ version_string,
+ saltenv=kwargs["saltenv"],
+ verify_ssl=kwargs.get("verify_ssl", True),
)
except CommandExecutionError as exc:
problems.append(err.format(version_string, exc))
diff --git a/tests/pytests/unit/modules/test_win_pkg.py b/tests/pytests/unit/modules/test_win_pkg.py
index 76234fb77e..6d435f00a5 100644
--- a/tests/pytests/unit/modules/test_win_pkg.py
+++ b/tests/pytests/unit/modules/test_win_pkg.py
@@ -262,7 +262,7 @@ def test_pkg_install_verify_ssl_false():
result = win_pkg.install(name="nsis", version="3.02", verify_ssl=False)
mock_cp.assert_called_once_with(
"http://download.sourceforge.net/project/nsis/NSIS%203/3.02/nsis-3.02-setup.exe",
- "base",
+ saltenv="base",
verify_ssl=False,
)
assert expected == result
diff --git a/tests/pytests/unit/states/test_pkg.py b/tests/pytests/unit/states/test_pkg.py
index b852f27b00..f58be11011 100644
--- a/tests/pytests/unit/states/test_pkg.py
+++ b/tests/pytests/unit/states/test_pkg.py
@@ -3,6 +3,7 @@ import logging
import pytest
import salt.modules.beacons as beaconmod
+import salt.modules.cp as cp
import salt.modules.pkg_resource as pkg_resource
import salt.modules.yumpkg as yumpkg
import salt.states.beacon as beaconstate
@@ -15,19 +16,28 @@ log = logging.getLogger(__name__)
@pytest.fixture
-def configure_loader_modules():
+def configure_loader_modules(minion_opts):
return {
+ cp: {
+ "__opts__": minion_opts,
+ },
pkg: {
"__env__": "base",
"__salt__": {},
"__grains__": {"os": "CentOS", "os_family": "RedHat"},
- "__opts__": {"test": False, "cachedir": ""},
+ "__opts__": minion_opts,
"__instance_id__": "",
"__low__": {},
"__utils__": {"state.gen_tag": state_utils.gen_tag},
},
- beaconstate: {"__salt__": {}, "__opts__": {}},
- beaconmod: {"__salt__": {}, "__opts__": {}},
+ beaconstate: {
+ "__salt__": {},
+ "__opts__": minion_opts,
+ },
+ beaconmod: {
+ "__salt__": {},
+ "__opts__": minion_opts,
+ },
pkg_resource: {
"__salt__": {},
"__grains__": {"os": "CentOS", "os_family": "RedHat"},
@@ -35,7 +45,7 @@ def configure_loader_modules():
yumpkg: {
"__salt__": {},
"__grains__": {"osarch": "x86_64", "osmajorrelease": 7},
- "__opts__": {},
+ "__opts__": minion_opts,
},
}
@@ -563,6 +573,32 @@ def test_installed_with_changes_test_true(list_pkgs):
assert ret["changes"] == expected
+def test_installed_with_sources(list_pkgs, tmp_path):
+ """
+ Test pkg.installed with passing `sources`
+ """
+
+ list_pkgs = MagicMock(return_value=list_pkgs)
+ pkg_source = tmp_path / "pkga-package-0.3.0.deb"
+
+ with patch.dict(
+ pkg.__salt__,
+ {
+ "cp.cache_file": cp.cache_file,
+ "pkg.list_pkgs": list_pkgs,
+ "pkg_resource.pack_sources": pkg_resource.pack_sources,
+ "lowpkg.bin_pkg_info": MagicMock(),
+ },
+ ), patch("salt.fileclient.get_file_client", return_value=MagicMock()):
+ try:
+ ret = pkg.installed("install-pkgd", sources=[{"pkga": str(pkg_source)}])
+ assert ret["result"] is False
+ except TypeError as exc:
+ if "got multiple values for keyword argument 'saltenv'" in str(exc):
+ pytest.fail(f"TypeError should have not been raised: {exc}")
+ raise exc from None
+
+
@pytest.mark.parametrize("action", ["removed", "purged"])
def test_removed_purged_with_changes_test_true(list_pkgs, action):
"""
--
2.41.0

View File

@ -0,0 +1,53 @@
From 42cfb51fa01e13fe043a62536ba37fd472bc2688 Mon Sep 17 00:00:00 2001
From: Victor Zhestkov <vzhestkov@suse.com>
Date: Tue, 12 Apr 2022 10:08:17 +0300
Subject: [PATCH] Fix regression with depending client.ssh on psutil
(bsc#1197533)
---
salt/client/ssh/__init__.py | 14 ++++++++++++--
1 file changed, 12 insertions(+), 2 deletions(-)
diff --git a/salt/client/ssh/__init__.py b/salt/client/ssh/__init__.py
index d5a679821e..b120e0002e 100644
--- a/salt/client/ssh/__init__.py
+++ b/salt/client/ssh/__init__.py
@@ -12,7 +12,6 @@ import hashlib
import logging
import multiprocessing
import os
-import psutil
import queue
import re
import shlex
@@ -420,6 +419,16 @@ class SSH(MultiprocessingStateMixin):
self.__parsed_rosters[self.ROSTER_UPDATE_FLAG] = False
return
+ def _pid_exists(self, pid):
+ """
+ Check if specified pid is alive
+ """
+ try:
+ os.kill(pid, 0)
+ except OSError:
+ return False
+ return True
+
def _update_roster(self, hostname=None, user=None):
"""
Update default flat roster with the passed in information.
@@ -639,7 +648,8 @@ class SSH(MultiprocessingStateMixin):
pid_running = (
False
if cached_session["pid"] == 0
- else cached_session.get("running", False) or psutil.pid_exists(cached_session["pid"])
+ else cached_session.get("running", False)
+ or self._pid_exists(cached_session["pid"])
)
if (
pid_running and prev_session_running < self.max_pid_wait
--
2.39.2

View File

@ -0,0 +1,128 @@
From 4dbd5534a39fbfaebad32a00d0e6c512d840b0fd Mon Sep 17 00:00:00 2001
From: Victor Zhestkov <vzhestkov@suse.com>
Date: Thu, 31 Mar 2022 13:39:57 +0300
Subject: [PATCH] Fix salt-ssh opts poisoning (bsc#1197637) - 3004 (#501)
* Fix salt-ssh opts poisoning
* Pass proper __opts__ to roster modules
* Remove redundant copy.deepcopy for opts from handle_routine
---
salt/client/ssh/__init__.py | 17 ++++++++++-------
salt/loader/__init__.py | 7 ++++++-
2 files changed, 16 insertions(+), 8 deletions(-)
diff --git a/salt/client/ssh/__init__.py b/salt/client/ssh/__init__.py
index e6837df4e5..a527c03de6 100644
--- a/salt/client/ssh/__init__.py
+++ b/salt/client/ssh/__init__.py
@@ -338,7 +338,7 @@ class SSH(MultiprocessingStateMixin):
self.session_flock_file = os.path.join(
self.opts["cachedir"], "salt-ssh.session.lock"
)
- self.ssh_session_grace_time = int(self.opts.get("ssh_session_grace_time", 3))
+ self.ssh_session_grace_time = int(self.opts.get("ssh_session_grace_time", 1))
# __setstate__ and __getstate__ are only used on spawning platforms.
def __setstate__(self, state):
@@ -571,7 +571,6 @@ class SSH(MultiprocessingStateMixin):
"""
LOG_LOCK.release()
salt.loader.LOAD_LOCK.release()
- opts = copy.deepcopy(opts)
single = Single(
opts,
opts["argv"],
@@ -608,6 +607,7 @@ class SSH(MultiprocessingStateMixin):
Spin up the needed threads or processes and execute the subsequent
routines
"""
+ opts = copy.deepcopy(self.opts)
que = multiprocessing.Queue()
running = {}
targets_queue = deque(self.targets.keys())
@@ -618,7 +618,7 @@ class SSH(MultiprocessingStateMixin):
if not self.targets:
log.error("No matching targets found in roster.")
break
- if len(running) < self.opts.get("ssh_max_procs", 25) and not init:
+ if len(running) < opts.get("ssh_max_procs", 25) and not init:
if targets_queue:
host = targets_queue.popleft()
else:
@@ -636,7 +636,7 @@ class SSH(MultiprocessingStateMixin):
pid_running = (
False
if cached_session["pid"] == 0
- else psutil.pid_exists(cached_session["pid"])
+ else cached_session.get("running", False) or psutil.pid_exists(cached_session["pid"])
)
if (
pid_running and prev_session_running < self.max_pid_wait
@@ -651,9 +651,10 @@ class SSH(MultiprocessingStateMixin):
"salt-ssh/session",
host,
{
- "pid": 0,
+ "pid": os.getpid(),
"master_id": self.master_id,
"ts": time.time(),
+ "running": True,
},
)
for default in self.defaults:
@@ -681,7 +682,7 @@ class SSH(MultiprocessingStateMixin):
continue
args = (
que,
- self.opts,
+ opts,
host,
self.targets[host],
mine,
@@ -717,6 +718,7 @@ class SSH(MultiprocessingStateMixin):
"pid": routine.pid,
"master_id": self.master_id,
"ts": time.time(),
+ "running": True,
},
)
continue
@@ -768,12 +770,13 @@ class SSH(MultiprocessingStateMixin):
"pid": 0,
"master_id": self.master_id,
"ts": time.time(),
+ "running": False,
},
)
if len(rets) >= len(self.targets):
break
# Sleep when limit or all threads started
- if len(running) >= self.opts.get("ssh_max_procs", 25) or len(
+ if len(running) >= opts.get("ssh_max_procs", 25) or len(
self.targets
) >= len(running):
time.sleep(0.1)
diff --git a/salt/loader/__init__.py b/salt/loader/__init__.py
index 32f8a7702c..bbe4269839 100644
--- a/salt/loader/__init__.py
+++ b/salt/loader/__init__.py
@@ -757,7 +757,12 @@ def roster(opts, runner=None, utils=None, whitelist=None, loaded_base_name=None,
opts,
tag="roster",
whitelist=whitelist,
- pack={"__runner__": runner, "__utils__": utils, "__context__": context},
+ pack={
+ "__runner__": runner,
+ "__utils__": utils,
+ "__context__": context,
+ "__opts__": opts,
+ },
extra_module_dirs=utils.module_dirs if utils else None,
loaded_base_name=loaded_base_name,
)
--
2.39.2

File diff suppressed because it is too large Load Diff

View File

@ -0,0 +1,141 @@
From b4b2c59bfd479d59faeaf0e4d26d672828a519c8 Mon Sep 17 00:00:00 2001
From: Victor Zhestkov <vzhestkov@suse.com>
Date: Wed, 25 Nov 2020 15:09:41 +0300
Subject: [PATCH] Fix salt.utils.stringutils.to_str calls to make it
work with numeric uid/gid
Fix upstream tests to work with 3006.
---
salt/modules/file.py | 22 ++++++++++++-------
salt/states/file.py | 11 ++++++++--
.../unit/modules/file/test_file_check.py | 10 ++++-----
3 files changed, 28 insertions(+), 15 deletions(-)
diff --git a/salt/modules/file.py b/salt/modules/file.py
index 4612d65511..55b236fe41 100644
--- a/salt/modules/file.py
+++ b/salt/modules/file.py
@@ -5127,14 +5127,20 @@ def check_perms(
is_dir = os.path.isdir(name)
is_link = os.path.islink(name)
+ def __safe_to_str(s):
+ try:
+ return salt.utils.stringutils.to_str(s)
+ except:
+ return salt.utils.stringutils.to_str(str(s))
+
# Check and make user/group/mode changes, then verify they were successful
if user:
if (
salt.utils.platform.is_windows() and not user_to_uid(user) == cur["uid"]
) or (
not salt.utils.platform.is_windows()
- and not salt.utils.stringutils.to_str(user) == cur["user"]
- and not salt.utils.stringutils.to_str(user) == cur["uid"]
+ and not __safe_to_str(user) == cur["user"]
+ and not user == cur["uid"]
):
perms["cuser"] = user
@@ -5143,8 +5149,8 @@ def check_perms(
salt.utils.platform.is_windows() and not group_to_gid(group) == cur["gid"]
) or (
not salt.utils.platform.is_windows()
- and not salt.utils.stringutils.to_str(group) == cur["group"]
- and not salt.utils.stringutils.to_str(group) == cur["gid"]
+ and not __safe_to_str(group) == cur["group"]
+ and not group == cur["gid"]
):
perms["cgroup"] = group
@@ -5188,8 +5194,8 @@ def check_perms(
salt.utils.platform.is_windows() and not user_to_uid(user) == post["uid"]
) or (
not salt.utils.platform.is_windows()
- and not salt.utils.stringutils.to_str(user) == post["user"]
- and not salt.utils.stringutils.to_str(user) == post["uid"]
+ and not __safe_to_str(user) == post["user"]
+ and not user == post["uid"]
):
if __opts__["test"] is True:
ret["changes"]["user"] = user
@@ -5204,8 +5210,8 @@ def check_perms(
salt.utils.platform.is_windows() and not group_to_gid(group) == post["gid"]
) or (
not salt.utils.platform.is_windows()
- and not salt.utils.stringutils.to_str(group) == post["group"]
- and not salt.utils.stringutils.to_str(group) == post["gid"]
+ and not __safe_to_str(group) == post["group"]
+ and not group == post["gid"]
):
if __opts__["test"] is True:
ret["changes"]["group"] = group
diff --git a/salt/states/file.py b/salt/states/file.py
index 024e5e34ce..9630ff7096 100644
--- a/salt/states/file.py
+++ b/salt/states/file.py
@@ -864,15 +864,22 @@ def _check_dir_meta(name, user, group, mode, follow_symlinks=False):
if not stats:
changes["directory"] = "new"
return changes
+
+ def __safe_to_str(s):
+ try:
+ return salt.utils.stringutils.to_str(s)
+ except:
+ return salt.utils.stringutils.to_str(str(s))
+
if (
user is not None
- and salt.utils.stringutils.to_str(user) != stats["user"]
+ and __safe_to_str(user) != stats["user"]
and user != stats.get("uid")
):
changes["user"] = user
if (
group is not None
- and salt.utils.stringutils.to_str(group) != stats["group"]
+ and __safe_to_str(group) != stats["group"]
and group != stats.get("gid")
):
changes["group"] = group
diff --git a/tests/pytests/unit/modules/file/test_file_check.py b/tests/pytests/unit/modules/file/test_file_check.py
index ce86acd7fc..2294e6760b 100644
--- a/tests/pytests/unit/modules/file/test_file_check.py
+++ b/tests/pytests/unit/modules/file/test_file_check.py
@@ -17,7 +17,7 @@ def configure_loader_modules():
return {
filemod: {
"__context__": {},
- "__opts__": {"test": False},
+ "__opts__": {"test": True},
}
}
@@ -172,7 +172,7 @@ def test_check_managed_changes_follow_symlinks(a_link, tfile):
),
# no user/group changes needed by id
(
- {"user": 3001, "group": 4001},
+ {"user": 2001, "group": 1001},
{},
),
],
@@ -184,9 +184,9 @@ def test_check_perms_user_group_name_and_id(input, expected):
stat_out = {
"user": "luser",
"group": "lgroup",
- "uid": 3001,
- "gid": 4001,
- "mode": "123",
+ "uid": 2001,
+ "gid": 1001,
+ "mode": "0123",
}
patch_stats = patch(
--
2.39.2

View File

@ -0,0 +1,118 @@
From 38de9af6bd243d35464713e0ee790255d3b40a7e Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Pablo=20Su=C3=A1rez=20Hern=C3=A1ndez?=
<psuarezhernandez@suse.com>
Date: Fri, 23 Jun 2023 13:02:51 +0100
Subject: [PATCH] Fix some issues detected in "salt-support" CLI, module
and tests (bsc#1211591) (#580)
* saltsupport: avoid debug traceback due missing import
* Use yaml and json wrappers provides by Salt utils
* Remove unnecessary call to deprecated setup_logfile_logger
* Move unittest saltsupport tests to proper place
* Fix test assertion error due wrong capturing of message
---
salt/cli/support/__init__.py | 4 ++--
salt/cli/support/collector.py | 6 ++----
tests/{pytests => }/unit/cli/test_support.py | 0
tests/unit/modules/test_saltsupport.py | 6 +++---
4 files changed, 7 insertions(+), 9 deletions(-)
rename tests/{pytests => }/unit/cli/test_support.py (100%)
diff --git a/salt/cli/support/__init__.py b/salt/cli/support/__init__.py
index 59c2609e07..0a7da72e93 100644
--- a/salt/cli/support/__init__.py
+++ b/salt/cli/support/__init__.py
@@ -6,7 +6,7 @@ import os
import jinja2
import salt.exceptions
-import yaml
+import salt.utils.yaml
log = logging.getLogger(__name__)
@@ -48,7 +48,7 @@ def get_profile(profile, caller, runner):
try:
rendered_template = _render_profile(profile_path, caller, runner)
log.debug("\n{d}\n{t}\n{d}\n".format(d="-" * 80, t=rendered_template))
- data.update(yaml.load(rendered_template))
+ data.update(salt.utils.yaml.load(rendered_template))
except Exception as ex:
log.debug(ex, exc_info=True)
raise salt.exceptions.SaltException(
diff --git a/salt/cli/support/collector.py b/salt/cli/support/collector.py
index 1879cc5220..0ba987580c 100644
--- a/salt/cli/support/collector.py
+++ b/salt/cli/support/collector.py
@@ -1,6 +1,5 @@
import builtins as exceptions
import copy
-import json
import logging
import os
import sys
@@ -16,10 +15,10 @@ import salt.cli.support.intfunc
import salt.cli.support.localrunner
import salt.defaults.exitcodes
import salt.exceptions
-import salt.ext.six as six
import salt.output.table_out
import salt.runner
import salt.utils.files
+import salt.utils.json
import salt.utils.parsers
import salt.utils.platform
import salt.utils.process
@@ -169,7 +168,7 @@ class SupportDataCollector:
content = None
if content is None:
- data = json.loads(json.dumps(data))
+ data = salt.utils.json.loads(salt.utils.json.dumps(data))
if isinstance(data, dict) and data.get("return"):
data = data.get("return")
content = yaml.safe_dump(data, default_flow_style=False, indent=4)
@@ -506,7 +505,6 @@ class SaltSupport(salt.utils.parsers.SaltSupportOptionParser):
self.out.error(ex)
else:
if self.config["log_level"] not in ("quiet",):
- self.setup_logfile_logger()
salt.utils.verify.verify_log(self.config)
salt.cli.support.log = log # Pass update logger so trace is available
diff --git a/tests/pytests/unit/cli/test_support.py b/tests/unit/cli/test_support.py
similarity index 100%
rename from tests/pytests/unit/cli/test_support.py
rename to tests/unit/cli/test_support.py
diff --git a/tests/unit/modules/test_saltsupport.py b/tests/unit/modules/test_saltsupport.py
index 4ef04246b9..2afdd69b3e 100644
--- a/tests/unit/modules/test_saltsupport.py
+++ b/tests/unit/modules/test_saltsupport.py
@@ -251,8 +251,8 @@ professor: Farnsworth
with pytest.raises(salt.exceptions.SaltInvocationError) as err:
support.sync("group-name")
assert (
- ' Support archive "/mnt/storage/three-support-222-222.bz2" was not found'
- in str(err)
+ 'Support archive "/mnt/storage/three-support-222-222.bz2" was not found'
+ in str(err.value)
)
@patch("tempfile.mkstemp", MagicMock(return_value=(0, "dummy")))
@@ -274,7 +274,7 @@ professor: Farnsworth
with pytest.raises(salt.exceptions.SaltInvocationError) as err:
support.sync("group-name", name="lost.bz2")
- assert ' Support archive "lost.bz2" was not found' in str(err)
+ assert 'Support archive "lost.bz2" was not found' in str(err.value)
@patch("tempfile.mkstemp", MagicMock(return_value=(0, "dummy")))
@patch("os.path.exists", MagicMock(return_value=False))
--
2.41.0

View File

@ -0,0 +1,243 @@
From 4555f215614c2f2d5c4b5c376264df9b3f23a55b Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Pablo=20Su=C3=A1rez=20Hern=C3=A1ndez?=
<psuarezhernandez@suse.com>
Date: Tue, 18 Jun 2024 15:55:31 +0100
Subject: [PATCH] Fix "status.diskusage" and exclude some tests to run
when testing Salt Bundle (#659)
* Show warning instead of crashing when stats cannot be fetched
* Skip tests that are not compatible with Salt Bundle
* test_syndic_eauth: do not produce error if docker service is not running
* test_cmdmod: assert properly in case of DeprecationsWarnings
* Include path as part of output in case of errors
Co-authored-by: Marek Czernek <marek.czernek@suse.com>
---------
Co-authored-by: Marek Czernek <marek.czernek@suse.com>
---
salt/modules/status.py | 14 +++++++++-----
tests/integration/modules/test_pip.py | 5 +++++
tests/integration/ssh/test_state.py | 5 +++++
tests/pytests/functional/modules/test_pip.py | 4 ++++
.../functional/modules/test_virtualenv_mod.py | 5 +++++
tests/pytests/functional/states/test_pip_state.py | 4 ++++
tests/pytests/integration/cli/test_syndic_eauth.py | 3 +++
tests/pytests/integration/modules/test_cmdmod.py | 4 +++-
.../pytests/integration/netapi/test_ssh_client.py | 6 ++++++
tests/pytests/integration/ssh/conftest.py | 9 +++++++++
tests/unit/utils/test_thin.py | 4 ++++
11 files changed, 57 insertions(+), 6 deletions(-)
create mode 100644 tests/pytests/integration/ssh/conftest.py
diff --git a/salt/modules/status.py b/salt/modules/status.py
index 33e5d7b8df5..8d6241a9dce 100644
--- a/salt/modules/status.py
+++ b/salt/modules/status.py
@@ -1053,11 +1053,15 @@ def diskusage(*args):
ret = {}
for path in selected:
if os.path.exists(path):
- fsstats = os.statvfs(path)
- blksz = fsstats.f_bsize
- available = fsstats.f_bavail * blksz
- total = fsstats.f_blocks * blksz
- ret[path] = {"available": available, "total": total}
+ try:
+ fsstats = os.statvfs(path)
+ blksz = fsstats.f_bsize
+ available = fsstats.f_bavail * blksz
+ total = fsstats.f_blocks * blksz
+ ret[path] = {"available": available, "total": total}
+ except OSError as exc:
+ log.warning("Cannot get stats from '{}': {}".format(path, exc))
+ ret[path] = {"available": None, "total": None}
return ret
diff --git a/tests/integration/modules/test_pip.py b/tests/integration/modules/test_pip.py
index d57e9cd2aea..85045dec90b 100644
--- a/tests/integration/modules/test_pip.py
+++ b/tests/integration/modules/test_pip.py
@@ -2,6 +2,7 @@ import os
import pprint
import re
import shutil
+import sys
import tempfile
import pytest
@@ -16,6 +17,10 @@ from tests.support.runtests import RUNTIME_VARS
@pytest.mark.skip_if_binaries_missing(*KNOWN_BINARY_NAMES, check_all=False)
+@pytest.mark.skipif(
+ "venv-salt-minion" in sys.executable,
+ reason="Skipping for Salt Bundle (tests are not compatible)",
+)
@pytest.mark.windows_whitelisted
class PipModuleTest(ModuleCase):
def setUp(self):
diff --git a/tests/integration/ssh/test_state.py b/tests/integration/ssh/test_state.py
index 69245454e85..daa478b45be 100644
--- a/tests/integration/ssh/test_state.py
+++ b/tests/integration/ssh/test_state.py
@@ -2,6 +2,7 @@ import glob
import logging
import os
import shutil
+import sys
import threading
import time
@@ -18,6 +19,10 @@ log = logging.getLogger(__name__)
@pytest.mark.slow_test
+@pytest.mark.skipif(
+ "venv-salt-minion" in sys.executable,
+ reason="Skipping for Salt Bundle (tests are not compatible)",
+)
class SSHStateTest(SSHCase):
"""
testing the state system with salt-ssh
diff --git a/tests/pytests/functional/modules/test_pip.py b/tests/pytests/functional/modules/test_pip.py
index e04baa7c43f..1f0104e3e6d 100644
--- a/tests/pytests/functional/modules/test_pip.py
+++ b/tests/pytests/functional/modules/test_pip.py
@@ -23,6 +23,10 @@ from tests.support.helpers import VirtualEnv
@pytest.mark.requires_network
@pytest.mark.slow_test
@pytest.mark.skip_if_binaries_missing("virtualenv", reason="Needs virtualenv binary")
+@pytest.mark.skipif(
+ "venv-salt-minion" in sys.executable,
+ reason="Skipping for Salt Bundle (tests are not compatible)",
+)
def test_list_available_packages(modules, pip_version, tmp_path):
with VirtualEnv(venv_dir=tmp_path, pip_requirement=pip_version) as virtualenv:
virtualenv.install("-U", pip_version)
diff --git a/tests/pytests/functional/modules/test_virtualenv_mod.py b/tests/pytests/functional/modules/test_virtualenv_mod.py
index 2b6abf91e23..69e1866c6e3 100644
--- a/tests/pytests/functional/modules/test_virtualenv_mod.py
+++ b/tests/pytests/functional/modules/test_virtualenv_mod.py
@@ -1,4 +1,5 @@
import shutil
+import sys
import pytest
@@ -68,6 +69,10 @@ def test_clear(virtualenv, venv_dir, modules):
bool(salt.utils.path.which("transactional-update")),
reason="Skipping on transactional systems",
)
+@pytest.mark.skipif(
+ "venv-salt-minion" in sys.executable,
+ reason="Skipping for Salt Bundle (tests are not compatible)",
+)
def test_virtualenv_ver(virtualenv, venv_dir):
ret = virtualenv.create(str(venv_dir))
assert ret
diff --git a/tests/pytests/functional/states/test_pip_state.py b/tests/pytests/functional/states/test_pip_state.py
index 1f2080f1f86..28c1f9fd1f3 100644
--- a/tests/pytests/functional/states/test_pip_state.py
+++ b/tests/pytests/functional/states/test_pip_state.py
@@ -84,6 +84,10 @@ def create_virtualenv(modules):
bool(salt.utils.path.which("transactional-update")),
reason="Skipping on transactional systems",
)
+@pytest.mark.skipif(
+ "venv-salt-minion" in sys.executable,
+ reason="Skipping for Salt Bundle (tests are not compatible)",
+)
def test_pip_installed_removed(modules, states):
"""
Tests installed and removed states
diff --git a/tests/pytests/integration/cli/test_syndic_eauth.py b/tests/pytests/integration/cli/test_syndic_eauth.py
index dde4c25bc91..f2d36c13abb 100644
--- a/tests/pytests/integration/cli/test_syndic_eauth.py
+++ b/tests/pytests/integration/cli/test_syndic_eauth.py
@@ -68,6 +68,9 @@ def syndic_network():
try:
network = client.networks.create(name="syndic_test_net", ipam=ipam_config)
yield network.name
+ except Exception as e:
+ # Docker failed, it's gonna be an environment issue, let's just skip
+ pytest.skip(f"Docker failed with error {e}")
finally:
if network is not None:
network.remove()
diff --git a/tests/pytests/integration/modules/test_cmdmod.py b/tests/pytests/integration/modules/test_cmdmod.py
index d0b993ddbcf..20a6f808933 100644
--- a/tests/pytests/integration/modules/test_cmdmod.py
+++ b/tests/pytests/integration/modules/test_cmdmod.py
@@ -75,7 +75,9 @@ def test_blacklist_glob(salt_call_cli):
)
assert (
- ret.stderr.rstrip()
+ ret.stderr.rstrip().split("\n")[
+ -1
+ ] # Taking only the last line in case of DeprecationWarnings
== "Error running 'cmd.run': The shell command \"bad_command --foo\" is not permitted"
)
diff --git a/tests/pytests/integration/netapi/test_ssh_client.py b/tests/pytests/integration/netapi/test_ssh_client.py
index 42db6d0eacd..457c151c94f 100644
--- a/tests/pytests/integration/netapi/test_ssh_client.py
+++ b/tests/pytests/integration/netapi/test_ssh_client.py
@@ -1,3 +1,5 @@
+import sys
+
import pytest
import salt.netapi
@@ -8,6 +10,10 @@ from tests.support.mock import patch
pytestmark = [
pytest.mark.slow_test,
pytest.mark.requires_sshd_server,
+ pytest.mark.skipif(
+ "venv-salt-minion" in sys.executable,
+ reason="Skipping for Salt Bundle (tests are not compatible)",
+ ),
]
diff --git a/tests/pytests/integration/ssh/conftest.py b/tests/pytests/integration/ssh/conftest.py
new file mode 100644
index 00000000000..ba6e5f2773a
--- /dev/null
+++ b/tests/pytests/integration/ssh/conftest.py
@@ -0,0 +1,9 @@
+import sys
+
+import pytest
+
+
+@pytest.fixture(scope="package", autouse=True)
+def _auto_skip_on_salt_bundle():
+ if "venv-salt-minion" in sys.executable:
+ pytest.skip("Skipping for Salt Bundle (tests are not compatible)")
diff --git a/tests/unit/utils/test_thin.py b/tests/unit/utils/test_thin.py
index c4e9c3b3bef..b31199976c8 100644
--- a/tests/unit/utils/test_thin.py
+++ b/tests/unit/utils/test_thin.py
@@ -1383,6 +1383,10 @@ class SSHThinTestCase(TestCase):
"virtualenv", reason="Needs virtualenv binary"
)
@pytest.mark.skip_on_windows(reason="salt-ssh does not deploy to/from windows")
+ @pytest.mark.skipif(
+ "venv-salt-minion" in sys.executable,
+ reason="Skipping for Salt Bundle (tests are not compatible)",
+ )
def test_thin_dir(self):
"""
Test the thin dir to make sure salt-call can run
--
2.44.0

View File

@ -0,0 +1,772 @@
From 737b0bd931c07239d50e7395eb7425c06f485848 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Pablo=20Su=C3=A1rez=20Hern=C3=A1ndez?=
<psuarezhernandez@suse.com>
Date: Thu, 14 Mar 2024 13:03:00 +0000
Subject: [PATCH] Fix tests failures and errors when detected on VM
execution from Salt Shaker (#636)
* test_chmod: fix test expectation
* test_pkg: Adjust package expectation for SUSE family
* test_docker_network: Skip non-supported operation for SUSE family
* Fix tests failing due wrong docker-py version
* test_version: skip test in packaged scenario when setup.py is missing
* Fix issue related to docker version used during testing
* Fix test errors when setup.py is not available
* test_loader: do not run if setup.py is missing
* test_install: Fix test errors when setup.py is not available
* test_master: use a right service name expected on SUSE family
* test_jinja_filters: prevent test failure when which binary is not available
* Prevent errors when x509 utils cannot be loaded
* test_thin: skip test if virtualenv binary is missing
---
tests/integration/pillar/test_git_pillar.py | 12 +++++++++++-
tests/pytests/functional/cache/test_consul.py | 5 +++++
tests/pytests/functional/cache/test_mysql.py | 5 +++++
tests/pytests/functional/loader/test_loader.py | 9 +++++++++
.../functional/modules/state/test_jinja_filters.py | 4 ++--
tests/pytests/functional/modules/test_cmdmod.py | 2 +-
tests/pytests/functional/modules/test_dockermod.py | 8 +++++++-
tests/pytests/functional/modules/test_pkg.py | 2 ++
tests/pytests/functional/modules/test_swarm.py | 6 +++++-
tests/pytests/functional/states/rabbitmq/conftest.py | 11 +++++++++++
.../functional/states/rabbitmq/test_cluster.py | 7 ++++++-
.../functional/states/rabbitmq/test_plugin.py | 8 +++++++-
.../functional/states/rabbitmq/test_policy.py | 7 ++++++-
.../functional/states/rabbitmq/test_upstream.py | 7 ++++++-
.../pytests/functional/states/rabbitmq/test_user.py | 7 ++++++-
.../pytests/functional/states/rabbitmq/test_vhost.py | 7 ++++++-
.../pytests/functional/states/test_docker_network.py | 7 ++++++-
tests/pytests/functional/states/test_pkg.py | 2 +-
tests/pytests/functional/test_version.py | 9 +++++++++
tests/pytests/integration/modules/test_virt.py | 5 +++++
tests/pytests/integration/modules/test_x509_v2.py | 2 +-
tests/pytests/integration/ssh/test_log.py | 7 ++++++-
tests/pytests/integration/ssh/test_master.py | 2 +-
tests/pytests/integration/ssh/test_py_versions.py | 7 ++++++-
tests/pytests/integration/ssh/test_ssh_setup.py | 7 ++++++-
tests/pytests/integration/states/test_x509_v2.py | 2 +-
tests/pytests/scenarios/setup/test_install.py | 8 ++++++++
tests/pytests/unit/modules/test_pip.py | 8 ++++++++
tests/pytests/unit/utils/test_x509.py | 3 ++-
tests/unit/states/test_pip_state.py | 6 ++++++
tests/unit/utils/test_thin.py | 3 +++
31 files changed, 164 insertions(+), 21 deletions(-)
diff --git a/tests/integration/pillar/test_git_pillar.py b/tests/integration/pillar/test_git_pillar.py
index 5b4cbda95c9..d56785f97c2 100644
--- a/tests/integration/pillar/test_git_pillar.py
+++ b/tests/integration/pillar/test_git_pillar.py
@@ -79,6 +79,7 @@ from salt.utils.gitfs import (
PYGIT2_VERSION,
FileserverConfigError,
)
+from salt.utils.versions import Version
from tests.support.gitfs import ( # pylint: disable=unused-import
PASSWORD,
USERNAME,
@@ -101,11 +102,20 @@ try:
except Exception: # pylint: disable=broad-except
HAS_PYGIT2 = False
+docker = pytest.importorskip("docker")
+
INSIDE_CONTAINER = os.getenv("HOSTNAME", "") == "salt-test-container"
+
pytestmark = [
SKIP_INITIAL_PHOTONOS_FAILURES,
pytest.mark.skip_on_platforms(windows=True, darwin=True),
- pytest.mark.skipif(INSIDE_CONTAINER, reason="Communication problems between containers."),
+ pytest.mark.skipif(
+ INSIDE_CONTAINER, reason="Communication problems between containers."
+ ),
+ pytest.mark.skipif(
+ Version(docker.__version__) < Version("4.0.0"),
+ reason="Test does not work in this version of docker-py",
+ ),
]
diff --git a/tests/pytests/functional/cache/test_consul.py b/tests/pytests/functional/cache/test_consul.py
index c6e16d2588e..30dc6925f26 100644
--- a/tests/pytests/functional/cache/test_consul.py
+++ b/tests/pytests/functional/cache/test_consul.py
@@ -8,6 +8,7 @@ from saltfactories.utils import random_string
import salt.cache
import salt.loader
+from salt.utils.versions import Version
from tests.pytests.functional.cache.helpers import run_common_cache_tests
docker = pytest.importorskip("docker")
@@ -20,6 +21,10 @@ pytestmark = [
pytest.mark.slow_test,
pytest.mark.skip_if_binaries_missing("dockerd"),
pytest.mark.skipif(INSIDE_CONTAINER, reason="Cannot run in a container"),
+ pytest.mark.skipif(
+ Version(docker.__version__) < Version("4.0.0"),
+ reason="Test does not work in this version of docker-py",
+ ),
]
diff --git a/tests/pytests/functional/cache/test_mysql.py b/tests/pytests/functional/cache/test_mysql.py
index e15fc732a4a..93c6c7c6f6f 100644
--- a/tests/pytests/functional/cache/test_mysql.py
+++ b/tests/pytests/functional/cache/test_mysql.py
@@ -5,6 +5,7 @@ import pytest
import salt.cache
import salt.loader
+from salt.utils.versions import Version
from tests.pytests.functional.cache.helpers import run_common_cache_tests
from tests.support.pytest.mysql import * # pylint: disable=wildcard-import,unused-wildcard-import
@@ -18,6 +19,10 @@ pytestmark = [
pytest.mark.slow_test,
pytest.mark.skip_if_binaries_missing("dockerd"),
pytest.mark.skipif(INSIDE_CONTAINER, reason="Cannot run in a container"),
+ pytest.mark.skipif(
+ Version(docker.__version__) < Version("4.0.0"),
+ reason="Test does not work in this version of docker-py",
+ ),
]
diff --git a/tests/pytests/functional/loader/test_loader.py b/tests/pytests/functional/loader/test_loader.py
index 963d33f59c3..e81ef126ca3 100644
--- a/tests/pytests/functional/loader/test_loader.py
+++ b/tests/pytests/functional/loader/test_loader.py
@@ -1,14 +1,23 @@
import json
+import os
import pytest
from salt.utils.versions import Version
from tests.support.helpers import SaltVirtualEnv
from tests.support.pytest.helpers import FakeSaltExtension
+from tests.support.runtests import RUNTIME_VARS
+
+MISSING_SETUP_PY_FILE = not os.path.exists(
+ os.path.join(RUNTIME_VARS.CODE_DIR, "setup.py")
+)
pytestmark = [
# These are slow because they create a virtualenv and install salt in it
pytest.mark.slow_test,
+ pytest.mark.skipif(
+ MISSING_SETUP_PY_FILE, reason="This test only work if setup.py is available"
+ ),
]
diff --git a/tests/pytests/functional/modules/state/test_jinja_filters.py b/tests/pytests/functional/modules/state/test_jinja_filters.py
index 220310aaaf0..cc8ffcb731b 100644
--- a/tests/pytests/functional/modules/state/test_jinja_filters.py
+++ b/tests/pytests/functional/modules/state/test_jinja_filters.py
@@ -798,9 +798,9 @@ def _filter_id(value):
),
Filter(
name="which",
- expected={"ret": salt.utils.path.which("which")},
+ expected={"ret": salt.utils.path.which("ls")},
sls="""
- {% set result = 'which' | which() %}
+ {% set result = 'ls' | which() %}
test:
module.run:
- name: test.echo
diff --git a/tests/pytests/functional/modules/test_cmdmod.py b/tests/pytests/functional/modules/test_cmdmod.py
index d30b474c6d2..adaf469c283 100644
--- a/tests/pytests/functional/modules/test_cmdmod.py
+++ b/tests/pytests/functional/modules/test_cmdmod.py
@@ -105,7 +105,7 @@ def test_run(cmdmod):
template="jinja",
python_shell=True,
)
- == "func-tests-minion"
+ == "func-tests-minion-opts"
)
assert cmdmod.run("grep f", stdin="one\ntwo\nthree\nfour\nfive\n") == "four\nfive"
assert cmdmod.run('echo "a=b" | sed -e s/=/:/g', python_shell=True) == "a:b"
diff --git a/tests/pytests/functional/modules/test_dockermod.py b/tests/pytests/functional/modules/test_dockermod.py
index a5b40869352..eb0cc20f9ff 100644
--- a/tests/pytests/functional/modules/test_dockermod.py
+++ b/tests/pytests/functional/modules/test_dockermod.py
@@ -8,7 +8,9 @@ import pytest
from saltfactories.utils import random_string
from saltfactories.utils.functional import StateResult
-pytest.importorskip("docker")
+from salt.utils.versions import Version
+
+docker = pytest.importorskip("docker")
log = logging.getLogger(__name__)
@@ -18,6 +20,10 @@ pytestmark = [
pytest.mark.slow_test,
pytest.mark.skip_if_binaries_missing("docker", "dockerd", check_all=False),
pytest.mark.skipif(INSIDE_CONTAINER, reason="Cannot run inside a container"),
+ pytest.mark.skipif(
+ Version(docker.__version__) < Version("4.0.0"),
+ reason="Test does not work in this version of docker-py",
+ ),
]
diff --git a/tests/pytests/functional/modules/test_pkg.py b/tests/pytests/functional/modules/test_pkg.py
index 707361c227b..7cedd32bf6c 100644
--- a/tests/pytests/functional/modules/test_pkg.py
+++ b/tests/pytests/functional/modules/test_pkg.py
@@ -67,6 +67,8 @@ def test_pkg(grains):
_pkg = "units"
elif grains["os_family"] == "Debian":
_pkg = "ifenslave"
+ elif grains["os_family"] == "Suse":
+ _pkg = "wget"
return _pkg
diff --git a/tests/pytests/functional/modules/test_swarm.py b/tests/pytests/functional/modules/test_swarm.py
index 9dc70f5b3dc..fc3c2b739cd 100644
--- a/tests/pytests/functional/modules/test_swarm.py
+++ b/tests/pytests/functional/modules/test_swarm.py
@@ -20,7 +20,11 @@ pytest.importorskip("docker")
def docker_version(shell, grains):
ret = shell.run("docker", "--version")
assert ret.returncode == 0
- return salt.utils.versions.Version(ret.stdout.split(",")[0].split()[-1].strip())
+ # Example output:
+ # Docker version 24.0.7-ce, build 311b9ff0aa93
+ return salt.utils.versions.Version(
+ ret.stdout.split(",")[0].split()[-1].split("-")[0].strip()
+ )
@pytest.fixture
diff --git a/tests/pytests/functional/states/rabbitmq/conftest.py b/tests/pytests/functional/states/rabbitmq/conftest.py
index d8ccc1761b8..60f8206a088 100644
--- a/tests/pytests/functional/states/rabbitmq/conftest.py
+++ b/tests/pytests/functional/states/rabbitmq/conftest.py
@@ -5,8 +5,19 @@ import attr
import pytest
from saltfactories.utils import random_string
+from salt.utils.versions import Version
+
log = logging.getLogger(__name__)
+docker = pytest.importorskip("docker")
+
+pytestmark = [
+ pytest.mark.skipif(
+ Version(docker.__version__) < Version("4.0.0"),
+ reason="Test does not work in this version of docker-py",
+ ),
+]
+
@attr.s(kw_only=True, slots=True)
class RabbitMQImage:
diff --git a/tests/pytests/functional/states/rabbitmq/test_cluster.py b/tests/pytests/functional/states/rabbitmq/test_cluster.py
index 210b22a2360..df85f04f78d 100644
--- a/tests/pytests/functional/states/rabbitmq/test_cluster.py
+++ b/tests/pytests/functional/states/rabbitmq/test_cluster.py
@@ -9,8 +9,9 @@ import pytest
import salt.modules.rabbitmq as rabbitmq
import salt.states.rabbitmq_cluster as rabbitmq_cluster
+from salt.utils.versions import Version
-pytest.importorskip("docker")
+docker = pytest.importorskip("docker")
log = logging.getLogger(__name__)
@@ -22,6 +23,10 @@ pytestmark = [
"docker", "dockerd", reason="Docker not installed"
),
pytest.mark.skipif(INSIDE_CONTAINER, reason="Cannot run in a container"),
+ pytest.mark.skipif(
+ Version(docker.__version__) < Version("4.0.0"),
+ reason="Test does not work in this version of docker-py",
+ ),
]
diff --git a/tests/pytests/functional/states/rabbitmq/test_plugin.py b/tests/pytests/functional/states/rabbitmq/test_plugin.py
index f1191490536..6ed4cdc9238 100644
--- a/tests/pytests/functional/states/rabbitmq/test_plugin.py
+++ b/tests/pytests/functional/states/rabbitmq/test_plugin.py
@@ -9,11 +9,13 @@ import pytest
import salt.modules.rabbitmq as rabbitmq
import salt.states.rabbitmq_plugin as rabbitmq_plugin
+from salt.utils.versions import Version
from tests.support.mock import patch
log = logging.getLogger(__name__)
-pytest.importorskip("docker")
+docker = pytest.importorskip("docker")
+
INSIDE_CONTAINER = os.getenv("HOSTNAME", "") == "salt-test-container"
@@ -23,6 +25,10 @@ pytestmark = [
"docker", "dockerd", reason="Docker not installed"
),
pytest.mark.skipif(INSIDE_CONTAINER, reason="Cannot run in a container"),
+ pytest.mark.skipif(
+ Version(docker.__version__) < Version("4.0.0"),
+ reason="Test does not work in this version of docker-py",
+ ),
]
diff --git a/tests/pytests/functional/states/rabbitmq/test_policy.py b/tests/pytests/functional/states/rabbitmq/test_policy.py
index 7ccf6a522e0..c648c9ff947 100644
--- a/tests/pytests/functional/states/rabbitmq/test_policy.py
+++ b/tests/pytests/functional/states/rabbitmq/test_policy.py
@@ -9,11 +9,12 @@ import pytest
import salt.modules.rabbitmq as rabbitmq
import salt.states.rabbitmq_policy as rabbitmq_policy
+from salt.utils.versions import Version
from tests.support.mock import MagicMock, patch
log = logging.getLogger(__name__)
-pytest.importorskip("docker")
+docker = pytest.importorskip("docker")
INSIDE_CONTAINER = os.getenv("HOSTNAME", "") == "salt-test-container"
@@ -23,6 +24,10 @@ pytestmark = [
"docker", "dockerd", reason="Docker not installed"
),
pytest.mark.skipif(INSIDE_CONTAINER, reason="Cannot run in a container"),
+ pytest.mark.skipif(
+ Version(docker.__version__) < Version("4.0.0"),
+ reason="Test does not work in this version of docker-py",
+ ),
]
diff --git a/tests/pytests/functional/states/rabbitmq/test_upstream.py b/tests/pytests/functional/states/rabbitmq/test_upstream.py
index c7bcf3b0d44..0a9686d6948 100644
--- a/tests/pytests/functional/states/rabbitmq/test_upstream.py
+++ b/tests/pytests/functional/states/rabbitmq/test_upstream.py
@@ -9,10 +9,11 @@ import pytest
import salt.modules.rabbitmq as rabbitmq
import salt.states.rabbitmq_upstream as rabbitmq_upstream
+from salt.utils.versions import Version
log = logging.getLogger(__name__)
-pytest.importorskip("docker")
+docker = pytest.importorskip("docker")
INSIDE_CONTAINER = os.getenv("HOSTNAME", "") == "salt-test-container"
@@ -22,6 +23,10 @@ pytestmark = [
"docker", "dockerd", reason="Docker not installed"
),
pytest.mark.skipif(INSIDE_CONTAINER, reason="Cannot run in a container"),
+ pytest.mark.skipif(
+ Version(docker.__version__) < Version("4.0.0"),
+ reason="Test does not work in this version of docker-py",
+ ),
]
diff --git a/tests/pytests/functional/states/rabbitmq/test_user.py b/tests/pytests/functional/states/rabbitmq/test_user.py
index 31723df7be8..a6b0766087f 100644
--- a/tests/pytests/functional/states/rabbitmq/test_user.py
+++ b/tests/pytests/functional/states/rabbitmq/test_user.py
@@ -9,10 +9,11 @@ import pytest
import salt.modules.rabbitmq as rabbitmq
import salt.states.rabbitmq_user as rabbitmq_user
+from salt.utils.versions import Version
log = logging.getLogger(__name__)
-pytest.importorskip("docker")
+docker = pytest.importorskip("docker")
INSIDE_CONTAINER = os.getenv("HOSTNAME", "") == "salt-test-container"
@@ -22,6 +23,10 @@ pytestmark = [
"docker", "dockerd", reason="Docker not installed"
),
pytest.mark.skipif(INSIDE_CONTAINER, reason="Cannot run in a container"),
+ pytest.mark.skipif(
+ Version(docker.__version__) < Version("4.0.0"),
+ reason="Test does not work in this version of docker-py",
+ ),
]
diff --git a/tests/pytests/functional/states/rabbitmq/test_vhost.py b/tests/pytests/functional/states/rabbitmq/test_vhost.py
index d6ac6901a25..f3553c03e58 100644
--- a/tests/pytests/functional/states/rabbitmq/test_vhost.py
+++ b/tests/pytests/functional/states/rabbitmq/test_vhost.py
@@ -9,10 +9,11 @@ import pytest
import salt.modules.rabbitmq as rabbitmq
import salt.states.rabbitmq_vhost as rabbitmq_vhost
+from salt.utils.versions import Version
log = logging.getLogger(__name__)
-pytest.importorskip("docker")
+docker = pytest.importorskip("docker")
INSIDE_CONTAINER = os.getenv("HOSTNAME", "") == "salt-test-container"
@@ -22,6 +23,10 @@ pytestmark = [
"docker", "dockerd", reason="Docker not installed"
),
pytest.mark.skipif(INSIDE_CONTAINER, reason="Cannot run in a container"),
+ pytest.mark.skipif(
+ Version(docker.__version__) < Version("4.0.0"),
+ reason="Test does not work in this version of docker-py",
+ ),
]
diff --git a/tests/pytests/functional/states/test_docker_network.py b/tests/pytests/functional/states/test_docker_network.py
index 0da01ed8bac..19868d03ad1 100644
--- a/tests/pytests/functional/states/test_docker_network.py
+++ b/tests/pytests/functional/states/test_docker_network.py
@@ -220,10 +220,15 @@ def test_present_with_containers(network, docker, docker_network, container):
@pytest.mark.parametrize("reconnect", [True, False])
-def test_present_with_reconnect(network, docker, docker_network, container, reconnect):
+def test_present_with_reconnect(
+ network, docker, docker_network, container, reconnect, grains
+):
"""
Test reconnecting with containers not passed to state
"""
+ if grains["os_family"] == "Suse":
+ pytest.skip("This test is failing for SUSE family")
+
with network() as net:
ret = docker_network.present(name=net.name, driver="bridge")
assert ret.result is True
diff --git a/tests/pytests/functional/states/test_pkg.py b/tests/pytests/functional/states/test_pkg.py
index 12318c996d1..864c1d025f3 100644
--- a/tests/pytests/functional/states/test_pkg.py
+++ b/tests/pytests/functional/states/test_pkg.py
@@ -55,7 +55,7 @@ def PKG_TARGETS(grains):
else:
_PKG_TARGETS = ["units", "zsh-html"]
elif grains["os_family"] == "Suse":
- _PKG_TARGETS = ["lynx", "htop"]
+ _PKG_TARGETS = ["iotop", "screen"]
return _PKG_TARGETS
diff --git a/tests/pytests/functional/test_version.py b/tests/pytests/functional/test_version.py
index dfa8850557e..3b85c05ccc6 100644
--- a/tests/pytests/functional/test_version.py
+++ b/tests/pytests/functional/test_version.py
@@ -1,14 +1,23 @@
import json
import logging
+import os
import pytest
from tests.support.helpers import SaltVirtualEnv
from tests.support.pytest.helpers import FakeSaltExtension
+from tests.support.runtests import RUNTIME_VARS
+
+MISSING_SETUP_PY_FILE = not os.path.exists(
+ os.path.join(RUNTIME_VARS.CODE_DIR, "setup.py")
+)
pytestmark = [
# These are slow because they create a virtualenv and install salt in it
pytest.mark.slow_test,
+ pytest.mark.skipif(
+ MISSING_SETUP_PY_FILE, reason="This test only work if setup.py is available"
+ ),
]
log = logging.getLogger(__name__)
diff --git a/tests/pytests/integration/modules/test_virt.py b/tests/pytests/integration/modules/test_virt.py
index 1b7f30154a7..572923764bb 100644
--- a/tests/pytests/integration/modules/test_virt.py
+++ b/tests/pytests/integration/modules/test_virt.py
@@ -9,6 +9,7 @@ from xml.etree import ElementTree
import pytest
import salt.version
+from salt.utils.versions import Version
from tests.support.virt import SaltVirtMinionContainerFactory
docker = pytest.importorskip("docker")
@@ -21,6 +22,10 @@ pytestmark = [
pytest.mark.slow_test,
pytest.mark.skip_if_binaries_missing("docker"),
pytest.mark.skipif(INSIDE_CONTAINER, reason="Cannot run in a container"),
+ pytest.mark.skipif(
+ Version(docker.__version__) < Version("4.0.0"),
+ reason="Test does not work in this version of docker-py",
+ ),
]
diff --git a/tests/pytests/integration/modules/test_x509_v2.py b/tests/pytests/integration/modules/test_x509_v2.py
index 2fd005778c5..cc8712e45cd 100644
--- a/tests/pytests/integration/modules/test_x509_v2.py
+++ b/tests/pytests/integration/modules/test_x509_v2.py
@@ -11,7 +11,7 @@ from pathlib import Path
import pytest
from saltfactories.utils import random_string
-import salt.utils.x509 as x509util
+x509util = pytest.importorskip("salt.utils.x509")
try:
import cryptography
diff --git a/tests/pytests/integration/ssh/test_log.py b/tests/pytests/integration/ssh/test_log.py
index 683feb8bd91..a63dd72373d 100644
--- a/tests/pytests/integration/ssh/test_log.py
+++ b/tests/pytests/integration/ssh/test_log.py
@@ -8,9 +8,10 @@ import time
import pytest
from saltfactories.utils import random_string
+from salt.utils.versions import Version
from tests.support.helpers import Keys
-pytest.importorskip("docker")
+docker = pytest.importorskip("docker")
INSIDE_CONTAINER = os.getenv("HOSTNAME", "") == "salt-test-container"
@@ -20,6 +21,10 @@ pytestmark = [
pytest.mark.slow_test,
pytest.mark.skip_if_binaries_missing("dockerd"),
pytest.mark.skipif(INSIDE_CONTAINER, reason="Cannot run in a container"),
+ pytest.mark.skipif(
+ Version(docker.__version__) < Version("4.0.0"),
+ reason="Test does not work in this version of docker-py",
+ ),
]
diff --git a/tests/pytests/integration/ssh/test_master.py b/tests/pytests/integration/ssh/test_master.py
index 0c2f482cf9f..c658123726b 100644
--- a/tests/pytests/integration/ssh/test_master.py
+++ b/tests/pytests/integration/ssh/test_master.py
@@ -23,7 +23,7 @@ def test_service(salt_ssh_cli, grains):
os_release = grains["osrelease"]
if os_family == "RedHat":
service = "crond"
- elif os_family == "Arch":
+ elif os_family in ["Suse", "Arch"]:
service = "sshd"
elif os_family == "MacOS":
service = "org.ntp.ntpd"
diff --git a/tests/pytests/integration/ssh/test_py_versions.py b/tests/pytests/integration/ssh/test_py_versions.py
index 71d4cfaa94e..991a3b71c44 100644
--- a/tests/pytests/integration/ssh/test_py_versions.py
+++ b/tests/pytests/integration/ssh/test_py_versions.py
@@ -9,9 +9,10 @@ import time
import pytest
from saltfactories.utils import random_string
+from salt.utils.versions import Version
from tests.support.helpers import Keys
-pytest.importorskip("docker")
+docker = pytest.importorskip("docker")
INSIDE_CONTAINER = os.getenv("HOSTNAME", "") == "salt-test-container"
@@ -21,6 +22,10 @@ pytestmark = [
pytest.mark.slow_test,
pytest.mark.skip_if_binaries_missing("dockerd"),
pytest.mark.skipif(INSIDE_CONTAINER, reason="Cannot run in a container"),
+ pytest.mark.skipif(
+ Version(docker.__version__) < Version("4.0.0"),
+ reason="Test does not work in this version of docker-py",
+ ),
]
diff --git a/tests/pytests/integration/ssh/test_ssh_setup.py b/tests/pytests/integration/ssh/test_ssh_setup.py
index 79b55ad90a5..97494bed36b 100644
--- a/tests/pytests/integration/ssh/test_ssh_setup.py
+++ b/tests/pytests/integration/ssh/test_ssh_setup.py
@@ -13,9 +13,10 @@ import pytest
from pytestshellutils.utils.processes import ProcessResult, terminate_process
from saltfactories.utils import random_string
+from salt.utils.versions import Version
from tests.support.helpers import Keys
-pytest.importorskip("docker")
+docker = pytest.importorskip("docker")
INSIDE_CONTAINER = os.getenv("HOSTNAME", "") == "salt-test-container"
@@ -25,6 +26,10 @@ pytestmark = [
pytest.mark.slow_test,
pytest.mark.skip_if_binaries_missing("dockerd"),
pytest.mark.skipif(INSIDE_CONTAINER, reason="Cannot run in a container"),
+ pytest.mark.skipif(
+ Version(docker.__version__) < Version("4.0.0"),
+ reason="Test does not work in this version of docker-py",
+ ),
]
diff --git a/tests/pytests/integration/states/test_x509_v2.py b/tests/pytests/integration/states/test_x509_v2.py
index 9a1c09bb8bd..4f943412950 100644
--- a/tests/pytests/integration/states/test_x509_v2.py
+++ b/tests/pytests/integration/states/test_x509_v2.py
@@ -10,7 +10,7 @@ from pathlib import Path
import pytest
from saltfactories.utils import random_string
-import salt.utils.x509 as x509util
+x509util = pytest.importorskip("salt.utils.x509")
try:
import cryptography
diff --git a/tests/pytests/scenarios/setup/test_install.py b/tests/pytests/scenarios/setup/test_install.py
index 7664fda804e..7a4abfc6e9e 100644
--- a/tests/pytests/scenarios/setup/test_install.py
+++ b/tests/pytests/scenarios/setup/test_install.py
@@ -14,11 +14,16 @@ import salt.utils.path
import salt.utils.platform
import salt.version
from salt.modules.virtualenv_mod import KNOWN_BINARY_NAMES
+from tests.support.runtests import RUNTIME_VARS
log = logging.getLogger(__name__)
INSIDE_CONTAINER = os.getenv("HOSTNAME", "") == "salt-test-container"
+MISSING_SETUP_PY_FILE = not os.path.exists(
+ os.path.join(RUNTIME_VARS.CODE_DIR, "setup.py")
+)
+
pytestmark = [
pytest.mark.core_test,
pytest.mark.windows_whitelisted,
@@ -27,6 +32,9 @@ pytestmark = [
pytest.mark.skipif(
INSIDE_CONTAINER, reason="No gcc and python3-devel in container."
),
+ pytest.mark.skipif(
+ MISSING_SETUP_PY_FILE, reason="This test only work if setup.py is available"
+ ),
]
diff --git a/tests/pytests/unit/modules/test_pip.py b/tests/pytests/unit/modules/test_pip.py
index c03e6ed292b..4b2da77786b 100644
--- a/tests/pytests/unit/modules/test_pip.py
+++ b/tests/pytests/unit/modules/test_pip.py
@@ -9,6 +9,11 @@ import salt.utils.files
import salt.utils.platform
from salt.exceptions import CommandExecutionError
from tests.support.mock import MagicMock, patch
+from tests.support.runtests import RUNTIME_VARS
+
+MISSING_SETUP_PY_FILE = not os.path.exists(
+ os.path.join(RUNTIME_VARS.CODE_DIR, "setup.py")
+)
class FakeFopen:
@@ -1738,6 +1743,9 @@ def test_when_version_is_called_with_a_user_it_should_be_passed_to_undelying_run
)
+@pytest.mark.skipif(
+ MISSING_SETUP_PY_FILE, reason="This test only work if setup.py is available"
+)
@pytest.mark.parametrize(
"bin_env,target,target_env,expected_target",
[
diff --git a/tests/pytests/unit/utils/test_x509.py b/tests/pytests/unit/utils/test_x509.py
index 25971af40d8..dade9eda46b 100644
--- a/tests/pytests/unit/utils/test_x509.py
+++ b/tests/pytests/unit/utils/test_x509.py
@@ -4,9 +4,10 @@ import ipaddress
import pytest
import salt.exceptions
-import salt.utils.x509 as x509
from tests.support.mock import ANY, Mock, patch
+x509 = pytest.importorskip("salt.utils.x509")
+
try:
import cryptography
import cryptography.x509 as cx509
diff --git a/tests/unit/states/test_pip_state.py b/tests/unit/states/test_pip_state.py
index 981ad46a135..d70b1150008 100644
--- a/tests/unit/states/test_pip_state.py
+++ b/tests/unit/states/test_pip_state.py
@@ -27,6 +27,9 @@ try:
except ImportError:
HAS_PIP = False
+MISSING_SETUP_PY_FILE = not os.path.exists(
+ os.path.join(RUNTIME_VARS.CODE_DIR, "setup.py")
+)
log = logging.getLogger(__name__)
@@ -408,6 +411,9 @@ class PipStateUtilsTest(TestCase):
@pytest.mark.skip_if_binaries_missing(*KNOWN_BINARY_NAMES, check_all=False)
@pytest.mark.requires_network
+@pytest.mark.skipif(
+ MISSING_SETUP_PY_FILE, reason="This test only work if setup.py is available"
+)
class PipStateInstallationErrorTest(TestCase):
@pytest.mark.slow_test
def test_importable_installation_error(self):
diff --git a/tests/unit/utils/test_thin.py b/tests/unit/utils/test_thin.py
index 7fd1e7b5dc3..c4e9c3b3bef 100644
--- a/tests/unit/utils/test_thin.py
+++ b/tests/unit/utils/test_thin.py
@@ -1379,6 +1379,9 @@ class SSHThinTestCase(TestCase):
assert [x for x in calls if "{}".format(_file) in x[-2]]
@pytest.mark.slow_test
+ @pytest.mark.skip_if_binaries_missing(
+ "virtualenv", reason="Needs virtualenv binary"
+ )
@pytest.mark.skip_on_windows(reason="salt-ssh does not deploy to/from windows")
def test_thin_dir(self):
"""
--
2.43.0

View File

@ -0,0 +1,841 @@
From 290d092c06dc378647dd1e49f000f012a7c07904 Mon Sep 17 00:00:00 2001
From: vzhestkov <vzhestkov@suse.com>
Date: Wed, 2 Aug 2023 16:13:49 +0200
Subject: [PATCH] Fix tests to make them running with salt-testsuite
---
tests/pytests/unit/cli/test_batch_async.py | 718 +++++++++++----------
tests/unit/cli/test_support.py | 6 +-
tests/unit/modules/test_saltsupport.py | 4 +-
3 files changed, 364 insertions(+), 364 deletions(-)
diff --git a/tests/pytests/unit/cli/test_batch_async.py b/tests/pytests/unit/cli/test_batch_async.py
index c0b708de76..e0774ffff3 100644
--- a/tests/pytests/unit/cli/test_batch_async.py
+++ b/tests/pytests/unit/cli/test_batch_async.py
@@ -1,386 +1,392 @@
+import pytest
+
import salt.ext.tornado
from salt.cli.batch_async import BatchAsync
-from salt.ext.tornado.testing import AsyncTestCase
from tests.support.mock import MagicMock, patch
-from tests.support.unit import TestCase, skipIf
-
-
-class AsyncBatchTestCase(AsyncTestCase, TestCase):
- def setUp(self):
- self.io_loop = self.get_new_ioloop()
- opts = {
- "batch": "1",
- "conf_file": {},
- "tgt": "*",
- "timeout": 5,
- "gather_job_timeout": 5,
- "batch_presence_ping_timeout": 1,
- "transport": None,
- "sock_dir": "",
- }
-
- with patch("salt.client.get_local_client", MagicMock(return_value=MagicMock())):
- with patch(
- "salt.cli.batch_async.batch_get_opts", MagicMock(return_value=opts)
- ):
- self.batch = BatchAsync(
- opts,
- MagicMock(side_effect=["1234", "1235", "1236"]),
- {
- "tgt": "",
- "fun": "",
- "kwargs": {"batch": "", "batch_presence_ping_timeout": 1},
- },
- )
-
- def test_ping_jid(self):
- self.assertEqual(self.batch.ping_jid, "1234")
-
- def test_batch_jid(self):
- self.assertEqual(self.batch.batch_jid, "1235")
-
- def test_find_job_jid(self):
- self.assertEqual(self.batch.find_job_jid, "1236")
-
- def test_batch_size(self):
- """
- Tests passing batch value as a number
- """
- self.batch.opts = {"batch": "2", "timeout": 5}
- self.batch.minions = {"foo", "bar"}
- self.batch.start_batch()
- self.assertEqual(self.batch.batch_size, 2)
-
- @salt.ext.tornado.testing.gen_test
- def test_batch_start_on_batch_presence_ping_timeout(self):
- self.batch.event = MagicMock()
- future = salt.ext.tornado.gen.Future()
- future.set_result({"minions": ["foo", "bar"]})
- self.batch.local.run_job_async.return_value = future
- ret = self.batch.start()
- # assert start_batch is called later with batch_presence_ping_timeout as param
- self.assertEqual(
- self.batch.event.io_loop.spawn_callback.call_args[0],
- (self.batch.start_batch,),
- )
- # assert test.ping called
- self.assertEqual(
- self.batch.local.run_job_async.call_args[0], ("*", "test.ping", [], "glob")
- )
- # assert targeted_minions == all minions matched by tgt
- self.assertEqual(self.batch.targeted_minions, {"foo", "bar"})
-
- @salt.ext.tornado.testing.gen_test
- def test_batch_start_on_gather_job_timeout(self):
- self.batch.event = MagicMock()
- future = salt.ext.tornado.gen.Future()
- future.set_result({"minions": ["foo", "bar"]})
- self.batch.local.run_job_async.return_value = future
- self.batch.batch_presence_ping_timeout = None
- ret = self.batch.start()
- # assert start_batch is called later with gather_job_timeout as param
- self.assertEqual(
- self.batch.event.io_loop.spawn_callback.call_args[0],
- (self.batch.start_batch,),
- )
- def test_batch_fire_start_event(self):
- self.batch.minions = {"foo", "bar"}
- self.batch.opts = {"batch": "2", "timeout": 5}
- self.batch.event = MagicMock()
- self.batch.metadata = {"mykey": "myvalue"}
- self.batch.start_batch()
- self.assertEqual(
- self.batch.event.fire_event.call_args[0],
- (
+
+@pytest.fixture
+def batch(temp_salt_master):
+ opts = {
+ "batch": "1",
+ "conf_file": {},
+ "tgt": "*",
+ "timeout": 5,
+ "gather_job_timeout": 5,
+ "batch_presence_ping_timeout": 1,
+ "transport": None,
+ "sock_dir": "",
+ }
+
+ with patch("salt.client.get_local_client", MagicMock(return_value=MagicMock())):
+ with patch("salt.cli.batch_async.batch_get_opts", MagicMock(return_value=opts)):
+ batch = BatchAsync(
+ opts,
+ MagicMock(side_effect=["1234", "1235", "1236"]),
{
- "available_minions": {"foo", "bar"},
- "down_minions": set(),
- "metadata": self.batch.metadata,
+ "tgt": "",
+ "fun": "",
+ "kwargs": {"batch": "", "batch_presence_ping_timeout": 1},
},
- "salt/batch/1235/start",
- ),
- )
+ )
+ yield batch
- @salt.ext.tornado.testing.gen_test
- def test_start_batch_calls_next(self):
- self.batch.run_next = MagicMock(return_value=MagicMock())
- self.batch.event = MagicMock()
- self.batch.start_batch()
- self.assertEqual(self.batch.initialized, True)
- self.assertEqual(
- self.batch.event.io_loop.spawn_callback.call_args[0], (self.batch.run_next,)
- )
- def test_batch_fire_done_event(self):
- self.batch.targeted_minions = {"foo", "baz", "bar"}
- self.batch.minions = {"foo", "bar"}
- self.batch.done_minions = {"foo"}
- self.batch.timedout_minions = {"bar"}
- self.batch.event = MagicMock()
- self.batch.metadata = {"mykey": "myvalue"}
- old_event = self.batch.event
- self.batch.end_batch()
- self.assertEqual(
- old_event.fire_event.call_args[0],
- (
- {
- "available_minions": {"foo", "bar"},
- "done_minions": self.batch.done_minions,
- "down_minions": {"baz"},
- "timedout_minions": self.batch.timedout_minions,
- "metadata": self.batch.metadata,
- },
- "salt/batch/1235/done",
- ),
- )
+def test_ping_jid(batch):
+ assert batch.ping_jid == "1234"
- def test_batch__del__(self):
- batch = BatchAsync(MagicMock(), MagicMock(), MagicMock())
- event = MagicMock()
- batch.event = event
- batch.__del__()
- self.assertEqual(batch.local, None)
- self.assertEqual(batch.event, None)
- self.assertEqual(batch.ioloop, None)
-
- def test_batch_close_safe(self):
- batch = BatchAsync(MagicMock(), MagicMock(), MagicMock())
- event = MagicMock()
- batch.event = event
- batch.patterns = {
- ("salt/job/1234/ret/*", "find_job_return"),
- ("salt/job/4321/ret/*", "find_job_return"),
- }
- batch.close_safe()
- self.assertEqual(batch.local, None)
- self.assertEqual(batch.event, None)
- self.assertEqual(batch.ioloop, None)
- self.assertEqual(len(event.unsubscribe.mock_calls), 2)
- self.assertEqual(len(event.remove_event_handler.mock_calls), 1)
-
- @salt.ext.tornado.testing.gen_test
- def test_batch_next(self):
- self.batch.event = MagicMock()
- self.batch.opts["fun"] = "my.fun"
- self.batch.opts["arg"] = []
- self.batch._get_next = MagicMock(return_value={"foo", "bar"})
- self.batch.batch_size = 2
- future = salt.ext.tornado.gen.Future()
- future.set_result({"minions": ["foo", "bar"]})
- self.batch.local.run_job_async.return_value = future
- self.batch.run_next()
- self.assertEqual(
- self.batch.local.run_job_async.call_args[0],
- ({"foo", "bar"}, "my.fun", [], "list"),
- )
- self.assertEqual(
- self.batch.event.io_loop.spawn_callback.call_args[0],
- (self.batch.find_job, {"foo", "bar"}),
- )
- self.assertEqual(self.batch.active, {"bar", "foo"})
-
- def test_next_batch(self):
- self.batch.minions = {"foo", "bar"}
- self.batch.batch_size = 2
- self.assertEqual(self.batch._get_next(), {"foo", "bar"})
-
- def test_next_batch_one_done(self):
- self.batch.minions = {"foo", "bar"}
- self.batch.done_minions = {"bar"}
- self.batch.batch_size = 2
- self.assertEqual(self.batch._get_next(), {"foo"})
-
- def test_next_batch_one_done_one_active(self):
- self.batch.minions = {"foo", "bar", "baz"}
- self.batch.done_minions = {"bar"}
- self.batch.active = {"baz"}
- self.batch.batch_size = 2
- self.assertEqual(self.batch._get_next(), {"foo"})
-
- def test_next_batch_one_done_one_active_one_timedout(self):
- self.batch.minions = {"foo", "bar", "baz", "faz"}
- self.batch.done_minions = {"bar"}
- self.batch.active = {"baz"}
- self.batch.timedout_minions = {"faz"}
- self.batch.batch_size = 2
- self.assertEqual(self.batch._get_next(), {"foo"})
-
- def test_next_batch_bigger_size(self):
- self.batch.minions = {"foo", "bar"}
- self.batch.batch_size = 3
- self.assertEqual(self.batch._get_next(), {"foo", "bar"})
-
- def test_next_batch_all_done(self):
- self.batch.minions = {"foo", "bar"}
- self.batch.done_minions = {"foo", "bar"}
- self.batch.batch_size = 2
- self.assertEqual(self.batch._get_next(), set())
-
- def test_next_batch_all_active(self):
- self.batch.minions = {"foo", "bar"}
- self.batch.active = {"foo", "bar"}
- self.batch.batch_size = 2
- self.assertEqual(self.batch._get_next(), set())
-
- def test_next_batch_all_timedout(self):
- self.batch.minions = {"foo", "bar"}
- self.batch.timedout_minions = {"foo", "bar"}
- self.batch.batch_size = 2
- self.assertEqual(self.batch._get_next(), set())
-
- def test_batch__event_handler_ping_return(self):
- self.batch.targeted_minions = {"foo"}
- self.batch.event = MagicMock(
- unpack=MagicMock(return_value=("salt/job/1234/ret/foo", {"id": "foo"}))
- )
- self.batch.start()
- self.assertEqual(self.batch.minions, set())
- self.batch._BatchAsync__event_handler(MagicMock())
- self.assertEqual(self.batch.minions, {"foo"})
- self.assertEqual(self.batch.done_minions, set())
-
- def test_batch__event_handler_call_start_batch_when_all_pings_return(self):
- self.batch.targeted_minions = {"foo"}
- self.batch.event = MagicMock(
- unpack=MagicMock(return_value=("salt/job/1234/ret/foo", {"id": "foo"}))
- )
- self.batch.start()
- self.batch._BatchAsync__event_handler(MagicMock())
- self.assertEqual(
- self.batch.event.io_loop.spawn_callback.call_args[0],
- (self.batch.start_batch,),
- )
- def test_batch__event_handler_not_call_start_batch_when_not_all_pings_return(self):
- self.batch.targeted_minions = {"foo", "bar"}
- self.batch.event = MagicMock(
- unpack=MagicMock(return_value=("salt/job/1234/ret/foo", {"id": "foo"}))
- )
- self.batch.start()
- self.batch._BatchAsync__event_handler(MagicMock())
- self.assertEqual(len(self.batch.event.io_loop.spawn_callback.mock_calls), 0)
+def test_batch_jid(batch):
+ assert batch.batch_jid == "1235"
+
+
+def test_find_job_jid(batch):
+ assert batch.find_job_jid == "1236"
+
- def test_batch__event_handler_batch_run_return(self):
- self.batch.event = MagicMock(
- unpack=MagicMock(return_value=("salt/job/1235/ret/foo", {"id": "foo"}))
+def test_batch_size(batch):
+ """
+ Tests passing batch value as a number
+ """
+ batch.opts = {"batch": "2", "timeout": 5}
+ batch.minions = {"foo", "bar"}
+ batch.start_batch()
+ assert batch.batch_size == 2
+
+
+def test_batch_start_on_batch_presence_ping_timeout(batch):
+ # batch_async = BatchAsyncMock();
+ batch.event = MagicMock()
+ future = salt.ext.tornado.gen.Future()
+ future.set_result({"minions": ["foo", "bar"]})
+ batch.local.run_job_async.return_value = future
+ with patch("salt.ext.tornado.gen.sleep", return_value=future):
+ # ret = batch_async.start(batch)
+ ret = batch.start()
+ # assert start_batch is called later with batch_presence_ping_timeout as param
+ assert batch.event.io_loop.spawn_callback.call_args[0] == (batch.start_batch,)
+ # assert test.ping called
+ assert batch.local.run_job_async.call_args[0] == ("*", "test.ping", [], "glob")
+ # assert targeted_minions == all minions matched by tgt
+ assert batch.targeted_minions == {"foo", "bar"}
+
+
+def test_batch_start_on_gather_job_timeout(batch):
+ # batch_async = BatchAsyncMock();
+ batch.event = MagicMock()
+ future = salt.ext.tornado.gen.Future()
+ future.set_result({"minions": ["foo", "bar"]})
+ batch.local.run_job_async.return_value = future
+ batch.batch_presence_ping_timeout = None
+ with patch("salt.ext.tornado.gen.sleep", return_value=future):
+ # ret = batch_async.start(batch)
+ ret = batch.start()
+ # assert start_batch is called later with gather_job_timeout as param
+ assert batch.event.io_loop.spawn_callback.call_args[0] == (batch.start_batch,)
+
+
+def test_batch_fire_start_event(batch):
+ batch.minions = {"foo", "bar"}
+ batch.opts = {"batch": "2", "timeout": 5}
+ batch.event = MagicMock()
+ batch.metadata = {"mykey": "myvalue"}
+ batch.start_batch()
+ assert batch.event.fire_event.call_args[0] == (
+ {
+ "available_minions": {"foo", "bar"},
+ "down_minions": set(),
+ "metadata": batch.metadata,
+ },
+ "salt/batch/1235/start",
+ )
+
+
+def test_start_batch_calls_next(batch):
+ batch.run_next = MagicMock(return_value=MagicMock())
+ batch.event = MagicMock()
+ batch.start_batch()
+ assert batch.initialized
+ assert batch.event.io_loop.spawn_callback.call_args[0] == (batch.run_next,)
+
+
+def test_batch_fire_done_event(batch):
+ batch.targeted_minions = {"foo", "baz", "bar"}
+ batch.minions = {"foo", "bar"}
+ batch.done_minions = {"foo"}
+ batch.timedout_minions = {"bar"}
+ batch.event = MagicMock()
+ batch.metadata = {"mykey": "myvalue"}
+ old_event = batch.event
+ batch.end_batch()
+ assert old_event.fire_event.call_args[0] == (
+ {
+ "available_minions": {"foo", "bar"},
+ "done_minions": batch.done_minions,
+ "down_minions": {"baz"},
+ "timedout_minions": batch.timedout_minions,
+ "metadata": batch.metadata,
+ },
+ "salt/batch/1235/done",
+ )
+
+
+def test_batch__del__(batch):
+ batch = BatchAsync(MagicMock(), MagicMock(), MagicMock())
+ event = MagicMock()
+ batch.event = event
+ batch.__del__()
+ assert batch.local is None
+ assert batch.event is None
+ assert batch.ioloop is None
+
+
+def test_batch_close_safe(batch):
+ batch = BatchAsync(MagicMock(), MagicMock(), MagicMock())
+ event = MagicMock()
+ batch.event = event
+ batch.patterns = {
+ ("salt/job/1234/ret/*", "find_job_return"),
+ ("salt/job/4321/ret/*", "find_job_return"),
+ }
+ batch.close_safe()
+ assert batch.local is None
+ assert batch.event is None
+ assert batch.ioloop is None
+ assert len(event.unsubscribe.mock_calls) == 2
+ assert len(event.remove_event_handler.mock_calls) == 1
+
+
+def test_batch_next(batch):
+ batch.event = MagicMock()
+ batch.opts["fun"] = "my.fun"
+ batch.opts["arg"] = []
+ batch._get_next = MagicMock(return_value={"foo", "bar"})
+ batch.batch_size = 2
+ future = salt.ext.tornado.gen.Future()
+ future.set_result({"minions": ["foo", "bar"]})
+ batch.local.run_job_async.return_value = future
+ with patch("salt.ext.tornado.gen.sleep", return_value=future):
+ batch.run_next()
+ assert batch.local.run_job_async.call_args[0] == (
+ {"foo", "bar"},
+ "my.fun",
+ [],
+ "list",
)
- self.batch.start()
- self.batch.active = {"foo"}
- self.batch._BatchAsync__event_handler(MagicMock())
- self.assertEqual(self.batch.active, set())
- self.assertEqual(self.batch.done_minions, {"foo"})
- self.assertEqual(
- self.batch.event.io_loop.spawn_callback.call_args[0],
- (self.batch.schedule_next,),
+ assert batch.event.io_loop.spawn_callback.call_args[0] == (
+ batch.find_job,
+ {"foo", "bar"},
)
+ assert batch.active == {"bar", "foo"}
+
- def test_batch__event_handler_find_job_return(self):
- self.batch.event = MagicMock(
- unpack=MagicMock(
- return_value=(
- "salt/job/1236/ret/foo",
- {"id": "foo", "return": "deadbeaf"},
- )
+def test_next_batch(batch):
+ batch.minions = {"foo", "bar"}
+ batch.batch_size = 2
+ assert batch._get_next() == {"foo", "bar"}
+
+
+def test_next_batch_one_done(batch):
+ batch.minions = {"foo", "bar"}
+ batch.done_minions = {"bar"}
+ batch.batch_size = 2
+ assert batch._get_next() == {"foo"}
+
+
+def test_next_batch_one_done_one_active(batch):
+ batch.minions = {"foo", "bar", "baz"}
+ batch.done_minions = {"bar"}
+ batch.active = {"baz"}
+ batch.batch_size = 2
+ assert batch._get_next() == {"foo"}
+
+
+def test_next_batch_one_done_one_active_one_timedout(batch):
+ batch.minions = {"foo", "bar", "baz", "faz"}
+ batch.done_minions = {"bar"}
+ batch.active = {"baz"}
+ batch.timedout_minions = {"faz"}
+ batch.batch_size = 2
+ assert batch._get_next() == {"foo"}
+
+
+def test_next_batch_bigger_size(batch):
+ batch.minions = {"foo", "bar"}
+ batch.batch_size = 3
+ assert batch._get_next() == {"foo", "bar"}
+
+
+def test_next_batch_all_done(batch):
+ batch.minions = {"foo", "bar"}
+ batch.done_minions = {"foo", "bar"}
+ batch.batch_size = 2
+ assert batch._get_next() == set()
+
+
+def test_next_batch_all_active(batch):
+ batch.minions = {"foo", "bar"}
+ batch.active = {"foo", "bar"}
+ batch.batch_size = 2
+ assert batch._get_next() == set()
+
+
+def test_next_batch_all_timedout(batch):
+ batch.minions = {"foo", "bar"}
+ batch.timedout_minions = {"foo", "bar"}
+ batch.batch_size = 2
+ assert batch._get_next() == set()
+
+
+def test_batch__event_handler_ping_return(batch):
+ batch.targeted_minions = {"foo"}
+ batch.event = MagicMock(
+ unpack=MagicMock(return_value=("salt/job/1234/ret/foo", {"id": "foo"}))
+ )
+ batch.start()
+ assert batch.minions == set()
+ batch._BatchAsync__event_handler(MagicMock())
+ assert batch.minions == {"foo"}
+ assert batch.done_minions == set()
+
+
+def test_batch__event_handler_call_start_batch_when_all_pings_return(batch):
+ batch.targeted_minions = {"foo"}
+ batch.event = MagicMock(
+ unpack=MagicMock(return_value=("salt/job/1234/ret/foo", {"id": "foo"}))
+ )
+ batch.start()
+ batch._BatchAsync__event_handler(MagicMock())
+ assert batch.event.io_loop.spawn_callback.call_args[0] == (batch.start_batch,)
+
+
+def test_batch__event_handler_not_call_start_batch_when_not_all_pings_return(batch):
+ batch.targeted_minions = {"foo", "bar"}
+ batch.event = MagicMock(
+ unpack=MagicMock(return_value=("salt/job/1234/ret/foo", {"id": "foo"}))
+ )
+ batch.start()
+ batch._BatchAsync__event_handler(MagicMock())
+ assert len(batch.event.io_loop.spawn_callback.mock_calls) == 0
+
+
+def test_batch__event_handler_batch_run_return(batch):
+ batch.event = MagicMock(
+ unpack=MagicMock(return_value=("salt/job/1235/ret/foo", {"id": "foo"}))
+ )
+ batch.start()
+ batch.active = {"foo"}
+ batch._BatchAsync__event_handler(MagicMock())
+ assert batch.active == set()
+ assert batch.done_minions == {"foo"}
+ assert batch.event.io_loop.spawn_callback.call_args[0] == (batch.schedule_next,)
+
+
+def test_batch__event_handler_find_job_return(batch):
+ batch.event = MagicMock(
+ unpack=MagicMock(
+ return_value=(
+ "salt/job/1236/ret/foo",
+ {"id": "foo", "return": "deadbeaf"},
)
)
- self.batch.start()
- self.batch.patterns.add(("salt/job/1236/ret/*", "find_job_return"))
- self.batch._BatchAsync__event_handler(MagicMock())
- self.assertEqual(self.batch.find_job_returned, {"foo"})
-
- @salt.ext.tornado.testing.gen_test
- def test_batch_run_next_end_batch_when_no_next(self):
- self.batch.end_batch = MagicMock()
- self.batch._get_next = MagicMock(return_value={})
- self.batch.run_next()
- self.assertEqual(len(self.batch.end_batch.mock_calls), 1)
-
- @salt.ext.tornado.testing.gen_test
- def test_batch_find_job(self):
- self.batch.event = MagicMock()
- future = salt.ext.tornado.gen.Future()
- future.set_result({})
- self.batch.local.run_job_async.return_value = future
- self.batch.minions = {"foo", "bar"}
- self.batch.jid_gen = MagicMock(return_value="1234")
- salt.ext.tornado.gen.sleep = MagicMock(return_value=future)
- self.batch.find_job({"foo", "bar"})
- self.assertEqual(
- self.batch.event.io_loop.spawn_callback.call_args[0],
- (self.batch.check_find_job, {"foo", "bar"}, "1234"),
+ )
+ batch.start()
+ batch.patterns.add(("salt/job/1236/ret/*", "find_job_return"))
+ batch._BatchAsync__event_handler(MagicMock())
+ assert batch.find_job_returned == {"foo"}
+
+
+def test_batch_run_next_end_batch_when_no_next(batch):
+ batch.end_batch = MagicMock()
+ batch._get_next = MagicMock(return_value={})
+ batch.run_next()
+ assert len(batch.end_batch.mock_calls) == 1
+
+
+def test_batch_find_job(batch):
+ batch.event = MagicMock()
+ future = salt.ext.tornado.gen.Future()
+ future.set_result({})
+ batch.local.run_job_async.return_value = future
+ batch.minions = {"foo", "bar"}
+ batch.jid_gen = MagicMock(return_value="1234")
+ with patch("salt.ext.tornado.gen.sleep", return_value=future):
+ batch.find_job({"foo", "bar"})
+ assert batch.event.io_loop.spawn_callback.call_args[0] == (
+ batch.check_find_job,
+ {"foo", "bar"},
+ "1234",
)
- @salt.ext.tornado.testing.gen_test
- def test_batch_find_job_with_done_minions(self):
- self.batch.done_minions = {"bar"}
- self.batch.event = MagicMock()
- future = salt.ext.tornado.gen.Future()
- future.set_result({})
- self.batch.local.run_job_async.return_value = future
- self.batch.minions = {"foo", "bar"}
- self.batch.jid_gen = MagicMock(return_value="1234")
- salt.ext.tornado.gen.sleep = MagicMock(return_value=future)
- self.batch.find_job({"foo", "bar"})
- self.assertEqual(
- self.batch.event.io_loop.spawn_callback.call_args[0],
- (self.batch.check_find_job, {"foo"}, "1234"),
- )
- def test_batch_check_find_job_did_not_return(self):
- self.batch.event = MagicMock()
- self.batch.active = {"foo"}
- self.batch.find_job_returned = set()
- self.batch.patterns = {("salt/job/1234/ret/*", "find_job_return")}
- self.batch.check_find_job({"foo"}, jid="1234")
- self.assertEqual(self.batch.find_job_returned, set())
- self.assertEqual(self.batch.active, set())
- self.assertEqual(len(self.batch.event.io_loop.add_callback.mock_calls), 0)
-
- def test_batch_check_find_job_did_return(self):
- self.batch.event = MagicMock()
- self.batch.find_job_returned = {"foo"}
- self.batch.patterns = {("salt/job/1234/ret/*", "find_job_return")}
- self.batch.check_find_job({"foo"}, jid="1234")
- self.assertEqual(
- self.batch.event.io_loop.spawn_callback.call_args[0],
- (self.batch.find_job, {"foo"}),
+def test_batch_find_job_with_done_minions(batch):
+ batch.done_minions = {"bar"}
+ batch.event = MagicMock()
+ future = salt.ext.tornado.gen.Future()
+ future.set_result({})
+ batch.local.run_job_async.return_value = future
+ batch.minions = {"foo", "bar"}
+ batch.jid_gen = MagicMock(return_value="1234")
+ with patch("salt.ext.tornado.gen.sleep", return_value=future):
+ batch.find_job({"foo", "bar"})
+ assert batch.event.io_loop.spawn_callback.call_args[0] == (
+ batch.check_find_job,
+ {"foo"},
+ "1234",
)
- def test_batch_check_find_job_multiple_states(self):
- self.batch.event = MagicMock()
- # currently running minions
- self.batch.active = {"foo", "bar"}
- # minion is running and find_job returns
- self.batch.find_job_returned = {"foo"}
+def test_batch_check_find_job_did_not_return(batch):
+ batch.event = MagicMock()
+ batch.active = {"foo"}
+ batch.find_job_returned = set()
+ batch.patterns = {("salt/job/1234/ret/*", "find_job_return")}
+ batch.check_find_job({"foo"}, jid="1234")
+ assert batch.find_job_returned == set()
+ assert batch.active == set()
+ assert len(batch.event.io_loop.add_callback.mock_calls) == 0
- # minion started running but find_job did not return
- self.batch.timedout_minions = {"faz"}
- # minion finished
- self.batch.done_minions = {"baz"}
+def test_batch_check_find_job_did_return(batch):
+ batch.event = MagicMock()
+ batch.find_job_returned = {"foo"}
+ batch.patterns = {("salt/job/1234/ret/*", "find_job_return")}
+ batch.check_find_job({"foo"}, jid="1234")
+ assert batch.event.io_loop.spawn_callback.call_args[0] == (batch.find_job, {"foo"})
- # both not yet done but only 'foo' responded to find_job
- not_done = {"foo", "bar"}
- self.batch.patterns = {("salt/job/1234/ret/*", "find_job_return")}
- self.batch.check_find_job(not_done, jid="1234")
+def test_batch_check_find_job_multiple_states(batch):
+ batch.event = MagicMock()
+ # currently running minions
+ batch.active = {"foo", "bar"}
- # assert 'bar' removed from active
- self.assertEqual(self.batch.active, {"foo"})
+ # minion is running and find_job returns
+ batch.find_job_returned = {"foo"}
- # assert 'bar' added to timedout_minions
- self.assertEqual(self.batch.timedout_minions, {"bar", "faz"})
+ # minion started running but find_job did not return
+ batch.timedout_minions = {"faz"}
+
+ # minion finished
+ batch.done_minions = {"baz"}
+
+ # both not yet done but only 'foo' responded to find_job
+ not_done = {"foo", "bar"}
+
+ batch.patterns = {("salt/job/1234/ret/*", "find_job_return")}
+ batch.check_find_job(not_done, jid="1234")
+
+ # assert 'bar' removed from active
+ assert batch.active == {"foo"}
+
+ # assert 'bar' added to timedout_minions
+ assert batch.timedout_minions == {"bar", "faz"}
+
+ # assert 'find_job' schedueled again only for 'foo'
+ assert batch.event.io_loop.spawn_callback.call_args[0] == (batch.find_job, {"foo"})
- # assert 'find_job' schedueled again only for 'foo'
- self.assertEqual(
- self.batch.event.io_loop.spawn_callback.call_args[0],
- (self.batch.find_job, {"foo"}),
- )
- def test_only_on_run_next_is_scheduled(self):
- self.batch.event = MagicMock()
- self.batch.scheduled = True
- self.batch.schedule_next()
- self.assertEqual(len(self.batch.event.io_loop.spawn_callback.mock_calls), 0)
+def test_only_on_run_next_is_scheduled(batch):
+ batch.event = MagicMock()
+ batch.scheduled = True
+ batch.schedule_next()
+ assert len(batch.event.io_loop.spawn_callback.mock_calls) == 0
diff --git a/tests/unit/cli/test_support.py b/tests/unit/cli/test_support.py
index dc0e99bb3d..971a0f122b 100644
--- a/tests/unit/cli/test_support.py
+++ b/tests/unit/cli/test_support.py
@@ -14,7 +14,7 @@ from salt.cli.support.collector import SaltSupport, SupportDataCollector
from salt.cli.support.console import IndentOutput
from salt.utils.color import get_colors
from salt.utils.stringutils import to_bytes
-from tests.support.mock import NO_MOCK, NO_MOCK_REASON, MagicMock, patch
+from tests.support.mock import MagicMock, patch
from tests.support.unit import TestCase, skipIf
try:
@@ -24,7 +24,6 @@ except ImportError:
@skipIf(not bool(pytest), "Pytest needs to be installed")
-@skipIf(NO_MOCK, NO_MOCK_REASON)
class SaltSupportIndentOutputTestCase(TestCase):
"""
Unit Tests for the salt-support indent output.
@@ -100,7 +99,6 @@ class SaltSupportIndentOutputTestCase(TestCase):
@skipIf(not bool(pytest), "Pytest needs to be installed")
-@skipIf(NO_MOCK, NO_MOCK_REASON)
class SaltSupportCollectorTestCase(TestCase):
"""
Collector tests.
@@ -232,7 +230,6 @@ class SaltSupportCollectorTestCase(TestCase):
@skipIf(not bool(pytest), "Pytest needs to be installed")
-@skipIf(NO_MOCK, NO_MOCK_REASON)
class SaltSupportRunnerTestCase(TestCase):
"""
Test runner class.
@@ -468,7 +465,6 @@ class SaltSupportRunnerTestCase(TestCase):
@skipIf(not bool(pytest), "Pytest needs to be installed")
-@skipIf(NO_MOCK, NO_MOCK_REASON)
class ProfileIntegrityTestCase(TestCase):
"""
Default profile integrity
diff --git a/tests/unit/modules/test_saltsupport.py b/tests/unit/modules/test_saltsupport.py
index 1715c68f4c..2afdd69b3e 100644
--- a/tests/unit/modules/test_saltsupport.py
+++ b/tests/unit/modules/test_saltsupport.py
@@ -8,7 +8,7 @@ import datetime
import salt.exceptions
from salt.modules import saltsupport
from tests.support.mixins import LoaderModuleMockMixin
-from tests.support.mock import NO_MOCK, NO_MOCK_REASON, MagicMock, patch
+from tests.support.mock import MagicMock, patch
from tests.support.unit import TestCase, skipIf
try:
@@ -18,7 +18,6 @@ except ImportError:
@skipIf(not bool(pytest), "Pytest required")
-@skipIf(NO_MOCK, NO_MOCK_REASON)
class SaltSupportModuleTestCase(TestCase, LoaderModuleMockMixin):
"""
Test cases for salt.modules.support::SaltSupportModule
@@ -361,7 +360,6 @@ professor: Farnsworth
@skipIf(not bool(pytest), "Pytest required")
-@skipIf(NO_MOCK, NO_MOCK_REASON)
class LogCollectorTestCase(TestCase, LoaderModuleMockMixin):
"""
Test cases for salt.modules.support::LogCollector
--
2.41.0

View File

@ -0,0 +1,25 @@
From 4bc3be7814daf5365d63b88f164f791ea53b418f Mon Sep 17 00:00:00 2001
From: Marek Czernek <marek.czernek@suse.com>
Date: Wed, 17 Jan 2024 15:04:53 +0100
Subject: [PATCH] Fix the aptpkg.py unit test failure
---
salt/modules/aptpkg.py | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/salt/modules/aptpkg.py b/salt/modules/aptpkg.py
index 9885e9fb60..ad5450c415 100644
--- a/salt/modules/aptpkg.py
+++ b/salt/modules/aptpkg.py
@@ -3128,7 +3128,7 @@ def expand_repo_def(**kwargs):
NOT USABLE IN THE CLI
"""
warn_until_date(
- "20240101",
+ "20250101",
"The pkg.expand_repo_def function is deprecated and set for removal "
"after {date}. This is only unsed internally by the apt pkg state "
"module. If that's not the case, please file an new issue requesting "
--
2.43.0

View File

@ -0,0 +1,23 @@
From b80c0d515e8715c160f94124dff8b5b90e773cd0 Mon Sep 17 00:00:00 2001
From: Victor Zhestkov <35733135+vzhestkov@users.noreply.github.com>
Date: Tue, 9 Nov 2021 16:19:56 +0300
Subject: [PATCH] Fix the regression for yumnotify plugin (#456)
---
scripts/suse/yum/plugins/yumnotify.py | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/scripts/suse/yum/plugins/yumnotify.py b/scripts/suse/yum/plugins/yumnotify.py
index 0d117e8946..cec5256d20 100644
--- a/scripts/suse/yum/plugins/yumnotify.py
+++ b/scripts/suse/yum/plugins/yumnotify.py
@@ -63,4 +63,4 @@ def posttrans_hook(conduit):
)
)
except OSError as e:
- print("Unable to save the cookie file: %s" % (e), file=sys.stderr)
+ sys.stderr.write("Unable to save the cookie file: %s\n" % (e))
--
2.39.2

View File

@ -0,0 +1,154 @@
From 502354be32fcff9b0607f6e435ca8825a4c2cd56 Mon Sep 17 00:00:00 2001
From: Victor Zhestkov <vzhestkov@suse.com>
Date: Thu, 3 Aug 2023 11:07:03 +0200
Subject: [PATCH] Fix the regression of user.present state when group is
unset (#589)
* Fix user.present state when group is unset
* Fix user unit test
---------
Co-authored-by: Megan Wilhite <mwilhite@vmware.com>
---
changelog/64211.fixed.md | 1 +
salt/states/user.py | 2 +-
tests/pytests/functional/states/test_user.py | 74 +++++++++++++++++++-
tests/pytests/unit/states/test_user.py | 2 +
4 files changed, 76 insertions(+), 3 deletions(-)
create mode 100644 changelog/64211.fixed.md
diff --git a/changelog/64211.fixed.md b/changelog/64211.fixed.md
new file mode 100644
index 0000000000..26b39acf02
--- /dev/null
+++ b/changelog/64211.fixed.md
@@ -0,0 +1 @@
+Fix user.present state when groups is unset to ensure the groups are unchanged, as documented.
diff --git a/salt/states/user.py b/salt/states/user.py
index ed2d5a05f4..929afb2cd1 100644
--- a/salt/states/user.py
+++ b/salt/states/user.py
@@ -100,7 +100,7 @@ def _changes(
change = {}
wanted_groups = sorted(set((groups or []) + (optional_groups or [])))
- if not remove_groups:
+ if not remove_groups or groups is None and not optional_groups:
wanted_groups = sorted(set(wanted_groups + lusr["groups"]))
if uid and lusr["uid"] != uid:
change["uid"] = uid
diff --git a/tests/pytests/functional/states/test_user.py b/tests/pytests/functional/states/test_user.py
index 09d34da168..96b1ec55c8 100644
--- a/tests/pytests/functional/states/test_user.py
+++ b/tests/pytests/functional/states/test_user.py
@@ -117,7 +117,6 @@ def test_user_present_when_home_dir_does_not_18843(states, existing_account):
ret = states.user.present(
name=existing_account.username,
home=existing_account.info.home,
- remove_groups=False,
)
assert ret.result is True
assert pathlib.Path(existing_account.info.home).is_dir()
@@ -228,7 +227,6 @@ def test_user_present_unicode(states, username, subtests):
roomnumber="①②③",
workphone="١٢٣٤",
homephone="६७८",
- remove_groups=False,
)
assert ret.result is True
@@ -429,3 +427,75 @@ def test_user_present_change_optional_groups(
user_info = modules.user.info(username)
assert user_info
assert user_info["groups"] == [group_1.name]
+
+
+@pytest.mark.skip_unless_on_linux(reason="underlying functionality only runs on Linux")
+def test_user_present_no_groups(modules, states, username):
+ """
+ test user.present when groups arg is not
+ included by the group is created in another
+ state. Re-run the states to ensure there are
+ not changes and it is idempotent.
+ """
+ groups = ["testgroup1", "testgroup2"]
+ try:
+ ret = states.group.present(name=username, gid=61121)
+ assert ret.result is True
+
+ ret = states.user.present(
+ name=username,
+ uid=61121,
+ gid=61121,
+ )
+ assert ret.result is True
+ assert ret.changes["groups"] == [username]
+ assert ret.changes["name"] == username
+
+ ret = states.group.present(
+ name=groups[0],
+ members=[username],
+ )
+ assert ret.changes["members"] == [username]
+
+ ret = states.group.present(
+ name=groups[1],
+ members=[username],
+ )
+ assert ret.changes["members"] == [username]
+
+ user_info = modules.user.info(username)
+ assert user_info
+ assert user_info["groups"] == [username, groups[0], groups[1]]
+
+ # run again, expecting no changes
+ ret = states.group.present(name=username)
+ assert ret.result is True
+ assert ret.changes == {}
+
+ ret = states.user.present(
+ name=username,
+ )
+ assert ret.result is True
+ assert ret.changes == {}
+
+ ret = states.group.present(
+ name=groups[0],
+ members=[username],
+ )
+ assert ret.result is True
+ assert ret.changes == {}
+
+ ret = states.group.present(
+ name=groups[1],
+ members=[username],
+ )
+ assert ret.result is True
+ assert ret.changes == {}
+
+ user_info = modules.user.info(username)
+ assert user_info
+ assert user_info["groups"] == [username, groups[0], groups[1]]
+ finally:
+ for group in groups:
+ ret = states.group.absent(name=group)
+ assert ret.result is True
diff --git a/tests/pytests/unit/states/test_user.py b/tests/pytests/unit/states/test_user.py
index 94e69d70ed..d50d16e3be 100644
--- a/tests/pytests/unit/states/test_user.py
+++ b/tests/pytests/unit/states/test_user.py
@@ -189,6 +189,8 @@ def test_present_uid_gid_change():
"user.chgid": Mock(),
"file.group_to_gid": mock_group_to_gid,
"file.gid_to_group": mock_gid_to_group,
+ "group.info": MagicMock(return_value=after),
+ "user.chgroups": MagicMock(return_value=True),
}
with patch.dict(user.__grains__, {"kernel": "Linux"}), patch.dict(
user.__salt__, dunder_salt
--
2.41.0

View File

@ -0,0 +1,26 @@
From c37992e305978e95da1ac0a40a8142f578271320 Mon Sep 17 00:00:00 2001
From: Victor Zhestkov <35733135+vzhestkov@users.noreply.github.com>
Date: Mon, 8 Nov 2021 17:43:02 +0300
Subject: [PATCH] Fix traceback.print_exc calls for test_pip_state (#432)
---
tests/unit/states/test_pip_state.py | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/tests/unit/states/test_pip_state.py b/tests/unit/states/test_pip_state.py
index 5e4b6e0af1..981ad46a13 100644
--- a/tests/unit/states/test_pip_state.py
+++ b/tests/unit/states/test_pip_state.py
@@ -442,7 +442,7 @@ class PipStateInstallationErrorTest(TestCase):
sys.stdout.flush()
sys.exit(2)
except Exception as exc:
- traceback.print_exc(exc, file=sys.stdout)
+ traceback.print_exc(file=sys.stdout)
sys.stdout.flush()
sys.exit(3)
sys.exit(0)
--
2.39.2

View File

@ -0,0 +1,265 @@
From 70509ff67d4eb734c88032913134092257a0d35b Mon Sep 17 00:00:00 2001
From: Flex Liu <fliu@suse.com>
Date: Tue, 2 Jul 2024 15:25:30 +0800
Subject: [PATCH] Fix user.list_groups omits remote groups
* fixes saltstack/salt#64953 user.list_groups omits remote groups
* fixes saltstack/salt#65029 support for pysss can be removed
* add changelog entries
* add tests for _getgrall and local vs remote group handling
* add negative tests for _getgrall
* root can still read the file and tests run as root
* remove permission check as its probably an unreachable edge case
---------
Co-authored-by: nicholasmhughes <nicholasmhughes@gmail.com>
---
changelog/64888.fixed.md | 1 +
changelog/64953.fixed.md | 1 +
changelog/65029.removed.md | 1 +
salt/auth/pam.py | 9 ---
salt/utils/user.py | 73 ++++++++++++-------
.../functional/utils/user/test__getgrall.py | 44 +++++++++++
tests/pytests/unit/utils/test_user.py | 29 ++++++++
7 files changed, 122 insertions(+), 36 deletions(-)
create mode 100644 changelog/64888.fixed.md
create mode 100644 changelog/64953.fixed.md
create mode 100644 changelog/65029.removed.md
create mode 100644 tests/pytests/functional/utils/user/test__getgrall.py
create mode 100644 tests/pytests/unit/utils/test_user.py
diff --git a/changelog/64888.fixed.md b/changelog/64888.fixed.md
new file mode 100644
index 0000000000..08b2efd042
--- /dev/null
+++ b/changelog/64888.fixed.md
@@ -0,0 +1 @@
+Fixed grp.getgrall() in utils/user.py causing performance issues
diff --git a/changelog/64953.fixed.md b/changelog/64953.fixed.md
new file mode 100644
index 0000000000..f0b1ed46f1
--- /dev/null
+++ b/changelog/64953.fixed.md
@@ -0,0 +1 @@
+Fix user.list_groups omits remote groups via sssd, etc.
diff --git a/changelog/65029.removed.md b/changelog/65029.removed.md
new file mode 100644
index 0000000000..d09f10b4ba
--- /dev/null
+++ b/changelog/65029.removed.md
@@ -0,0 +1 @@
+Tech Debt - support for pysss removed due to functionality addition in Python 3.3
diff --git a/salt/auth/pam.py b/salt/auth/pam.py
index f0397c1062..12af29bbdb 100644
--- a/salt/auth/pam.py
+++ b/salt/auth/pam.py
@@ -24,15 +24,6 @@ authenticated against. This defaults to `login`
The Python interface to PAM does not support authenticating as ``root``.
-.. note:: Using PAM groups with SSSD groups on python2.
-
- To use sssd with the PAM eauth module and groups the `pysss` module is
- needed. On RedHat/CentOS this is `python-sss`.
-
- This should not be needed with python >= 3.3, because the `os` modules has the
- `getgrouplist` function.
-
-
.. note:: This module executes itself in a subprocess in order to user the system python
and pam libraries. We do this to avoid openssl version conflicts when
running under a salt onedir build.
diff --git a/salt/utils/user.py b/salt/utils/user.py
index 2f1ca65cf9..3588b3804a 100644
--- a/salt/utils/user.py
+++ b/salt/utils/user.py
@@ -31,13 +31,6 @@ try:
except ImportError:
HAS_GRP = False
-try:
- import pysss
-
- HAS_PYSSS = True
-except ImportError:
- HAS_PYSSS = False
-
try:
import salt.utils.win_functions
@@ -289,30 +282,35 @@ def get_group_list(user, include_default=True):
return []
group_names = None
ugroups = set()
- if hasattr(os, "getgrouplist"):
- # Try os.getgrouplist, available in python >= 3.3
- log.trace("Trying os.getgrouplist for '%s'", user)
- try:
- user_group_list = os.getgrouplist(user, pwd.getpwnam(user).pw_gid)
- group_names = [
- _group.gr_name
- for _group in grp.getgrall()
- if _group.gr_gid in user_group_list
- ]
- except Exception: # pylint: disable=broad-except
- pass
- elif HAS_PYSSS:
- # Try pysss.getgrouplist
- log.trace("Trying pysss.getgrouplist for '%s'", user)
- try:
- group_names = list(pysss.getgrouplist(user))
- except Exception: # pylint: disable=broad-except
- pass
+ # Try os.getgrouplist, available in python >= 3.3
+ log.trace("Trying os.getgrouplist for '%s'", user)
+ try:
+ user_group_list = sorted(os.getgrouplist(user, pwd.getpwnam(user).pw_gid))
+ local_grall = _getgrall()
+ local_gids = sorted(lgrp.gr_gid for lgrp in local_grall)
+ max_idx = -1
+ local_max = local_gids[max_idx]
+ while local_max >= 65000:
+ max_idx -= 1
+ local_max = local_gids[max_idx]
+ user_group_list_local = [lgrp for lgrp in user_group_list if lgrp <= local_max]
+ user_group_list_remote = [rgrp for rgrp in user_group_list if rgrp > local_max]
+ local_group_names = [
+ _group.gr_name
+ for _group in local_grall
+ if _group.gr_gid in user_group_list_local
+ ]
+ remote_group_names = [
+ grp.getgrgid(group_id).gr_name for group_id in user_group_list_remote
+ ]
+ group_names = local_group_names + remote_group_names
+ except Exception: # pylint: disable=broad-except
+ pass
if group_names is None:
# Fall back to generic code
# Include the user's default group to match behavior of
- # os.getgrouplist() and pysss.getgrouplist()
+ # os.getgrouplist()
log.trace("Trying generic group list for '%s'", user)
group_names = [g.gr_name for g in grp.getgrall() if user in g.gr_mem]
try:
@@ -389,3 +387,24 @@ def get_gid(group=None):
return grp.getgrnam(group).gr_gid
except KeyError:
return None
+
+
+def _getgrall(root=None):
+ """
+ Alternative implementation of getgrall that uses only /etc/group
+ """
+ ret = []
+ root = "/" if not root else root
+ etc_group = os.path.join(root, "etc/group")
+ with salt.utils.files.fopen(etc_group) as fp_:
+ for line in fp_:
+ line = salt.utils.stringutils.to_unicode(line)
+ comps = line.strip().split(":")
+ # Generate a getgrall compatible output
+ comps[2] = int(comps[2])
+ if comps[3]:
+ comps[3] = [mem.strip() for mem in comps[3].split(",")]
+ else:
+ comps[3] = []
+ ret.append(grp.struct_group(comps))
+ return ret
diff --git a/tests/pytests/functional/utils/user/test__getgrall.py b/tests/pytests/functional/utils/user/test__getgrall.py
new file mode 100644
index 0000000000..db994019e6
--- /dev/null
+++ b/tests/pytests/functional/utils/user/test__getgrall.py
@@ -0,0 +1,44 @@
+from textwrap import dedent
+
+import pytest
+
+pytest.importorskip("grp")
+
+import grp
+
+import salt.utils.user
+
+
+@pytest.fixture(scope="function")
+def etc_group(tmp_path):
+ etcgrp = tmp_path / "etc" / "group"
+ etcgrp.parent.mkdir()
+ etcgrp.write_text(
+ dedent(
+ """games:x:50:
+ docker:x:959:debian,salt
+ salt:x:1000:"""
+ )
+ )
+ return etcgrp
+
+
+def test__getgrall(etc_group):
+ group_lines = [
+ ["games", "x", 50, []],
+ ["docker", "x", 959, ["debian", "salt"]],
+ ["salt", "x", 1000, []],
+ ]
+ expected_grall = [grp.struct_group(comps) for comps in group_lines]
+
+ grall = salt.utils.user._getgrall(root=str(etc_group.parent.parent))
+
+ assert grall == expected_grall
+
+
+def test__getgrall_bad_format(etc_group):
+ with etc_group.open("a") as _fp:
+ _fp.write("\n# some comment here\n")
+
+ with pytest.raises(IndexError):
+ salt.utils.user._getgrall(root=str(etc_group.parent.parent))
diff --git a/tests/pytests/unit/utils/test_user.py b/tests/pytests/unit/utils/test_user.py
new file mode 100644
index 0000000000..17c6b1551f
--- /dev/null
+++ b/tests/pytests/unit/utils/test_user.py
@@ -0,0 +1,29 @@
+from types import SimpleNamespace
+
+import pytest
+
+from tests.support.mock import MagicMock, patch
+
+pytest.importorskip("grp")
+
+import grp
+
+import salt.utils.user
+
+
+def test_get_group_list():
+ getpwname = SimpleNamespace(pw_gid=1000)
+ getgrgid = MagicMock(side_effect=[SimpleNamespace(gr_name="remote")])
+ group_lines = [
+ ["games", "x", 50, []],
+ ["salt", "x", 1000, []],
+ ]
+ getgrall = [grp.struct_group(comps) for comps in group_lines]
+ with patch("os.getgrouplist", MagicMock(return_value=[50, 1000, 12000])), patch(
+ "pwd.getpwnam", MagicMock(return_value=getpwname)
+ ), patch("salt.utils.user._getgrall", MagicMock(return_value=getgrall)), patch(
+ "grp.getgrgid", getgrgid
+ ):
+ group_list = salt.utils.user.get_group_list("salt")
+ assert group_list == ["games", "remote", "salt"]
+ getgrgid.assert_called_once()
--
2.35.3

View File

@ -0,0 +1,181 @@
From 027cbef223616f5ab6c73e60bcaa9f9e81a6ce67 Mon Sep 17 00:00:00 2001
From: Daniel Mach <daniel.mach@suse.com>
Date: Wed, 28 Jun 2023 16:39:42 +0200
Subject: [PATCH] Fix utf8 handling in 'pass' renderer and make it more
robust (#579)
* Migrate string formatting in 'pass' renderer to a f-string
* Fix utf8 handling in 'pass' renderer and make it more robust
---
changelog/64300.fixed.md | 1 +
salt/renderers/pass.py | 12 +--
tests/pytests/unit/renderers/test_pass.py | 99 +++++++++++++++++++++++
3 files changed, 103 insertions(+), 9 deletions(-)
create mode 100644 changelog/64300.fixed.md
diff --git a/changelog/64300.fixed.md b/changelog/64300.fixed.md
new file mode 100644
index 0000000000..4418db1d04
--- /dev/null
+++ b/changelog/64300.fixed.md
@@ -0,0 +1 @@
+Fix utf8 handling in 'pass' renderer
diff --git a/salt/renderers/pass.py b/salt/renderers/pass.py
index ba0f152c23..ae75bba443 100644
--- a/salt/renderers/pass.py
+++ b/salt/renderers/pass.py
@@ -145,23 +145,17 @@ def _fetch_secret(pass_path):
env["GNUPGHOME"] = pass_gnupghome
try:
- proc = Popen(cmd, stdout=PIPE, stderr=PIPE, env=env)
+ proc = Popen(cmd, stdout=PIPE, stderr=PIPE, env=env, encoding="utf-8")
pass_data, pass_error = proc.communicate()
pass_returncode = proc.returncode
- except OSError as e:
+ except (OSError, UnicodeDecodeError) as e:
pass_data, pass_error = "", str(e)
pass_returncode = 1
# The version of pass used during development sent output to
# stdout instead of stderr even though its returncode was non zero.
if pass_returncode or not pass_data:
- try:
- pass_error = pass_error.decode("utf-8")
- except (AttributeError, ValueError):
- pass
- msg = "Could not fetch secret '{}' from the password store: {}".format(
- pass_path, pass_error
- )
+ msg = f"Could not fetch secret '{pass_path}' from the password store: {pass_error}"
if pass_strict_fetch:
raise SaltRenderError(msg)
else:
diff --git a/tests/pytests/unit/renderers/test_pass.py b/tests/pytests/unit/renderers/test_pass.py
index 1e2ebb7ea8..f7c79e1fe1 100644
--- a/tests/pytests/unit/renderers/test_pass.py
+++ b/tests/pytests/unit/renderers/test_pass.py
@@ -1,8 +1,12 @@
import importlib
+import os
+import shutil
+import tempfile
import pytest
import salt.exceptions
+import salt.utils.files
from tests.support.mock import MagicMock, patch
# "pass" is a reserved keyword, we need to import it differently
@@ -19,6 +23,47 @@ def configure_loader_modules(master_opts):
}
+@pytest.fixture()
+def pass_executable(request):
+ tmp_dir = tempfile.mkdtemp(prefix="salt_pass_")
+ pass_path = os.path.join(tmp_dir, "pass")
+ with salt.utils.files.fopen(pass_path, "w") as f:
+ f.write("#!/bin/sh\n")
+ # return the pass path wrapped in unicode characters
+ # pass args ($1, $2) are ("show", <pass_path>)
+ f.write('echo "α>>> $2 <<<β"\n')
+ os.chmod(pass_path, 0o755)
+ yield pass_path
+ shutil.rmtree(tmp_dir)
+
+
+@pytest.fixture()
+def pass_executable_error(request):
+ tmp_dir = tempfile.mkdtemp(prefix="salt_pass_")
+ pass_path = os.path.join(tmp_dir, "pass")
+ with salt.utils.files.fopen(pass_path, "w") as f:
+ f.write("#!/bin/sh\n")
+ # return error message with unicode characters
+ f.write('echo "ERROR: αβγ" >&2\n')
+ f.write("exit 1\n")
+ os.chmod(pass_path, 0o755)
+ yield pass_path
+ shutil.rmtree(tmp_dir)
+
+
+@pytest.fixture()
+def pass_executable_invalid_utf8(request):
+ tmp_dir = tempfile.mkdtemp(prefix="salt_pass_")
+ pass_path = os.path.join(tmp_dir, "pass")
+ with salt.utils.files.fopen(pass_path, "wb") as f:
+ f.write(b"#!/bin/sh\n")
+ # return invalid utf-8 sequence
+ f.write(b'echo "\x80\x81"\n')
+ os.chmod(pass_path, 0o755)
+ yield pass_path
+ shutil.rmtree(tmp_dir)
+
+
# The default behavior is that if fetching a secret from pass fails,
# the value is passed through. Even the trailing newlines are preserved.
def test_passthrough():
@@ -161,3 +206,57 @@ def test_env():
call_args, call_kwargs = popen_mock.call_args_list[0]
assert call_kwargs["env"]["GNUPGHOME"] == config["pass_gnupghome"]
assert call_kwargs["env"]["PASSWORD_STORE_DIR"] == config["pass_dir"]
+
+
+@pytest.mark.skip_on_windows(reason="Not supported on Windows")
+def test_utf8(pass_executable):
+ config = {
+ "pass_variable_prefix": "pass:",
+ "pass_strict_fetch": True,
+ }
+ mocks = {
+ "_get_pass_exec": MagicMock(return_value=pass_executable),
+ }
+
+ pass_path = "pass:secret"
+ with patch.dict(pass_.__opts__, config), patch.dict(pass_.__dict__, mocks):
+ result = pass_.render(pass_path)
+ assert result == "α>>> secret <<<β"
+
+
+@pytest.mark.skip_on_windows(reason="Not supported on Windows")
+def test_utf8_error(pass_executable_error):
+ config = {
+ "pass_variable_prefix": "pass:",
+ "pass_strict_fetch": True,
+ }
+ mocks = {
+ "_get_pass_exec": MagicMock(return_value=pass_executable_error),
+ }
+
+ pass_path = "pass:secret"
+ with patch.dict(pass_.__opts__, config), patch.dict(pass_.__dict__, mocks):
+ with pytest.raises(
+ salt.exceptions.SaltRenderError,
+ match=r"Could not fetch secret 'secret' from the password store: ERROR: αβγ",
+ ):
+ result = pass_.render(pass_path)
+
+
+@pytest.mark.skip_on_windows(reason="Not supported on Windows")
+def test_invalid_utf8(pass_executable_invalid_utf8):
+ config = {
+ "pass_variable_prefix": "pass:",
+ "pass_strict_fetch": True,
+ }
+ mocks = {
+ "_get_pass_exec": MagicMock(return_value=pass_executable_invalid_utf8),
+ }
+
+ pass_path = "pass:secret"
+ with patch.dict(pass_.__opts__, config), patch.dict(pass_.__dict__, mocks):
+ with pytest.raises(
+ salt.exceptions.SaltRenderError,
+ match=r"Could not fetch secret 'secret' from the password store: 'utf-8' codec can't decode byte 0x80 in position 0: invalid start byte",
+ ):
+ result = pass_.render(pass_path)
--
2.41.0

View File

@ -0,0 +1,58 @@
From c0fae09e5a4f6997a60007d970c7c6a5614d9102 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Pablo=20Su=C3=A1rez=20Hern=C3=A1ndez?=
<psuarezhernandez@suse.com>
Date: Wed, 19 Apr 2023 10:41:28 +0100
Subject: [PATCH] Fix version detection and avoid building and testing
failures
---
salt/version.py | 20 ++------------------
1 file changed, 2 insertions(+), 18 deletions(-)
diff --git a/salt/version.py b/salt/version.py
index 43cb5f86f7..67719bd020 100644
--- a/salt/version.py
+++ b/salt/version.py
@@ -1,7 +1,6 @@
"""
Set up the version of Salt
"""
-import argparse
import operator
import os
import platform
@@ -78,7 +77,7 @@ class SaltVersionsInfo(type):
ALUMINIUM = SaltVersion("Aluminium" , info=3003, released=True)
SILICON = SaltVersion("Silicon" , info=3004, released=True)
PHOSPHORUS = SaltVersion("Phosphorus" , info=3005, released=True)
- SULFUR = SaltVersion("Sulfur" , info=(3006, 0), released=True)
+ SULFUR = SaltVersion("Sulfur" , info=(3006, 0))
CHLORINE = SaltVersion("Chlorine" , info=(3007, 0))
ARGON = SaltVersion("Argon" , info=(3008, 0))
POTASSIUM = SaltVersion("Potassium" , info=(3009, 0))
@@ -922,20 +921,5 @@ def versions_report(include_salt_cloud=False, include_extensions=True):
yield from info
-def _parser():
- parser = argparse.ArgumentParser()
- parser.add_argument(
- "--next-release", help="Return the next release", action="store_true"
- )
- # When pip installing we pass in other args to this script.
- # This allows us to catch those args but not use them
- parser.add_argument("unknown", nargs=argparse.REMAINDER)
- return parser.parse_args()
-
-
if __name__ == "__main__":
- args = _parser()
- if args.next_release:
- print(__saltstack_version__.next_release())
- else:
- print(__version__)
+ print(__version__)
--
2.39.2

View File

@ -0,0 +1,833 @@
From 7051f86bb48dbd618a7422d469f3aae4c6f18008 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Pablo=20Su=C3=A1rez=20Hern=C3=A1ndez?=
<psuarezhernandez@suse.com>
Date: Thu, 31 Aug 2023 10:41:53 +0100
Subject: [PATCH] Fixed gitfs cachedir_basename to avoid hash collisions
(#599)
(bsc#1193948, bsc#1214797, CVE-2023-20898)
Fix gitfs tests
It's `gitfs` not `gtfs`, plus some code fixes and cleanup
Signed-off-by: Pedro Algarvio <palgarvio@vmware.com>
fix doc
wrap sha in base64
clean up cache name
stop branch collision
run pre
Co-authored-by: cmcmarrow <charles.mcmarrow.4@gmail.com>
---
changelog/cve-2023-20898.security.md | 1 +
salt/utils/gitfs.py | 83 ++++++-
tests/pytests/unit/utils/test_gitfs.py | 255 +++++++++++++++++++++
tests/unit/utils/test_gitfs.py | 305 ++++++-------------------
4 files changed, 403 insertions(+), 241 deletions(-)
create mode 100644 changelog/cve-2023-20898.security.md
create mode 100644 tests/pytests/unit/utils/test_gitfs.py
diff --git a/changelog/cve-2023-20898.security.md b/changelog/cve-2023-20898.security.md
new file mode 100644
index 0000000000..44f1729192
--- /dev/null
+++ b/changelog/cve-2023-20898.security.md
@@ -0,0 +1 @@
+Fixed gitfs cachedir_basename to avoid hash collisions. Added MP Lock to gitfs. These changes should stop race conditions.
diff --git a/salt/utils/gitfs.py b/salt/utils/gitfs.py
index 38e84f38aa..af61aa0dda 100644
--- a/salt/utils/gitfs.py
+++ b/salt/utils/gitfs.py
@@ -3,6 +3,7 @@ Classes which provide the shared base for GitFS, git_pillar, and winrepo
"""
+import base64
import contextlib
import copy
import errno
@@ -11,10 +12,12 @@ import glob
import hashlib
import io
import logging
+import multiprocessing
import os
import shlex
import shutil
import stat
+import string
import subprocess
import time
import weakref
@@ -22,6 +25,7 @@ from datetime import datetime
import salt.ext.tornado.ioloop
import salt.fileserver
+import salt.syspaths
import salt.utils.configparser
import salt.utils.data
import salt.utils.files
@@ -34,7 +38,6 @@ import salt.utils.stringutils
import salt.utils.url
import salt.utils.user
import salt.utils.versions
-import salt.syspaths
from salt.config import DEFAULT_MASTER_OPTS as _DEFAULT_MASTER_OPTS
from salt.exceptions import FileserverConfigError, GitLockError, get_error_message
from salt.utils.event import tagify
@@ -226,6 +229,10 @@ class GitProvider:
invoking the parent class' __init__.
"""
+ # master lock should only be locked for very short periods of time (seconds)
+ # the master lock should be used whenever the git provider reads or writes to one of its locks
+ _master_lock = multiprocessing.Lock()
+
def __init__(
self,
opts,
@@ -452,13 +459,44 @@ class GitProvider:
failhard(self.role)
hash_type = getattr(hashlib, self.opts.get("hash_type", "md5"))
+ # Generate full id.
+ # Full id helps decrease the chances of collisions in the gitfs cache.
+ try:
+ target = str(self.get_checkout_target())
+ except AttributeError:
+ target = ""
+ self._full_id = "-".join(
+ [
+ getattr(self, "name", ""),
+ self.id,
+ getattr(self, "env", ""),
+ getattr(self, "_root", ""),
+ self.role,
+ getattr(self, "base", ""),
+ getattr(self, "branch", ""),
+ target,
+ ]
+ )
# We loaded this data from yaml configuration files, so, its safe
# to use UTF-8
- self.hash = hash_type(self.id.encode("utf-8")).hexdigest()
- self.cachedir_basename = getattr(self, "name", self.hash)
+ base64_hash = str(
+ base64.b64encode(hash_type(self._full_id.encode("utf-8")).digest()),
+ encoding="ascii", # base64 only outputs ascii
+ ).replace(
+ "/", "_"
+ ) # replace "/" with "_" to not cause trouble with file system
+
+ # limit name length to 19, so we don't eat up all the path length for windows
+ # this is due to pygit2 limitations
+ # replace any unknown char with "_" to not cause trouble with file system
+ name_chars = string.ascii_letters + string.digits + "-"
+ cache_name = "".join(
+ c if c in name_chars else "_" for c in getattr(self, "name", "")[:19]
+ )
+
+ self.cachedir_basename = f"{cache_name}-{base64_hash}"
self.cachedir = salt.utils.path.join(cache_root, self.cachedir_basename)
self.linkdir = salt.utils.path.join(cache_root, "links", self.cachedir_basename)
-
if not os.path.isdir(self.cachedir):
os.makedirs(self.cachedir)
@@ -473,6 +511,12 @@ class GitProvider:
log.critical(msg, exc_info=True)
failhard(self.role)
+ def full_id(self):
+ return self._full_id
+
+ def get_cachedir_basename(self):
+ return self.cachedir_basename
+
def _get_envs_from_ref_paths(self, refs):
"""
Return the names of remote refs (stripped of the remote name) and tags
@@ -663,6 +707,19 @@ class GitProvider:
"""
Clear update.lk
"""
+ if self.__class__._master_lock.acquire(timeout=60) is False:
+ # if gitfs works right we should never see this timeout error.
+ log.error("gitfs master lock timeout!")
+ raise TimeoutError("gitfs master lock timeout!")
+ try:
+ return self._clear_lock(lock_type)
+ finally:
+ self.__class__._master_lock.release()
+
+ def _clear_lock(self, lock_type="update"):
+ """
+ Clear update.lk without MultiProcessing locks
+ """
lock_file = self._get_lock_file(lock_type=lock_type)
def _add_error(errlist, exc):
@@ -838,6 +895,20 @@ class GitProvider:
"""
Place a lock file if (and only if) it does not already exist.
"""
+ if self.__class__._master_lock.acquire(timeout=60) is False:
+ # if gitfs works right we should never see this timeout error.
+ log.error("gitfs master lock timeout!")
+ raise TimeoutError("gitfs master lock timeout!")
+ try:
+ return self.__lock(lock_type, failhard)
+ finally:
+ self.__class__._master_lock.release()
+
+ def __lock(self, lock_type="update", failhard=False):
+ """
+ Place a lock file if (and only if) it does not already exist.
+ Without MultiProcessing locks.
+ """
try:
fh_ = os.open(
self._get_lock_file(lock_type), os.O_CREAT | os.O_EXCL | os.O_WRONLY
@@ -904,9 +975,9 @@ class GitProvider:
lock_type,
lock_file,
)
- success, fail = self.clear_lock()
+ success, fail = self._clear_lock()
if success:
- return self._lock(lock_type="update", failhard=failhard)
+ return self.__lock(lock_type="update", failhard=failhard)
elif failhard:
raise
return
diff --git a/tests/pytests/unit/utils/test_gitfs.py b/tests/pytests/unit/utils/test_gitfs.py
new file mode 100644
index 0000000000..e9915de412
--- /dev/null
+++ b/tests/pytests/unit/utils/test_gitfs.py
@@ -0,0 +1,255 @@
+import os
+import string
+import time
+
+import pytest
+
+import salt.fileserver.gitfs
+import salt.utils.gitfs
+from salt.exceptions import FileserverConfigError
+from tests.support.helpers import patched_environ
+from tests.support.mock import MagicMock, patch
+
+try:
+ HAS_PYGIT2 = (
+ salt.utils.gitfs.PYGIT2_VERSION
+ and salt.utils.gitfs.PYGIT2_VERSION >= salt.utils.gitfs.PYGIT2_MINVER
+ and salt.utils.gitfs.LIBGIT2_VERSION
+ and salt.utils.gitfs.LIBGIT2_VERSION >= salt.utils.gitfs.LIBGIT2_MINVER
+ )
+except AttributeError:
+ HAS_PYGIT2 = False
+
+
+if HAS_PYGIT2:
+ import pygit2
+
+
+@pytest.mark.parametrize(
+ "role_name,role_class",
+ (
+ ("gitfs", salt.utils.gitfs.GitFS),
+ ("git_pillar", salt.utils.gitfs.GitPillar),
+ ("winrepo", salt.utils.gitfs.WinRepo),
+ ),
+)
+def test_provider_case_insensitive_gitfs_provider(minion_opts, role_name, role_class):
+ """
+ Ensure that both lowercase and non-lowercase values are supported
+ """
+ provider = "GitPython"
+ key = "{}_provider".format(role_name)
+ with patch.object(role_class, "verify_gitpython", MagicMock(return_value=True)):
+ with patch.object(role_class, "verify_pygit2", MagicMock(return_value=False)):
+ args = [minion_opts, {}]
+ kwargs = {"init_remotes": False}
+ if role_name == "winrepo":
+ kwargs["cache_root"] = "/tmp/winrepo-dir"
+ with patch.dict(minion_opts, {key: provider}):
+ # Try to create an instance with uppercase letters in
+ # provider name. If it fails then a
+ # FileserverConfigError will be raised, so no assert is
+ # necessary.
+ role_class(*args, **kwargs)
+ # Now try to instantiate an instance with all lowercase
+ # letters. Again, no need for an assert here.
+ role_class(*args, **kwargs)
+
+
+@pytest.mark.parametrize(
+ "role_name,role_class",
+ (
+ ("gitfs", salt.utils.gitfs.GitFS),
+ ("git_pillar", salt.utils.gitfs.GitPillar),
+ ("winrepo", salt.utils.gitfs.WinRepo),
+ ),
+)
+def test_valid_provider_gitfs_provider(minion_opts, role_name, role_class):
+ """
+ Ensure that an invalid provider is not accepted, raising a
+ FileserverConfigError.
+ """
+
+ def _get_mock(verify, provider):
+ """
+ Return a MagicMock with the desired return value
+ """
+ return MagicMock(return_value=verify.endswith(provider))
+
+ key = "{}_provider".format(role_name)
+ for provider in salt.utils.gitfs.GIT_PROVIDERS:
+ verify = "verify_gitpython"
+ mock1 = _get_mock(verify, provider)
+ with patch.object(role_class, verify, mock1):
+ verify = "verify_pygit2"
+ mock2 = _get_mock(verify, provider)
+ with patch.object(role_class, verify, mock2):
+ args = [minion_opts, {}]
+ kwargs = {"init_remotes": False}
+ if role_name == "winrepo":
+ kwargs["cache_root"] = "/tmp/winrepo-dir"
+ with patch.dict(minion_opts, {key: provider}):
+ role_class(*args, **kwargs)
+ with patch.dict(minion_opts, {key: "foo"}):
+ # Set the provider name to a known invalid provider
+ # and make sure it raises an exception.
+ with pytest.raises(FileserverConfigError):
+ role_class(*args, **kwargs)
+
+
+@pytest.fixture
+def _prepare_remote_repository_pygit2(tmp_path):
+ remote = os.path.join(tmp_path, "pygit2-repo")
+ filecontent = "This is an empty README file"
+ filename = "README"
+ signature = pygit2.Signature(
+ "Dummy Commiter", "dummy@dummy.com", int(time.time()), 0
+ )
+ repository = pygit2.init_repository(remote, False)
+ builder = repository.TreeBuilder()
+ tree = builder.write()
+ commit = repository.create_commit(
+ "HEAD", signature, signature, "Create master branch", tree, []
+ )
+ repository.create_reference("refs/tags/simple_tag", commit)
+ with salt.utils.files.fopen(
+ os.path.join(repository.workdir, filename), "w"
+ ) as file:
+ file.write(filecontent)
+ blob = repository.create_blob_fromworkdir(filename)
+ builder = repository.TreeBuilder()
+ builder.insert(filename, blob, pygit2.GIT_FILEMODE_BLOB)
+ tree = builder.write()
+ repository.index.read()
+ repository.index.add(filename)
+ repository.index.write()
+ commit = repository.create_commit(
+ "HEAD",
+ signature,
+ signature,
+ "Added a README",
+ tree,
+ [repository.head.target],
+ )
+ repository.create_tag(
+ "annotated_tag", commit, pygit2.GIT_OBJ_COMMIT, signature, "some message"
+ )
+ return remote
+
+
+@pytest.fixture
+def _prepare_provider(tmp_path, minion_opts, _prepare_remote_repository_pygit2):
+ cache = tmp_path / "pygit2-repo-cache"
+ minion_opts.update(
+ {
+ "cachedir": str(cache),
+ "gitfs_disable_saltenv_mapping": False,
+ "gitfs_base": "master",
+ "gitfs_insecure_auth": False,
+ "gitfs_mountpoint": "",
+ "gitfs_passphrase": "",
+ "gitfs_password": "",
+ "gitfs_privkey": "",
+ "gitfs_provider": "pygit2",
+ "gitfs_pubkey": "",
+ "gitfs_ref_types": ["branch", "tag", "sha"],
+ "gitfs_refspecs": [
+ "+refs/heads/*:refs/remotes/origin/*",
+ "+refs/tags/*:refs/tags/*",
+ ],
+ "gitfs_root": "",
+ "gitfs_saltenv_blacklist": [],
+ "gitfs_saltenv_whitelist": [],
+ "gitfs_ssl_verify": True,
+ "gitfs_update_interval": 3,
+ "gitfs_user": "",
+ "verified_gitfs_provider": "pygit2",
+ }
+ )
+ per_remote_defaults = {
+ "base": "master",
+ "disable_saltenv_mapping": False,
+ "insecure_auth": False,
+ "ref_types": ["branch", "tag", "sha"],
+ "passphrase": "",
+ "mountpoint": "",
+ "password": "",
+ "privkey": "",
+ "pubkey": "",
+ "refspecs": [
+ "+refs/heads/*:refs/remotes/origin/*",
+ "+refs/tags/*:refs/tags/*",
+ ],
+ "root": "",
+ "saltenv_blacklist": [],
+ "saltenv_whitelist": [],
+ "ssl_verify": True,
+ "update_interval": 60,
+ "user": "",
+ }
+ per_remote_only = ("all_saltenvs", "name", "saltenv")
+ override_params = tuple(per_remote_defaults)
+ cache_root = cache / "gitfs"
+ role = "gitfs"
+ provider = salt.utils.gitfs.Pygit2(
+ minion_opts,
+ _prepare_remote_repository_pygit2,
+ per_remote_defaults,
+ per_remote_only,
+ override_params,
+ str(cache_root),
+ role,
+ )
+ return provider
+
+
+@pytest.mark.skipif(not HAS_PYGIT2, reason="This host lacks proper pygit2 support")
+@pytest.mark.skip_on_windows(
+ reason="Skip Pygit2 on windows, due to pygit2 access error on windows"
+)
+def test_checkout_pygit2(_prepare_provider):
+ provider = _prepare_provider
+ provider.remotecallbacks = None
+ provider.credentials = None
+ provider.init_remote()
+ provider.fetch()
+ provider.branch = "master"
+ assert provider.cachedir in provider.checkout()
+ provider.branch = "simple_tag"
+ assert provider.cachedir in provider.checkout()
+ provider.branch = "annotated_tag"
+ assert provider.cachedir in provider.checkout()
+ provider.branch = "does_not_exist"
+ assert provider.checkout() is None
+
+
+@pytest.mark.skipif(not HAS_PYGIT2, reason="This host lacks proper pygit2 support")
+@pytest.mark.skip_on_windows(
+ reason="Skip Pygit2 on windows, due to pygit2 access error on windows"
+)
+def test_checkout_pygit2_with_home_env_unset(_prepare_provider):
+ provider = _prepare_provider
+ provider.remotecallbacks = None
+ provider.credentials = None
+ with patched_environ(__cleanup__=["HOME"]):
+ assert "HOME" not in os.environ
+ provider.init_remote()
+ provider.fetch()
+ assert "HOME" in os.environ
+
+
+def test_full_id_pygit2(_prepare_provider):
+ assert _prepare_provider.full_id().startswith("-")
+ assert _prepare_provider.full_id().endswith("/pygit2-repo---gitfs-master--")
+
+
+@pytest.mark.skipif(not HAS_PYGIT2, reason="This host lacks proper pygit2 support")
+@pytest.mark.skip_on_windows(
+ reason="Skip Pygit2 on windows, due to pygit2 access error on windows"
+)
+def test_get_cachedir_basename_pygit2(_prepare_provider):
+ basename = _prepare_provider.get_cachedir_basename()
+ assert len(basename) == 45
+ assert basename[0] == "-"
+ # check that a valid base64 is given '/' -> '_'
+ assert all(c in string.ascii_letters + string.digits + "+_=" for c in basename[1:])
diff --git a/tests/unit/utils/test_gitfs.py b/tests/unit/utils/test_gitfs.py
index 7c400b69af..6d8e97a239 100644
--- a/tests/unit/utils/test_gitfs.py
+++ b/tests/unit/utils/test_gitfs.py
@@ -2,37 +2,20 @@
These only test the provider selection and verification logic, they do not init
any remotes.
"""
-import os
-import shutil
-from time import time
+
+import tempfile
import pytest
+import salt.ext.tornado.ioloop
import salt.fileserver.gitfs
import salt.utils.files
import salt.utils.gitfs
+import salt.utils.path
import salt.utils.platform
-import tests.support.paths
-from salt.exceptions import FileserverConfigError
-from tests.support.helpers import patched_environ
from tests.support.mixins import AdaptedConfigurationTestCaseMixin
-from tests.support.mock import MagicMock, patch
from tests.support.unit import TestCase
-try:
- HAS_PYGIT2 = (
- salt.utils.gitfs.PYGIT2_VERSION
- and salt.utils.gitfs.PYGIT2_VERSION >= salt.utils.gitfs.PYGIT2_MINVER
- and salt.utils.gitfs.LIBGIT2_VERSION
- and salt.utils.gitfs.LIBGIT2_VERSION >= salt.utils.gitfs.LIBGIT2_MINVER
- )
-except AttributeError:
- HAS_PYGIT2 = False
-
-
-if HAS_PYGIT2:
- import pygit2
-
def _clear_instance_map():
try:
@@ -45,6 +28,9 @@ def _clear_instance_map():
class TestGitBase(TestCase, AdaptedConfigurationTestCaseMixin):
def setUp(self):
+ self._tmp_dir = tempfile.TemporaryDirectory()
+ tmp_name = self._tmp_dir.name
+
class MockedProvider(
salt.utils.gitfs.GitProvider
): # pylint: disable=abstract-method
@@ -71,6 +57,7 @@ class TestGitBase(TestCase, AdaptedConfigurationTestCaseMixin):
)
def init_remote(self):
+ self.gitdir = salt.utils.path.join(tmp_name, ".git")
self.repo = True
new = False
return new
@@ -107,6 +94,7 @@ class TestGitBase(TestCase, AdaptedConfigurationTestCaseMixin):
for remote in self.main_class.remotes:
remote.fetched = False
del self.main_class
+ self._tmp_dir.cleanup()
def test_update_all(self):
self.main_class.update()
@@ -126,226 +114,73 @@ class TestGitBase(TestCase, AdaptedConfigurationTestCaseMixin):
self.assertTrue(self.main_class.remotes[0].fetched)
self.assertFalse(self.main_class.remotes[1].fetched)
-
-class TestGitFSProvider(TestCase):
- def setUp(self):
- self.opts = {"cachedir": "/tmp/gitfs-test-cache"}
-
- def tearDown(self):
- self.opts = None
-
- def test_provider_case_insensitive(self):
- """
- Ensure that both lowercase and non-lowercase values are supported
- """
- provider = "GitPython"
- for role_name, role_class in (
- ("gitfs", salt.utils.gitfs.GitFS),
- ("git_pillar", salt.utils.gitfs.GitPillar),
- ("winrepo", salt.utils.gitfs.WinRepo),
- ):
-
- key = "{}_provider".format(role_name)
- with patch.object(
- role_class, "verify_gitpython", MagicMock(return_value=True)
- ):
- with patch.object(
- role_class, "verify_pygit2", MagicMock(return_value=False)
- ):
- args = [self.opts, {}]
- kwargs = {"init_remotes": False}
- if role_name == "winrepo":
- kwargs["cache_root"] = "/tmp/winrepo-dir"
- with patch.dict(self.opts, {key: provider}):
- # Try to create an instance with uppercase letters in
- # provider name. If it fails then a
- # FileserverConfigError will be raised, so no assert is
- # necessary.
- role_class(*args, **kwargs)
- # Now try to instantiate an instance with all lowercase
- # letters. Again, no need for an assert here.
- role_class(*args, **kwargs)
-
- def test_valid_provider(self):
- """
- Ensure that an invalid provider is not accepted, raising a
- FileserverConfigError.
- """
-
- def _get_mock(verify, provider):
- """
- Return a MagicMock with the desired return value
- """
- return MagicMock(return_value=verify.endswith(provider))
-
- for role_name, role_class in (
- ("gitfs", salt.utils.gitfs.GitFS),
- ("git_pillar", salt.utils.gitfs.GitPillar),
- ("winrepo", salt.utils.gitfs.WinRepo),
- ):
- key = "{}_provider".format(role_name)
- for provider in salt.utils.gitfs.GIT_PROVIDERS:
- verify = "verify_gitpython"
- mock1 = _get_mock(verify, provider)
- with patch.object(role_class, verify, mock1):
- verify = "verify_pygit2"
- mock2 = _get_mock(verify, provider)
- with patch.object(role_class, verify, mock2):
- args = [self.opts, {}]
- kwargs = {"init_remotes": False}
- if role_name == "winrepo":
- kwargs["cache_root"] = "/tmp/winrepo-dir"
-
- with patch.dict(self.opts, {key: provider}):
- role_class(*args, **kwargs)
-
- with patch.dict(self.opts, {key: "foo"}):
- # Set the provider name to a known invalid provider
- # and make sure it raises an exception.
- self.assertRaises(
- FileserverConfigError, role_class, *args, **kwargs
- )
-
-
-@pytest.mark.skipif(not HAS_PYGIT2, reason="This host lacks proper pygit2 support")
-@pytest.mark.skip_on_windows(
- reason="Skip Pygit2 on windows, due to pygit2 access error on windows"
-)
-class TestPygit2(TestCase):
- def _prepare_remote_repository(self, path):
- shutil.rmtree(path, ignore_errors=True)
-
- filecontent = "This is an empty README file"
- filename = "README"
-
- signature = pygit2.Signature(
- "Dummy Commiter", "dummy@dummy.com", int(time()), 0
+ def test_full_id(self):
+ self.assertEqual(
+ self.main_class.remotes[0].full_id(), "-file://repo1.git---gitfs-master--"
)
- repository = pygit2.init_repository(path, False)
- builder = repository.TreeBuilder()
- tree = builder.write()
- commit = repository.create_commit(
- "HEAD", signature, signature, "Create master branch", tree, []
+ def test_full_id_with_name(self):
+ self.assertEqual(
+ self.main_class.remotes[1].full_id(),
+ "repo2-file://repo2.git---gitfs-master--",
)
- repository.create_reference("refs/tags/simple_tag", commit)
- with salt.utils.files.fopen(
- os.path.join(repository.workdir, filename), "w"
- ) as file:
- file.write(filecontent)
-
- blob = repository.create_blob_fromworkdir(filename)
- builder = repository.TreeBuilder()
- builder.insert(filename, blob, pygit2.GIT_FILEMODE_BLOB)
- tree = builder.write()
-
- repository.index.read()
- repository.index.add(filename)
- repository.index.write()
-
- commit = repository.create_commit(
- "HEAD",
- signature,
- signature,
- "Added a README",
- tree,
- [repository.head.target],
- )
- repository.create_tag(
- "annotated_tag", commit, pygit2.GIT_OBJ_COMMIT, signature, "some message"
+ def test_get_cachedir_basename(self):
+ self.assertEqual(
+ self.main_class.remotes[0].get_cachedir_basename(),
+ "-jXhnbGDemchtZwTwaD2s6VOaVvs98a7w+AtiYlmOVb0=",
)
- def _prepare_cache_repository(self, remote, cache):
- opts = {
- "cachedir": cache,
- "__role": "minion",
- "gitfs_disable_saltenv_mapping": False,
- "gitfs_base": "master",
- "gitfs_insecure_auth": False,
- "gitfs_mountpoint": "",
- "gitfs_passphrase": "",
- "gitfs_password": "",
- "gitfs_privkey": "",
- "gitfs_provider": "pygit2",
- "gitfs_pubkey": "",
- "gitfs_ref_types": ["branch", "tag", "sha"],
- "gitfs_refspecs": [
- "+refs/heads/*:refs/remotes/origin/*",
- "+refs/tags/*:refs/tags/*",
- ],
- "gitfs_root": "",
- "gitfs_saltenv_blacklist": [],
- "gitfs_saltenv_whitelist": [],
- "gitfs_ssl_verify": True,
- "gitfs_update_interval": 3,
- "gitfs_user": "",
- "verified_gitfs_provider": "pygit2",
- }
- per_remote_defaults = {
- "base": "master",
- "disable_saltenv_mapping": False,
- "insecure_auth": False,
- "ref_types": ["branch", "tag", "sha"],
- "passphrase": "",
- "mountpoint": "",
- "password": "",
- "privkey": "",
- "pubkey": "",
- "refspecs": [
- "+refs/heads/*:refs/remotes/origin/*",
- "+refs/tags/*:refs/tags/*",
- ],
- "root": "",
- "saltenv_blacklist": [],
- "saltenv_whitelist": [],
- "ssl_verify": True,
- "update_interval": 60,
- "user": "",
- }
- per_remote_only = ("all_saltenvs", "name", "saltenv")
- override_params = tuple(per_remote_defaults.keys())
- cache_root = os.path.join(cache, "gitfs")
- role = "gitfs"
- shutil.rmtree(cache_root, ignore_errors=True)
- provider = salt.utils.gitfs.Pygit2(
- opts,
- remote,
- per_remote_defaults,
- per_remote_only,
- override_params,
- cache_root,
- role,
+ def test_get_cachedir_base_with_name(self):
+ self.assertEqual(
+ self.main_class.remotes[1].get_cachedir_basename(),
+ "repo2-nuezpiDtjQRFC0ZJDByvi+F6Vb8ZhfoH41n_KFxTGsU=",
)
- return provider
- def test_checkout(self):
- remote = os.path.join(tests.support.paths.TMP, "pygit2-repo")
- cache = os.path.join(tests.support.paths.TMP, "pygit2-repo-cache")
- self._prepare_remote_repository(remote)
- provider = self._prepare_cache_repository(remote, cache)
- provider.remotecallbacks = None
- provider.credentials = None
- provider.init_remote()
- provider.fetch()
- provider.branch = "master"
- self.assertIn(provider.cachedir, provider.checkout())
- provider.branch = "simple_tag"
- self.assertIn(provider.cachedir, provider.checkout())
- provider.branch = "annotated_tag"
- self.assertIn(provider.cachedir, provider.checkout())
- provider.branch = "does_not_exist"
- self.assertIsNone(provider.checkout())
+ def test_git_provider_mp_lock(self):
+ """
+ Check that lock is released after provider.lock()
+ """
+ provider = self.main_class.remotes[0]
+ provider.lock()
+ # check that lock has been released
+ self.assertTrue(provider._master_lock.acquire(timeout=5))
+ provider._master_lock.release()
- def test_checkout_with_home_env_unset(self):
- remote = os.path.join(tests.support.paths.TMP, "pygit2-repo")
- cache = os.path.join(tests.support.paths.TMP, "pygit2-repo-cache")
- self._prepare_remote_repository(remote)
- provider = self._prepare_cache_repository(remote, cache)
- provider.remotecallbacks = None
- provider.credentials = None
- with patched_environ(__cleanup__=["HOME"]):
- self.assertTrue("HOME" not in os.environ)
- provider.init_remote()
- provider.fetch()
- self.assertTrue("HOME" in os.environ)
+ def test_git_provider_mp_clear_lock(self):
+ """
+ Check that lock is released after provider.clear_lock()
+ """
+ provider = self.main_class.remotes[0]
+ provider.clear_lock()
+ # check that lock has been released
+ self.assertTrue(provider._master_lock.acquire(timeout=5))
+ provider._master_lock.release()
+
+ @pytest.mark.slow_test
+ def test_git_provider_mp_lock_timeout(self):
+ """
+ Check that lock will time out if master lock is locked.
+ """
+ provider = self.main_class.remotes[0]
+ # Hijack the lock so git provider is fooled into thinking another instance is doing somthing.
+ self.assertTrue(provider._master_lock.acquire(timeout=5))
+ try:
+ # git provider should raise timeout error to avoid lock race conditions
+ self.assertRaises(TimeoutError, provider.lock)
+ finally:
+ provider._master_lock.release()
+
+ @pytest.mark.slow_test
+ def test_git_provider_mp_clear_lock_timeout(self):
+ """
+ Check that clear lock will time out if master lock is locked.
+ """
+ provider = self.main_class.remotes[0]
+ # Hijack the lock so git provider is fooled into thinking another instance is doing somthing.
+ self.assertTrue(provider._master_lock.acquire(timeout=5))
+ try:
+ # git provider should raise timeout error to avoid lock race conditions
+ self.assertRaises(TimeoutError, provider.clear_lock)
+ finally:
+ provider._master_lock.release()
--
2.41.0

View File

@ -0,0 +1,121 @@
From f41a8e2a142a8487e13af481990928e0afb5f15e Mon Sep 17 00:00:00 2001
From: Victor Zhestkov <vzhestkov@suse.com>
Date: Thu, 18 Jan 2024 17:02:03 +0100
Subject: [PATCH] Fixed KeyError in logs when running a state that
fails. (#615)
Co-authored-by: Megan Wilhite <mwilhite@vmware.com>
---
changelog/64231.fixed.md | 1 +
salt/master.py | 2 +-
salt/minion.py | 4 ++
salt/utils/event.py | 3 +-
.../integration/states/test_state_test.py | 38 +++++++++++++++++++
5 files changed, 46 insertions(+), 2 deletions(-)
create mode 100644 changelog/64231.fixed.md
create mode 100644 tests/pytests/integration/states/test_state_test.py
diff --git a/changelog/64231.fixed.md b/changelog/64231.fixed.md
new file mode 100644
index 0000000000..0991c5a8b9
--- /dev/null
+++ b/changelog/64231.fixed.md
@@ -0,0 +1 @@
+Fixed KeyError in logs when running a state that fails.
diff --git a/salt/master.py b/salt/master.py
index fc243ef674..3d2ba1e29d 100644
--- a/salt/master.py
+++ b/salt/master.py
@@ -1790,7 +1790,7 @@ class AESFuncs(TransportMethods):
def pub_ret(self, load):
"""
Request the return data from a specific jid, only allowed
- if the requesting minion also initialted the execution.
+ if the requesting minion also initiated the execution.
:param dict load: The minion payload
diff --git a/salt/minion.py b/salt/minion.py
index 4db0d31bd4..2ccd0cd5a9 100644
--- a/salt/minion.py
+++ b/salt/minion.py
@@ -2022,6 +2022,8 @@ class Minion(MinionBase):
ret["jid"] = data["jid"]
ret["fun"] = data["fun"]
ret["fun_args"] = data["arg"]
+ if "user" in data:
+ ret["user"] = data["user"]
if "master_id" in data:
ret["master_id"] = data["master_id"]
if "metadata" in data:
@@ -2141,6 +2143,8 @@ class Minion(MinionBase):
ret["jid"] = data["jid"]
ret["fun"] = data["fun"]
ret["fun_args"] = data["arg"]
+ if "user" in data:
+ ret["user"] = data["user"]
if "metadata" in data:
ret["metadata"] = data["metadata"]
if minion_instance.connected:
diff --git a/salt/utils/event.py b/salt/utils/event.py
index 869e12a140..e6d7b00520 100644
--- a/salt/utils/event.py
+++ b/salt/utils/event.py
@@ -902,7 +902,8 @@ class SaltEvent:
data["success"] = False
data["return"] = "Error: {}.{}".format(tags[0], tags[-1])
data["fun"] = fun
- data["user"] = load["user"]
+ if "user" in load:
+ data["user"] = load["user"]
self.fire_event(
data,
tagify([load["jid"], "sub", load["id"], "error", fun], "job"),
diff --git a/tests/pytests/integration/states/test_state_test.py b/tests/pytests/integration/states/test_state_test.py
new file mode 100644
index 0000000000..b2328a4c2b
--- /dev/null
+++ b/tests/pytests/integration/states/test_state_test.py
@@ -0,0 +1,38 @@
+def test_failing_sls(salt_master, salt_minion, salt_cli, caplog):
+ """
+ Test when running state.sls and the state fails.
+ When the master stores the job and attempts to send
+ an event a KeyError was previously being logged.
+ This test ensures we do not log an error when
+ attempting to send an event about a failing state.
+ """
+ statesls = """
+ test_state:
+ test.fail_without_changes:
+ - name: "bla"
+ """
+ with salt_master.state_tree.base.temp_file("test_failure.sls", statesls):
+ ret = salt_cli.run("state.sls", "test_failure", minion_tgt=salt_minion.id)
+ for message in caplog.messages:
+ assert "Event iteration failed with" not in message
+
+
+def test_failing_sls_compound(salt_master, salt_minion, salt_cli, caplog):
+ """
+ Test when running state.sls in a compound command and the state fails.
+ When the master stores the job and attempts to send
+ an event a KeyError was previously being logged.
+ This test ensures we do not log an error when
+ attempting to send an event about a failing state.
+ """
+ statesls = """
+ test_state:
+ test.fail_without_changes:
+ - name: "bla"
+ """
+ with salt_master.state_tree.base.temp_file("test_failure.sls", statesls):
+ ret = salt_cli.run(
+ "state.sls,cmd.run", "test_failure,ls", minion_tgt=salt_minion.id
+ )
+ for message in caplog.messages:
+ assert "Event iteration failed with" not in message
--
2.43.0

View File

@ -0,0 +1,44 @@
From 4996f423f14369fad14a9e6d2d3b8bd750c77fc7 Mon Sep 17 00:00:00 2001
From: Victor Zhestkov <vzhestkov@suse.com>
Date: Tue, 5 Apr 2022 12:04:46 +0300
Subject: [PATCH] Fixes for Python 3.10 (#502)
* Use collections.abc.Mapping instead collections.Mapping in state
---
salt/state.py | 5 +++--
1 file changed, 3 insertions(+), 2 deletions(-)
diff --git a/salt/state.py b/salt/state.py
index ab84cb8b4d..489424a083 100644
--- a/salt/state.py
+++ b/salt/state.py
@@ -12,7 +12,6 @@ The data sent to the state calls is as follows:
"""
-import collections
import copy
import datetime
import fnmatch
@@ -27,6 +26,8 @@ import sys
import time
import traceback
+from collections.abc import Mapping
+
import salt.channel.client
import salt.fileclient
import salt.loader
@@ -3513,7 +3514,7 @@ class State:
"""
for chunk in high:
state = high[chunk]
- if not isinstance(state, collections.Mapping):
+ if not isinstance(state, Mapping):
continue
for state_ref in state:
needs_default = True
--
2.39.2

BIN
html.tar.bz2 (Stored with Git LFS) Normal file

Binary file not shown.

View File

@ -0,0 +1,145 @@
From 7ab208fd2d23eaa582cdbba912d4538d8c87e5f4 Mon Sep 17 00:00:00 2001
From: Victor Zhestkov <vzhestkov@suse.com>
Date: Mon, 2 Oct 2023 13:24:15 +0200
Subject: [PATCH] Implement the calling for batch async from the salt
CLI
* Implement calling batch async with salt CLI
* Add the test for calling batch async with salt CLI
---
salt/cli/salt.py | 53 ++++++++++++++++++++++++++++-
tests/pytests/unit/cli/test_salt.py | 50 +++++++++++++++++++++++++++
2 files changed, 102 insertions(+), 1 deletion(-)
create mode 100644 tests/pytests/unit/cli/test_salt.py
diff --git a/salt/cli/salt.py b/salt/cli/salt.py
index f90057f668..e19cfa5ce6 100644
--- a/salt/cli/salt.py
+++ b/salt/cli/salt.py
@@ -47,7 +47,12 @@ class SaltCMD(salt.utils.parsers.SaltCMDOptionParser):
self.exit(2, "{}\n".format(exc))
return
- if self.options.batch or self.options.static:
+ if self.options.batch and self.config["async"]:
+ # _run_batch_async() will just return the jid and exit
+ # Execution will not continue past this point
+ # in batch async mode. Batch async is handled by the master.
+ self._run_batch_async()
+ elif self.options.batch or self.options.static:
# _run_batch() will handle all output and
# exit with the appropriate error condition
# Execution will not continue past this point
@@ -296,6 +301,52 @@ class SaltCMD(salt.utils.parsers.SaltCMDOptionParser):
retcode = job_retcode
sys.exit(retcode)
+ def _run_batch_async(self):
+ kwargs = {
+ "tgt": self.config["tgt"],
+ "fun": self.config["fun"],
+ "arg": self.config["arg"],
+ "timeout": self.options.timeout,
+ "show_timeout": self.options.show_timeout,
+ "show_jid": self.options.show_jid,
+ "batch": self.config["batch"],
+ }
+ tgt = kwargs.pop("tgt", "")
+ fun = kwargs.pop("fun", "")
+
+ if self.config.get("eauth", ""):
+ kwargs.update(
+ {
+ "eauth": self.config["eauth"],
+ }
+ )
+ for opt in ("username", "password"):
+ if opt in self.config:
+ kwargs[opt] = self.config[opt]
+
+ try:
+ ret = self.local_client.run_job(tgt, fun, **kwargs)
+ except (
+ AuthenticationError,
+ AuthorizationError,
+ SaltInvocationError,
+ EauthAuthenticationError,
+ SaltClientError,
+ ) as exc:
+ ret = str(exc)
+ self.exit(2, "ERROR: {}\n".format(exc))
+ if "jid" in ret and "error" not in ret:
+ salt.utils.stringutils.print_cli(
+ "Executed command with job ID: {}".format(ret["jid"])
+ )
+ else:
+ self._output_ret(ret, self.config.get("output", "nested"))
+
+ if "error" in ret:
+ sys.exit(1)
+
+ sys.exit(0)
+
def _print_errors_summary(self, errors):
if errors:
salt.utils.stringutils.print_cli("\n")
diff --git a/tests/pytests/unit/cli/test_salt.py b/tests/pytests/unit/cli/test_salt.py
new file mode 100644
index 0000000000..d9f4b5b097
--- /dev/null
+++ b/tests/pytests/unit/cli/test_salt.py
@@ -0,0 +1,50 @@
+import pytest
+
+from tests.support.mock import MagicMock, patch
+
+
+def test_saltcmd_batch_async_call():
+ """
+ Test calling batch async with salt CLI
+ """
+ import salt.cli.salt
+
+ local_client = MagicMock()
+ local_client.run_job = MagicMock(return_value={"jid": 123456})
+ with pytest.raises(SystemExit) as exit_info, patch(
+ "sys.argv",
+ [
+ "salt",
+ "--batch=10",
+ "--async",
+ "*",
+ "test.arg",
+ "arg1",
+ "arg2",
+ "kwarg1=val1",
+ ],
+ ), patch("salt.cli.salt.SaltCMD.process_config_dir", MagicMock), patch(
+ "salt.output.display_output", MagicMock()
+ ), patch(
+ "salt.client.get_local_client", return_value=local_client
+ ), patch(
+ "salt.utils.stringutils.print_cli", MagicMock()
+ ) as print_cli:
+ salt_cmd = salt.cli.salt.SaltCMD()
+ salt_cmd.config = {
+ "async": True,
+ "batch": 10,
+ "tgt": "*",
+ "fun": "test.arg",
+ "arg": ["arg1", "arg2", {"__kwarg__": True, "kwarg1": "val1"}],
+ }
+ salt_cmd._mixin_after_parsed_funcs = []
+ salt_cmd.run()
+
+ local_client.run_job.assert_called_once()
+ assert local_client.run_job.mock_calls[0].args[0] == "*"
+ assert local_client.run_job.mock_calls[0].args[1] == "test.arg"
+ assert local_client.run_job.mock_calls[0].kwargs["arg"] == ["arg1", "arg2", {"__kwarg__": True, "kwarg1": "val1"}]
+ assert local_client.run_job.mock_calls[0].kwargs["batch"] == 10
+ print_cli.assert_called_once_with("Executed command with job ID: 123456")
+ assert exit_info.value.code == 0
--
2.42.0

View File

@ -0,0 +1,202 @@
From 88bd54971d39b34d9728f3fe5fcb493cec3ff2fd Mon Sep 17 00:00:00 2001
From: Victor Zhestkov <vzhestkov@suse.com>
Date: Wed, 15 May 2024 09:22:11 +0200
Subject: [PATCH] Improve broken events catching and reporting
* Improve broken events catching and reporting
* Add test of catching SaltDeserializationError on reading event
* Add test for fire_ret_load
---
salt/utils/event.py | 23 +++-
tests/pytests/unit/utils/event/test_event.py | 107 +++++++++++++++++++
2 files changed, 128 insertions(+), 2 deletions(-)
diff --git a/salt/utils/event.py b/salt/utils/event.py
index e6d7b00520..ef048335ae 100644
--- a/salt/utils/event.py
+++ b/salt/utils/event.py
@@ -75,6 +75,7 @@ import salt.utils.platform
import salt.utils.process
import salt.utils.stringutils
import salt.utils.zeromq
+from salt.exceptions import SaltDeserializationError
log = logging.getLogger(__name__)
@@ -461,7 +462,13 @@ class SaltEvent:
salt.utils.stringutils.to_bytes(TAGEND)
) # split tag from data
mtag = salt.utils.stringutils.to_str(mtag)
- data = salt.payload.loads(mdata, encoding="utf-8")
+ try:
+ data = salt.payload.loads(mdata, encoding="utf-8")
+ except SaltDeserializationError:
+ log.warning(
+ "SaltDeserializationError on unpacking data, the payload could be incomplete"
+ )
+ raise
return mtag, data
def _get_match_func(self, match_type=None):
@@ -583,6 +590,9 @@ class SaltEvent:
raise
else:
return None
+ except SaltDeserializationError:
+ log.error("Unable to deserialize received event")
+ return None
except RuntimeError:
return None
@@ -889,6 +899,14 @@ class SaltEvent:
ret = load.get("return", {})
retcode = load["retcode"]
+ if not isinstance(ret, dict):
+ log.error(
+ "Event with bad payload received from '%s': %s",
+ load.get("id", "UNKNOWN"),
+ "".join(ret) if isinstance(ret, list) else ret,
+ )
+ return
+
try:
for tag, data in ret.items():
data["retcode"] = retcode
@@ -910,7 +928,8 @@ class SaltEvent:
)
except Exception as exc: # pylint: disable=broad-except
log.error(
- "Event iteration failed with exception: %s",
+ "Event from '%s' iteration failed with exception: %s",
+ load.get("id", "UNKNOWN"),
exc,
exc_info_on_loglevel=logging.DEBUG,
)
diff --git a/tests/pytests/unit/utils/event/test_event.py b/tests/pytests/unit/utils/event/test_event.py
index f4b6c15999..3eadfaf6ba 100644
--- a/tests/pytests/unit/utils/event/test_event.py
+++ b/tests/pytests/unit/utils/event/test_event.py
@@ -12,6 +12,7 @@ import salt.ext.tornado.ioloop
import salt.ext.tornado.iostream
import salt.utils.event
import salt.utils.stringutils
+from salt.exceptions import SaltDeserializationError
from salt.utils.event import SaltEvent
from tests.support.events import eventpublisher_process, eventsender_process
from tests.support.mock import patch
@@ -340,3 +341,109 @@ def test_master_pub_permissions(sock_dir):
assert bool(os.lstat(p).st_mode & stat.S_IRUSR)
assert not bool(os.lstat(p).st_mode & stat.S_IRGRP)
assert not bool(os.lstat(p).st_mode & stat.S_IROTH)
+
+
+def test_event_unpack_with_SaltDeserializationError(sock_dir):
+ with eventpublisher_process(str(sock_dir)), salt.utils.event.MasterEvent(
+ str(sock_dir), listen=True
+ ) as me, patch.object(
+ salt.utils.event.log, "warning", autospec=True
+ ) as mock_log_warning, patch.object(
+ salt.utils.event.log, "error", autospec=True
+ ) as mock_log_error:
+ me.fire_event({"data": "foo1"}, "evt1")
+ me.fire_event({"data": "foo2"}, "evt2")
+ evt2 = me.get_event(tag="")
+ with patch("salt.payload.loads", side_effect=SaltDeserializationError):
+ evt1 = me.get_event(tag="")
+ _assert_got_event(evt2, {"data": "foo2"}, expected_failure=True)
+ assert evt1 is None
+ assert (
+ mock_log_warning.mock_calls[0].args[0]
+ == "SaltDeserializationError on unpacking data, the payload could be incomplete"
+ )
+ assert (
+ mock_log_error.mock_calls[0].args[0]
+ == "Unable to deserialize received event"
+ )
+
+
+def test_event_fire_ret_load():
+ event = SaltEvent(node=None)
+ test_load = {
+ "id": "minion_id.example.org",
+ "jid": "20240212095247760376",
+ "fun": "state.highstate",
+ "retcode": 254,
+ "return": {
+ "saltutil_|-sync_states_|-sync_states_|-sync_states": {
+ "result": True,
+ },
+ "saltutil_|-sync_modules_|-sync_modules_|-sync_modules": {
+ "result": False,
+ },
+ },
+ }
+ test_fire_event_data = {
+ "result": False,
+ "retcode": 254,
+ "jid": "20240212095247760376",
+ "id": "minion_id.example.org",
+ "success": False,
+ "return": "Error: saltutil.sync_modules",
+ "fun": "state.highstate",
+ }
+ test_unhandled_exc = "Unhandled exception running state.highstate"
+ test_traceback = [
+ "Traceback (most recent call last):\n",
+ " Just an example of possible return as a list\n",
+ ]
+ with patch.object(
+ event, "fire_event", side_effect=[None, None, Exception]
+ ) as mock_fire_event, patch.object(
+ salt.utils.event.log, "error", autospec=True
+ ) as mock_log_error:
+ event.fire_ret_load(test_load)
+ assert len(mock_fire_event.mock_calls) == 2
+ assert mock_fire_event.mock_calls[0].args[0] == test_fire_event_data
+ assert mock_fire_event.mock_calls[0].args[1] == "saltutil.sync_modules"
+ assert mock_fire_event.mock_calls[1].args[0] == test_fire_event_data
+ assert (
+ mock_fire_event.mock_calls[1].args[1]
+ == "salt/job/20240212095247760376/sub/minion_id.example.org/error/state.highstate"
+ )
+ assert not mock_log_error.mock_calls
+
+ mock_log_error.reset_mock()
+
+ event.fire_ret_load(test_load)
+ assert (
+ mock_log_error.mock_calls[0].args[0]
+ == "Event from '%s' iteration failed with exception: %s"
+ )
+ assert mock_log_error.mock_calls[0].args[1] == "minion_id.example.org"
+
+ mock_log_error.reset_mock()
+ test_load["return"] = test_unhandled_exc
+
+ event.fire_ret_load(test_load)
+ assert (
+ mock_log_error.mock_calls[0].args[0]
+ == "Event with bad payload received from '%s': %s"
+ )
+ assert mock_log_error.mock_calls[0].args[1] == "minion_id.example.org"
+ assert (
+ mock_log_error.mock_calls[0].args[2]
+ == "Unhandled exception running state.highstate"
+ )
+
+ mock_log_error.reset_mock()
+ test_load["return"] = test_traceback
+
+ event.fire_ret_load(test_load)
+ assert (
+ mock_log_error.mock_calls[0].args[0]
+ == "Event with bad payload received from '%s': %s"
+ )
+ assert mock_log_error.mock_calls[0].args[1] == "minion_id.example.org"
+ assert mock_log_error.mock_calls[0].args[2] == "".join(test_traceback)
--
2.45.0

View File

@ -0,0 +1,113 @@
From da938aa8a572138b5b9b1535c5c3d69326e5194e Mon Sep 17 00:00:00 2001
From: Victor Zhestkov <vzhestkov@suse.com>
Date: Thu, 18 Jan 2024 17:02:23 +0100
Subject: [PATCH] Improve pip target override condition with
VENV_PIP_TARGET environment variable (bsc#1216850) (#613)
* Improve pip target override condition
* Improve pip test with different condition of overriding the target
* Add changelog entry
---
changelog/65562.fixed.md | 1 +
salt/modules/pip.py | 6 ++--
tests/pytests/unit/modules/test_pip.py | 50 +++++++++++++++++---------
3 files changed, 38 insertions(+), 19 deletions(-)
create mode 100644 changelog/65562.fixed.md
diff --git a/changelog/65562.fixed.md b/changelog/65562.fixed.md
new file mode 100644
index 0000000000..ba483b4b77
--- /dev/null
+++ b/changelog/65562.fixed.md
@@ -0,0 +1 @@
+Improve the condition of overriding target for pip with VENV_PIP_TARGET environment variable.
diff --git a/salt/modules/pip.py b/salt/modules/pip.py
index a60bdca0bb..68a2a442a1 100644
--- a/salt/modules/pip.py
+++ b/salt/modules/pip.py
@@ -857,9 +857,11 @@ def install(
cmd.extend(["--build", build])
# Use VENV_PIP_TARGET environment variable value as target
- # if set and no target specified on the function call
+ # if set and no target specified on the function call.
+ # Do not set target if bin_env specified, use default
+ # for specified binary environment or expect explicit target specification.
target_env = os.environ.get("VENV_PIP_TARGET", None)
- if target is None and target_env is not None:
+ if target is None and target_env is not None and bin_env is None:
target = target_env
if target:
diff --git a/tests/pytests/unit/modules/test_pip.py b/tests/pytests/unit/modules/test_pip.py
index b7ad1ea3fd..c03e6ed292 100644
--- a/tests/pytests/unit/modules/test_pip.py
+++ b/tests/pytests/unit/modules/test_pip.py
@@ -1738,28 +1738,44 @@ def test_when_version_is_called_with_a_user_it_should_be_passed_to_undelying_run
)
-def test_install_target_from_VENV_PIP_TARGET_in_resulting_command(python_binary):
+@pytest.mark.parametrize(
+ "bin_env,target,target_env,expected_target",
+ [
+ (None, None, None, None),
+ (None, "/tmp/foo", None, "/tmp/foo"),
+ (None, None, "/tmp/bar", "/tmp/bar"),
+ (None, "/tmp/foo", "/tmp/bar", "/tmp/foo"),
+ ("/tmp/venv", "/tmp/foo", None, "/tmp/foo"),
+ ("/tmp/venv", None, "/tmp/bar", None),
+ ("/tmp/venv", "/tmp/foo", "/tmp/bar", "/tmp/foo"),
+ ],
+)
+def test_install_target_from_VENV_PIP_TARGET_in_resulting_command(
+ python_binary, bin_env, target, target_env, expected_target
+):
pkg = "pep8"
- target = "/tmp/foo"
- target_env = "/tmp/bar"
mock = MagicMock(return_value={"retcode": 0, "stdout": ""})
environment = os.environ.copy()
- environment["VENV_PIP_TARGET"] = target_env
+ real_get_pip_bin = pip._get_pip_bin
+
+ def mock_get_pip_bin(bin_env):
+ if not bin_env:
+ return real_get_pip_bin(bin_env)
+ return [f"{bin_env}/bin/pip"]
+
+ if target_env is not None:
+ environment["VENV_PIP_TARGET"] = target_env
with patch.dict(pip.__salt__, {"cmd.run_all": mock}), patch.object(
os, "environ", environment
- ):
- pip.install(pkg)
- expected = [*python_binary, "install", "--target", target_env, pkg]
- mock.assert_called_with(
- expected,
- saltenv="base",
- runas=None,
- use_vt=False,
- python_shell=False,
- )
- mock.reset_mock()
- pip.install(pkg, target=target)
- expected = [*python_binary, "install", "--target", target, pkg]
+ ), patch.object(pip, "_get_pip_bin", mock_get_pip_bin):
+ pip.install(pkg, bin_env=bin_env, target=target)
+ expected_binary = python_binary
+ if bin_env is not None:
+ expected_binary = [f"{bin_env}/bin/pip"]
+ if expected_target is not None:
+ expected = [*expected_binary, "install", "--target", expected_target, pkg]
+ else:
+ expected = [*expected_binary, "install", pkg]
mock.assert_called_with(
expected,
saltenv="base",
--
2.43.0

View File

@ -0,0 +1,204 @@
From 4e6b445f2dbe8a79d220c697abff946e00b2e57b Mon Sep 17 00:00:00 2001
From: Victor Zhestkov <vzhestkov@suse.com>
Date: Mon, 2 Oct 2023 13:26:20 +0200
Subject: [PATCH] Improve salt.utils.json.find_json (bsc#1213293)
* Improve salt.utils.json.find_json
* Move tests of find_json to pytest
---
salt/utils/json.py | 39 +++++++-
tests/pytests/unit/utils/test_json.py | 122 ++++++++++++++++++++++++++
2 files changed, 158 insertions(+), 3 deletions(-)
create mode 100644 tests/pytests/unit/utils/test_json.py
diff --git a/salt/utils/json.py b/salt/utils/json.py
index 33cdbf401d..0845b64694 100644
--- a/salt/utils/json.py
+++ b/salt/utils/json.py
@@ -32,18 +32,51 @@ def find_json(raw):
"""
ret = {}
lines = __split(raw)
+ lengths = list(map(len, lines))
+ starts = []
+ ends = []
+
+ # Search for possible starts end ends of the json fragments
for ind, _ in enumerate(lines):
+ line = lines[ind].lstrip()
+ if line == "{" or line == "[":
+ starts.append((ind, line))
+ if line == "}" or line == "]":
+ ends.append((ind, line))
+
+ # List all the possible pairs of starts and ends,
+ # and fill the length of each block to sort by size after
+ starts_ends = []
+ for start, start_br in starts:
+ for end, end_br in reversed(ends):
+ if end > start and (
+ (start_br == "{" and end_br == "}")
+ or (start_br == "[" and end_br == "]")
+ ):
+ starts_ends.append((start, end, sum(lengths[start : end + 1])))
+
+ # Iterate through all the possible pairs starting from the largest
+ starts_ends.sort(key=lambda x: (x[2], x[1] - x[0], x[0]), reverse=True)
+ for start, end, _ in starts_ends:
+ working = "\n".join(lines[start : end + 1])
try:
- working = "\n".join(lines[ind:])
- except UnicodeDecodeError:
- working = "\n".join(salt.utils.data.decode(lines[ind:]))
+ ret = json.loads(working)
+ except ValueError:
+ continue
+ if ret:
+ return ret
+ # Fall back to old implementation for backward compatibility
+ # excpecting json after the text
+ for ind, _ in enumerate(lines):
+ working = "\n".join(lines[ind:])
try:
ret = json.loads(working)
except ValueError:
continue
if ret:
return ret
+
if not ret:
# Not json, raise an error
raise ValueError
diff --git a/tests/pytests/unit/utils/test_json.py b/tests/pytests/unit/utils/test_json.py
new file mode 100644
index 0000000000..72b1023003
--- /dev/null
+++ b/tests/pytests/unit/utils/test_json.py
@@ -0,0 +1,122 @@
+"""
+Tests for salt.utils.json
+"""
+
+import textwrap
+
+import pytest
+
+import salt.utils.json
+
+
+def test_find_json():
+ some_junk_text = textwrap.dedent(
+ """
+ Just some junk text
+ with multiline
+ """
+ )
+ some_warning_message = textwrap.dedent(
+ """
+ [WARNING] Test warning message
+ """
+ )
+ test_small_json = textwrap.dedent(
+ """
+ {
+ "local": true
+ }
+ """
+ )
+ test_sample_json = """
+ {
+ "glossary": {
+ "title": "example glossary",
+ "GlossDiv": {
+ "title": "S",
+ "GlossList": {
+ "GlossEntry": {
+ "ID": "SGML",
+ "SortAs": "SGML",
+ "GlossTerm": "Standard Generalized Markup Language",
+ "Acronym": "SGML",
+ "Abbrev": "ISO 8879:1986",
+ "GlossDef": {
+ "para": "A meta-markup language, used to create markup languages such as DocBook.",
+ "GlossSeeAlso": ["GML", "XML"]
+ },
+ "GlossSee": "markup"
+ }
+ }
+ }
+ }
+ }
+ """
+ expected_ret = {
+ "glossary": {
+ "GlossDiv": {
+ "GlossList": {
+ "GlossEntry": {
+ "GlossDef": {
+ "GlossSeeAlso": ["GML", "XML"],
+ "para": (
+ "A meta-markup language, used to create markup"
+ " languages such as DocBook."
+ ),
+ },
+ "GlossSee": "markup",
+ "Acronym": "SGML",
+ "GlossTerm": "Standard Generalized Markup Language",
+ "SortAs": "SGML",
+ "Abbrev": "ISO 8879:1986",
+ "ID": "SGML",
+ }
+ },
+ "title": "S",
+ },
+ "title": "example glossary",
+ }
+ }
+
+ # First test the valid JSON
+ ret = salt.utils.json.find_json(test_sample_json)
+ assert ret == expected_ret
+
+ # Now pre-pend some garbage and re-test
+ garbage_prepend_json = f"{some_junk_text}{test_sample_json}"
+ ret = salt.utils.json.find_json(garbage_prepend_json)
+ assert ret == expected_ret
+
+ # Now post-pend some garbage and re-test
+ garbage_postpend_json = f"{test_sample_json}{some_junk_text}"
+ ret = salt.utils.json.find_json(garbage_postpend_json)
+ assert ret == expected_ret
+
+ # Now pre-pend some warning and re-test
+ warning_prepend_json = f"{some_warning_message}{test_sample_json}"
+ ret = salt.utils.json.find_json(warning_prepend_json)
+ assert ret == expected_ret
+
+ # Now post-pend some warning and re-test
+ warning_postpend_json = f"{test_sample_json}{some_warning_message}"
+ ret = salt.utils.json.find_json(warning_postpend_json)
+ assert ret == expected_ret
+
+ # Now put around some garbage and re-test
+ garbage_around_json = f"{some_junk_text}{test_sample_json}{some_junk_text}"
+ ret = salt.utils.json.find_json(garbage_around_json)
+ assert ret == expected_ret
+
+ # Now pre-pend small json and re-test
+ small_json_pre_json = f"{test_small_json}{test_sample_json}"
+ ret = salt.utils.json.find_json(small_json_pre_json)
+ assert ret == expected_ret
+
+ # Now post-pend small json and re-test
+ small_json_post_json = f"{test_sample_json}{test_small_json}"
+ ret = salt.utils.json.find_json(small_json_post_json)
+ assert ret == expected_ret
+
+ # Test to see if a ValueError is raised if no JSON is passed in
+ with pytest.raises(ValueError):
+ ret = salt.utils.json.find_json(some_junk_text)
--
2.42.0

View File

@ -0,0 +1,138 @@
From 4f459d670886a8f4a410fdbd1ec595477d45e4e2 Mon Sep 17 00:00:00 2001
From: Alexander Graul <agraul@suse.com>
Date: Tue, 18 Jan 2022 17:10:37 +0100
Subject: [PATCH] Include aliases in the fqdns grains
Add UT for "is_fqdn"
Add "is_fqdn" check to the network utils
Bugfix: include FQDNs aliases
Deprecate UnitTest assertion in favour of built-in assert keyword
Add UT for fqdns aliases
Leverage cached interfaces, if any.
Implement network.fqdns module function (bsc#1134860) (#172)
* Duplicate fqdns logic in module.network
* Move _get_interfaces to utils.network
* Reuse network.fqdns in grains.core.fqdns
* Return empty list when fqdns grains is disabled
Co-authored-by: Eric Siebigteroth <eric.siebigteroth@suse.de>
---
salt/modules/network.py | 5 +++-
salt/utils/network.py | 16 +++++++++++
tests/pytests/unit/modules/test_network.py | 4 +--
tests/unit/utils/test_network.py | 32 ++++++++++++++++++++++
4 files changed, 54 insertions(+), 3 deletions(-)
diff --git a/salt/modules/network.py b/salt/modules/network.py
index 524b1b74fa..f959dbf97b 100644
--- a/salt/modules/network.py
+++ b/salt/modules/network.py
@@ -2096,7 +2096,10 @@ def fqdns():
# https://sourceware.org/bugzilla/show_bug.cgi?id=19329
time.sleep(random.randint(5, 25) / 1000)
try:
- return [socket.getfqdn(socket.gethostbyaddr(ip)[0])]
+ name, aliaslist, addresslist = socket.gethostbyaddr(ip)
+ return [socket.getfqdn(name)] + [
+ als for als in aliaslist if salt.utils.network.is_fqdn(als)
+ ]
except socket.herror as err:
if err.errno in (0, HOST_NOT_FOUND, NO_DATA):
# No FQDN for this IP address, so we don't need to know this all the time.
diff --git a/salt/utils/network.py b/salt/utils/network.py
index 2bea2cf129..6ec993a678 100644
--- a/salt/utils/network.py
+++ b/salt/utils/network.py
@@ -2372,3 +2372,19 @@ def ip_bracket(addr, strip=False):
addr = addr.rstrip("]")
addr = ipaddress.ip_address(addr)
return ("[{}]" if addr.version == 6 and not strip else "{}").format(addr)
+
+
+def is_fqdn(hostname):
+ """
+ Verify if hostname conforms to be a FQDN.
+
+ :param hostname: text string with the name of the host
+ :return: bool, True if hostname is correct FQDN, False otherwise
+ """
+
+ compliant = re.compile(r"(?!-)[A-Z\d\-\_]{1,63}(?<!-)$", re.IGNORECASE)
+ return (
+ "." in hostname
+ and len(hostname) < 0xFF
+ and all(compliant.match(x) for x in hostname.rstrip(".").split("."))
+ )
diff --git a/tests/pytests/unit/modules/test_network.py b/tests/pytests/unit/modules/test_network.py
index 81035434b6..3f31391f44 100644
--- a/tests/pytests/unit/modules/test_network.py
+++ b/tests/pytests/unit/modules/test_network.py
@@ -29,7 +29,7 @@ def fake_fqdn():
with patch("socket.getfqdn", autospec=True, return_value=fqdn), patch(
"socket.gethostbyaddr",
autospec=True,
- return_value=("fnord", "fnord fnord"),
+ return_value=("fnord", ["fnord fnord"], []),
):
yield fqdn
@@ -89,7 +89,7 @@ def test_fqdns_should_return_sorted_unique_domains(fake_ips):
with patch("socket.getfqdn", autospec=True, side_effect=fake_domains), patch(
"socket.gethostbyaddr",
autospec=True,
- return_value=("fnord", "fnord fnord"),
+ return_value=("fnord", ["fnord fnord"], []),
):
actual_fqdns = networkmod.fqdns()
assert actual_fqdns == {
diff --git a/tests/unit/utils/test_network.py b/tests/unit/utils/test_network.py
index f7d3972930..cdb1ca19ca 100644
--- a/tests/unit/utils/test_network.py
+++ b/tests/unit/utils/test_network.py
@@ -1311,3 +1311,35 @@ class NetworkTestCase(TestCase):
ip_addr_obj = ipaddress.ip_address(test_ipv4)
self.assertEqual(test_ipv4, network.ip_bracket(ip_addr_obj))
+
+ def test_is_fqdn(self):
+ """
+ Test is_fqdn function passes possible FQDN names.
+
+ :return: None
+ """
+ for fqdn in [
+ "host.domain.com",
+ "something.with.the.dots.still.ok",
+ "UPPERCASE.ALSO.SHOULD.WORK",
+ "MiXeD.CaSe.AcCePtAbLe",
+ "123.host.com",
+ "host123.com",
+ "some_underscore.com",
+ "host-here.com",
+ ]:
+ assert network.is_fqdn(fqdn)
+
+ def test_is_not_fqdn(self):
+ """
+ Test is_fqdn function rejects FQDN names.
+
+ :return: None
+ """
+ for fqdn in [
+ "hostname",
+ "/some/path",
+ "$variable.here",
+ "verylonghostname.{}".format("domain" * 45),
+ ]:
+ assert not network.is_fqdn(fqdn)
--
2.39.2

View File

@ -0,0 +1,66 @@
From 01a670dad69e03bd8bf2da76a6a81e847af20aab Mon Sep 17 00:00:00 2001
From: Alexander Graul <agraul@suse.com>
Date: Tue, 25 Jan 2022 17:12:47 +0100
Subject: [PATCH] info_installed works without status attr now
If 'status' was excluded via attr, info_installed was no longer able to
detect if a package was installed or not. Now info_installed adds the
'status' for the 'lowpkg.info' request again.
---
salt/modules/aptpkg.py | 9 +++++++++
tests/pytests/unit/modules/test_aptpkg.py | 18 ++++++++++++++++++
2 files changed, 27 insertions(+)
diff --git a/salt/modules/aptpkg.py b/salt/modules/aptpkg.py
index 938e37cc9e..3289f6604d 100644
--- a/salt/modules/aptpkg.py
+++ b/salt/modules/aptpkg.py
@@ -3461,6 +3461,15 @@ def info_installed(*names, **kwargs):
failhard = kwargs.pop("failhard", True)
kwargs.pop("errors", None) # Only for compatibility with RPM
attr = kwargs.pop("attr", None) # Package attributes to return
+
+ # status is needed to see if a package is installed. So we have to add it,
+ # even if it's excluded via attr parameter. Otherwise all packages are
+ # returned.
+ if attr:
+ attr_list = set(attr.split(","))
+ attr_list.add("status")
+ attr = ",".join(attr_list)
+
all_versions = kwargs.pop(
"all_versions", False
) # This is for backward compatible structure only
diff --git a/tests/pytests/unit/modules/test_aptpkg.py b/tests/pytests/unit/modules/test_aptpkg.py
index 4226957eeb..eb72447c3a 100644
--- a/tests/pytests/unit/modules/test_aptpkg.py
+++ b/tests/pytests/unit/modules/test_aptpkg.py
@@ -385,6 +385,24 @@ def test_info_installed_attr(lowpkg_info_var):
assert ret["wget"] == expected_pkg
+def test_info_installed_attr_without_status(lowpkg_info_var):
+ """
+ Test info_installed 'attr' for inclusion of 'status' attribute.
+
+ Since info_installed should only return installed packages, we need to
+ call __salt__['lowpkg.info'] with the 'status' attribute even if the user
+ is not asking for it in 'attr'. Otherwise info_installed would not be able
+ to check if the package is installed and would return everything.
+
+ :return:
+ """
+ mock = MagicMock(return_value=lowpkg_info_var)
+ with patch.dict(aptpkg.__salt__, {"lowpkg.info": mock}):
+ aptpkg.info_installed("wget", attr="version")
+ assert "status" in mock.call_args.kwargs["attr"]
+ assert "version" in mock.call_args.kwargs["attr"]
+
+
def test_info_installed_all_versions(lowpkg_info_var):
"""
Test info_installed 'all_versions'.
--
2.39.2

View File

@ -0,0 +1,32 @@
From 1de8313e55317a62c36a1a6262e7b9463544d69c Mon Sep 17 00:00:00 2001
From: Can Bulut Bayburt <1103552+cbbayburt@users.noreply.github.com>
Date: Wed, 4 Dec 2019 15:59:46 +0100
Subject: [PATCH] Let salt-ssh use 'platform-python' binary in RHEL8
(#191)
RHEL/CentOS 8 has an internal Python interpreter called 'platform-python'
included in the base setup.
Add this binary to the list of Python executables to look for when
creating the sh shim.
---
salt/client/ssh/__init__.py | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/salt/client/ssh/__init__.py b/salt/client/ssh/__init__.py
index 88365a6099..049baff51a 100644
--- a/salt/client/ssh/__init__.py
+++ b/salt/client/ssh/__init__.py
@@ -146,7 +146,7 @@ if [ "$SUDO" ] && [ "$SUDO_USER" ]
then SUDO="$SUDO -u $SUDO_USER"
fi
EX_PYTHON_INVALID={EX_THIN_PYTHON_INVALID}
-PYTHON_CMDS="python3 python27 python2.7 python26 python2.6 python2 python /usr/libexec/platform-python"
+PYTHON_CMDS="python3 /usr/libexec/platform-python python27 python2.7 python26 python2.6 python2 python"
for py_cmd in $PYTHON_CMDS
do
if command -v "$py_cmd" >/dev/null 2>&1 && "$py_cmd" -c "import sys; sys.exit(not (sys.version_info >= (2, 6)));"
--
2.39.2

View File

@ -0,0 +1,28 @@
From f9731227e7af0b1bf0a54993e0cac890225517f6 Mon Sep 17 00:00:00 2001
From: Bo Maryniuk <bo@suse.de>
Date: Fri, 16 Nov 2018 10:54:12 +0100
Subject: [PATCH] Make aptpkg.list_repos compatible on enabled/disabled
output
---
salt/modules/aptpkg.py | 3 +++
1 file changed, 3 insertions(+)
diff --git a/salt/modules/aptpkg.py b/salt/modules/aptpkg.py
index f68b1907e8..8e89744b5e 100644
--- a/salt/modules/aptpkg.py
+++ b/salt/modules/aptpkg.py
@@ -1919,6 +1919,9 @@ def list_repos(**kwargs):
repo["file"] = source.file
repo["comps"] = getattr(source, "comps", [])
repo["disabled"] = source.disabled
+ repo["enabled"] = not repo[
+ "disabled"
+ ] # This is for compatibility with the other modules
repo["dist"] = source.dist
repo["type"] = source.type
repo["uri"] = source.uri
--
2.39.2

View File

@ -0,0 +1,63 @@
From 0913a58a36ef69d957dd9cc5c95fafe6d56448d5 Mon Sep 17 00:00:00 2001
From: Marek Czernek <marek.czernek@suse.com>
Date: Mon, 4 Mar 2024 11:27:35 +0100
Subject: [PATCH] Make importing seco.range thread safe (bsc#1211649)
---
salt/roster/range.py | 5 +++++
salt/utils/roster_matcher.py | 5 +++++
2 files changed, 10 insertions(+)
diff --git a/salt/roster/range.py b/salt/roster/range.py
index 3f039dcef42..1525f70c32b 100644
--- a/salt/roster/range.py
+++ b/salt/roster/range.py
@@ -15,16 +15,21 @@ import copy
import fnmatch
import logging
+import salt.loader
+
log = logging.getLogger(__name__)
# Try to import range from https://github.com/ytoolshed/range
HAS_RANGE = False
try:
+ salt.loader.LOAD_LOCK.acquire()
import seco.range
HAS_RANGE = True
except ImportError:
log.error("Unable to load range library")
+finally:
+ salt.loader.LOAD_LOCK.release()
# pylint: enable=import-error
diff --git a/salt/utils/roster_matcher.py b/salt/utils/roster_matcher.py
index db5dfda3e03..5165dc122b7 100644
--- a/salt/utils/roster_matcher.py
+++ b/salt/utils/roster_matcher.py
@@ -8,14 +8,19 @@ import functools
import logging
import re
+import salt.loader
+
# Try to import range from https://github.com/ytoolshed/range
HAS_RANGE = False
try:
+ salt.loader.LOAD_LOCK.acquire()
import seco.range
HAS_RANGE = True
except ImportError:
pass
+finally:
+ salt.loader.LOAD_LOCK.release()
# pylint: enable=import-error
--
2.44.0

View File

@ -0,0 +1,233 @@
From 48b6f57ece7eb9f58b8e6da40ec241b6df3f6d01 Mon Sep 17 00:00:00 2001
From: Victor Zhestkov <vzhestkov@suse.com>
Date: Wed, 15 May 2024 09:20:18 +0200
Subject: [PATCH] Make logging calls lighter
* Call set_lowest_log_level_by_opts with set_logging_options_dict
* Fix the _logging test with setting minimum logging level
* Fix test_deferred_stream_handler test
* Fix vt.Terminal failing test: test_log_sanitize
Fixes failing test added in a09b4f445052be66f0ac53fd01fa02bfa5b82ea6
We can't assume tests are run at debug level, so this ensures the test
passes regardless of what logging level is currently set by capturing
the output in caplog at DEBUG which stream_stdout/stream_stderr uses by
default.
Signed-off-by: Joe Groocock <jgroocock@cloudflare.com>
---------
Signed-off-by: Joe Groocock <jgroocock@cloudflare.com>
Co-authored-by: Joe Groocock <jgroocock@cloudflare.com>
---
salt/_logging/impl.py | 1 +
.../integration/_logging/test_logging.py | 106 ++++++++++++++++++
.../handlers/test_deferred_stream_handler.py | 9 +-
tests/pytests/unit/utils/test_vt.py | 6 +-
4 files changed, 117 insertions(+), 5 deletions(-)
create mode 100644 tests/pytests/integration/_logging/test_logging.py
diff --git a/salt/_logging/impl.py b/salt/_logging/impl.py
index 2d1a276cb8..1d71cb8be8 100644
--- a/salt/_logging/impl.py
+++ b/salt/_logging/impl.py
@@ -426,6 +426,7 @@ def set_logging_options_dict(opts):
except AttributeError:
pass
set_logging_options_dict.__options_dict__ = opts
+ set_lowest_log_level_by_opts(opts)
def freeze_logging_options_dict():
diff --git a/tests/pytests/integration/_logging/test_logging.py b/tests/pytests/integration/_logging/test_logging.py
new file mode 100644
index 0000000000..8e38f55b38
--- /dev/null
+++ b/tests/pytests/integration/_logging/test_logging.py
@@ -0,0 +1,106 @@
+import logging
+import os
+
+import pytest
+
+import salt._logging.impl as log_impl
+from tests.support.mock import MagicMock, patch
+
+pytestmark = [
+ pytest.mark.skip_on_windows(reason="Temporarily skipped on the newer golden images")
+]
+
+
+log = logging.getLogger(__name__)
+
+
+@pytest.fixture
+def configure_loader_modules():
+ return {log_impl: {}}
+
+
+def log_nameToLevel(name):
+ """
+ Return the numeric representation of textual logging level
+ """
+ # log level values
+ CRITICAL = 50
+ FATAL = CRITICAL
+ ERROR = 40
+ WARNING = 30
+ WARN = WARNING
+ INFO = 20
+ DEBUG = 10
+ NOTSET = 0
+
+ _nameToLevel = {
+ "CRITICAL": CRITICAL,
+ "FATAL": FATAL,
+ "ERROR": ERROR,
+ "WARN": WARNING,
+ "WARNING": WARNING,
+ "INFO": INFO,
+ "DEBUG": DEBUG,
+ "NOTSET": NOTSET,
+ }
+ return _nameToLevel.get(name, None)
+
+
+def test_lowest_log_level():
+ ret = log_impl.get_lowest_log_level()
+ assert ret is not None
+
+ log_impl.set_lowest_log_level(log_nameToLevel("DEBUG"))
+ ret = log_impl.get_lowest_log_level()
+ assert ret is log_nameToLevel("DEBUG")
+
+ log_impl.set_lowest_log_level(log_nameToLevel("WARNING"))
+ ret = log_impl.get_lowest_log_level()
+ assert ret is log_nameToLevel("WARNING")
+
+ opts = {"log_level": "ERROR", "log_level_logfile": "INFO"}
+ log_impl.set_lowest_log_level_by_opts(opts)
+ ret = log_impl.get_lowest_log_level()
+ assert ret is log_nameToLevel("INFO")
+
+
+def test_get_logging_level_from_string(caplog):
+ ret = log_impl.get_logging_level_from_string(None)
+ assert ret is log_nameToLevel("WARNING")
+
+ ret = log_impl.get_logging_level_from_string(log_nameToLevel("DEBUG"))
+ assert ret is log_nameToLevel("DEBUG")
+
+ ret = log_impl.get_logging_level_from_string("CRITICAL")
+ assert ret is log_nameToLevel("CRITICAL")
+
+ caplog.clear()
+ with caplog.at_level(logging.WARNING):
+ msg = "Could not translate the logging level string 'BADLEVEL' into an actual logging level integer. Returning 'logging.ERROR'."
+ ret = log_impl.get_logging_level_from_string("BADLEVEL")
+ assert ret is log_nameToLevel("ERROR")
+ assert msg in caplog.text
+
+
+def test_logfile_handler(caplog):
+ caplog.clear()
+ with caplog.at_level(logging.WARNING):
+ ret = log_impl.is_logfile_handler_configured()
+ assert ret is False
+
+ msg = "log_path setting is set to `None`. Nothing else to do"
+ log_path = None
+ assert log_impl.setup_logfile_handler(log_path) is None
+ assert msg in caplog.text
+
+
+def test_in_mainprocess():
+ ret = log_impl.in_mainprocess()
+ assert ret is True
+
+ curr_pid = os.getpid()
+ with patch(
+ "os.getpid", MagicMock(side_effect=[AttributeError, curr_pid, curr_pid])
+ ):
+ ret = log_impl.in_mainprocess()
+ assert ret is True
diff --git a/tests/pytests/unit/_logging/handlers/test_deferred_stream_handler.py b/tests/pytests/unit/_logging/handlers/test_deferred_stream_handler.py
index 76b0e88eca..62c0dff4be 100644
--- a/tests/pytests/unit/_logging/handlers/test_deferred_stream_handler.py
+++ b/tests/pytests/unit/_logging/handlers/test_deferred_stream_handler.py
@@ -9,6 +9,7 @@ import pytest
from pytestshellutils.utils.processes import terminate_process
from salt._logging.handlers import DeferredStreamHandler
+from salt._logging.impl import set_lowest_log_level
from salt.utils.nb_popen import NonBlockingPopen
from tests.support.helpers import CaptureOutput, dedent
from tests.support.runtests import RUNTIME_VARS
@@ -20,7 +21,7 @@ def _sync_with_handlers_proc_target():
with CaptureOutput() as stds:
handler = DeferredStreamHandler(sys.stderr)
- handler.setLevel(logging.DEBUG)
+ set_lowest_log_level(logging.DEBUG)
formatter = logging.Formatter("%(message)s")
handler.setFormatter(formatter)
logging.root.addHandler(handler)
@@ -45,7 +46,7 @@ def _deferred_write_on_flush_proc_target():
with CaptureOutput() as stds:
handler = DeferredStreamHandler(sys.stderr)
- handler.setLevel(logging.DEBUG)
+ set_lowest_log_level(logging.DEBUG)
formatter = logging.Formatter("%(message)s")
handler.setFormatter(formatter)
logging.root.addHandler(handler)
@@ -126,7 +127,7 @@ def test_deferred_write_on_atexit(tmp_path):
# Just loop consuming output
while True:
if time.time() > max_time:
- pytest.fail("Script didn't exit after {} second".format(execution_time))
+ pytest.fail(f"Script didn't exit after {execution_time} second")
time.sleep(0.125)
_out = proc.recv()
@@ -146,7 +147,7 @@ def test_deferred_write_on_atexit(tmp_path):
finally:
terminate_process(proc.pid, kill_children=True)
if b"Foo" not in err:
- pytest.fail("'Foo' should be in stderr and it's not: {}".format(err))
+ pytest.fail(f"'Foo' should be in stderr and it's not: {err}")
@pytest.mark.skip_on_windows(reason="Windows does not support SIGINT")
diff --git a/tests/pytests/unit/utils/test_vt.py b/tests/pytests/unit/utils/test_vt.py
index 438a6eb09c..c31b25e623 100644
--- a/tests/pytests/unit/utils/test_vt.py
+++ b/tests/pytests/unit/utils/test_vt.py
@@ -1,3 +1,4 @@
+import logging
import os
import signal
@@ -43,10 +44,13 @@ def test_log_sanitize(test_cmd, caplog):
cmd,
log_stdout=True,
log_stderr=True,
+ log_stdout_level="debug",
+ log_stderr_level="debug",
log_sanitize=password,
stream_stdout=False,
stream_stderr=False,
)
- ret = term.recv()
+ with caplog.at_level(logging.DEBUG):
+ ret = term.recv()
assert password not in caplog.text
assert "******" in caplog.text
--
2.45.0

View File

@ -0,0 +1,37 @@
From 53a5a62191b81c6838c3041cf95ffeb12fbab5b5 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Pablo=20Su=C3=A1rez=20Hern=C3=A1ndez?=
<psuarezhernandez@suse.com>
Date: Mon, 19 Jun 2023 15:35:41 +0100
Subject: [PATCH] Make master_tops compatible with Salt 3000 and older
minions (bsc#1212516) (bsc#1212517) (#587)
---
salt/master.py | 4 ++++
1 file changed, 4 insertions(+)
diff --git a/salt/master.py b/salt/master.py
index da1eb8cef5..fc243ef674 100644
--- a/salt/master.py
+++ b/salt/master.py
@@ -1213,6 +1213,7 @@ class AESFuncs(TransportMethods):
"_dir_list",
"_symlink_list",
"_file_envs",
+ "_ext_nodes", # To keep compatibility with old Salt minion versions
)
def __init__(self, opts, context=None):
@@ -1412,6 +1413,9 @@ class AESFuncs(TransportMethods):
return {}
return self.masterapi._master_tops(load, skip_verify=True)
+ # Needed so older minions can request master_tops
+ _ext_nodes = _master_tops
+
def _master_opts(self, load):
"""
Return the master options to the minion
--
2.41.0

View File

@ -0,0 +1,104 @@
From 0d35f09288700f5c961567442c3fcc25838b8de4 Mon Sep 17 00:00:00 2001
From: Victor Zhestkov <vzhestkov@suse.com>
Date: Wed, 15 May 2024 09:44:21 +0200
Subject: [PATCH] Make reactor engine less blocking the EventPublisher
---
salt/utils/reactor.py | 45 +++++++++++++++++++++++++++----------------
1 file changed, 28 insertions(+), 17 deletions(-)
diff --git a/salt/utils/reactor.py b/salt/utils/reactor.py
index 19420a51cf..78adad34da 100644
--- a/salt/utils/reactor.py
+++ b/salt/utils/reactor.py
@@ -1,10 +1,12 @@
"""
Functions which implement running reactor jobs
"""
+
import fnmatch
import glob
import logging
import os
+from threading import Lock
import salt.client
import salt.defaults.exitcodes
@@ -194,13 +196,6 @@ class Reactor(salt.utils.process.SignalHandlingProcess, salt.state.Compiler):
self.resolve_aliases(chunks)
return chunks
- def call_reactions(self, chunks):
- """
- Execute the reaction state
- """
- for chunk in chunks:
- self.wrap.run(chunk)
-
def run(self):
"""
Enter into the server loop
@@ -218,7 +213,7 @@ class Reactor(salt.utils.process.SignalHandlingProcess, salt.state.Compiler):
) as event:
self.wrap = ReactWrap(self.opts)
- for data in event.iter_events(full=True):
+ for data in event.iter_events(full=True, auto_reconnect=True):
# skip all events fired by ourselves
if data["data"].get("user") == self.wrap.event_user:
continue
@@ -268,15 +263,9 @@ class Reactor(salt.utils.process.SignalHandlingProcess, salt.state.Compiler):
if not self.is_leader:
continue
else:
- reactors = self.list_reactors(data["tag"])
- if not reactors:
- continue
- chunks = self.reactions(data["tag"], data["data"], reactors)
- if chunks:
- try:
- self.call_reactions(chunks)
- except SystemExit:
- log.warning("Exit ignored by reactor")
+ self.wrap.call_reactions(
+ data, self.list_reactors, self.reactions
+ )
class ReactWrap:
@@ -297,6 +286,7 @@ class ReactWrap:
def __init__(self, opts):
self.opts = opts
+ self._run_lock = Lock()
if ReactWrap.client_cache is None:
ReactWrap.client_cache = salt.utils.cache.CacheDict(
opts["reactor_refresh_interval"]
@@ -480,3 +470,24 @@ class ReactWrap:
Wrap LocalCaller to execute remote exec functions locally on the Minion
"""
self.client_cache["caller"].cmd(fun, *kwargs["arg"], **kwargs["kwarg"])
+
+ def _call_reactions(self, data, list_reactors, get_reactions):
+ reactors = list_reactors(data["tag"])
+ if not reactors:
+ return
+ chunks = get_reactions(data["tag"], data["data"], reactors)
+ if not chunks:
+ return
+ with self._run_lock:
+ try:
+ for chunk in chunks:
+ self.run(chunk)
+ except Exception as exc: # pylint: disable=broad-except
+ log.error(
+ "Exception while calling the reactions: %s", exc, exc_info=True
+ )
+
+ def call_reactions(self, data, list_reactors, get_reactions):
+ return self.pool.fire_async(
+ self._call_reactions, args=(data, list_reactors, get_reactions)
+ )
--
2.45.0

View File

@ -0,0 +1,243 @@
From 794b5d1aa7b8e880e9a21940183d241c6cbde9c9 Mon Sep 17 00:00:00 2001
From: Victor Zhestkov <vzhestkov@suse.com>
Date: Wed, 15 May 2024 09:42:23 +0200
Subject: [PATCH] Make salt-master self recoverable on killing
EventPublisher
* Implement timeout and tries to transport.ipc.IPCClient.send
* Make timeout and tries configurable for fire_event
* Add test of timeout and tries
* Prevent exceptions from tornado Future on closing the IPC connection
---
salt/transport/ipc.py | 73 +++++++++++++++++---
salt/utils/event.py | 21 +++++-
tests/pytests/unit/utils/event/test_event.py | 43 ++++++++++++
3 files changed, 125 insertions(+), 12 deletions(-)
diff --git a/salt/transport/ipc.py b/salt/transport/ipc.py
index cee100b086..6631781c5c 100644
--- a/salt/transport/ipc.py
+++ b/salt/transport/ipc.py
@@ -2,7 +2,6 @@
IPC transport classes
"""
-
import errno
import logging
import socket
@@ -340,7 +339,8 @@ class IPCClient:
try:
log.trace("IPCClient: Connecting to socket: %s", self.socket_path)
yield self.stream.connect(sock_addr)
- self._connecting_future.set_result(True)
+ if self._connecting_future is not None:
+ self._connecting_future.set_result(True)
break
except Exception as e: # pylint: disable=broad-except
if self.stream.closed():
@@ -350,7 +350,8 @@ class IPCClient:
if self.stream is not None:
self.stream.close()
self.stream = None
- self._connecting_future.set_exception(e)
+ if self._connecting_future is not None:
+ self._connecting_future.set_exception(e)
break
yield salt.ext.tornado.gen.sleep(1)
@@ -365,7 +366,13 @@ class IPCClient:
return
self._closing = True
- self._connecting_future = None
+ if self._connecting_future is not None:
+ try:
+ self._connecting_future.set_result(True)
+ self._connecting_future.exception() # pylint: disable=E0203
+ except Exception as e: # pylint: disable=broad-except
+ log.warning("Unhandled connecting exception: %s", e, exc_info=True)
+ self._connecting_future = None
log.debug("Closing %s instance", self.__class__.__name__)
@@ -435,8 +442,6 @@ class IPCMessageClient(IPCClient):
"close",
]
- # FIXME timeout unimplemented
- # FIXME tries unimplemented
@salt.ext.tornado.gen.coroutine
def send(self, msg, timeout=None, tries=None):
"""
@@ -445,12 +450,60 @@ class IPCMessageClient(IPCClient):
If the socket is not currently connected, a connection will be established.
:param dict msg: The message to be sent
- :param int timeout: Timeout when sending message (Currently unimplemented)
+ :param int timeout: Timeout when sending message
+        :param int tries: Maximum number of tries to send message
"""
- if not self.connected():
- yield self.connect()
+ if tries is None or tries < 1:
+ tries = 1
+ due_time = None
+ if timeout is not None:
+ due_time = time.time() + timeout
+ _try = 1
+ exc_count = 0
pack = salt.transport.frame.frame_msg_ipc(msg, raw_body=True)
- yield self.stream.write(pack)
+ while _try <= tries:
+ if not self.connected():
+ self.close()
+ self.stream = None
+ self._closing = False
+ try:
+ yield self.connect(
+ timeout=(
+ None if due_time is None else max(due_time - time.time(), 1)
+ )
+ )
+ except StreamClosedError:
+ log.warning(
+ "IPCMessageClient: Unable to reconnect IPC stream on sending message with ID: 0x%016x%s",
+ id(msg),
+ f", retry {_try} of {tries}" if tries > 1 else "",
+ )
+ exc_count += 1
+ if self.connected():
+ try:
+ yield self.stream.write(pack)
+ return
+ except StreamClosedError:
+ if self._closing:
+ break
+ log.warning(
+ "IPCMessageClient: Stream was closed on sending message with ID: 0x%016x",
+ id(msg),
+ )
+ exc_count += 1
+ if exc_count == 1:
+ # Give one more chance in case if stream was detected as closed
+ # on the first write attempt
+ continue
+ cur_time = time.time()
+ _try += 1
+ if _try > tries or (due_time is not None and cur_time > due_time):
+ return
+ yield salt.ext.tornado.gen.sleep(
+ 1
+ if due_time is None
+ else (due_time - cur_time) / max(tries - _try + 1, 1)
+ )
class IPCMessageServer(IPCServer):
diff --git a/salt/utils/event.py b/salt/utils/event.py
index ef048335ae..36b530d1af 100644
--- a/salt/utils/event.py
+++ b/salt/utils/event.py
@@ -270,6 +270,10 @@ class SaltEvent:
# and don't read out events from the buffer on an on-going basis,
# the buffer will grow resulting in big memory usage.
self.connect_pub()
+ self.pusher_send_timeout = self.opts.get(
+ "pusher_send_timeout", self.opts.get("timeout")
+ )
+ self.pusher_send_tries = self.opts.get("pusher_send_tries", 3)
@classmethod
def __load_cache_regex(cls):
@@ -839,10 +843,18 @@ class SaltEvent:
]
)
msg = salt.utils.stringutils.to_bytes(event, "utf-8")
+ if timeout is None:
+ timeout_s = self.pusher_send_timeout
+ else:
+ timeout_s = float(timeout) / 1000
if self._run_io_loop_sync:
with salt.utils.asynchronous.current_ioloop(self.io_loop):
try:
- self.pusher.send(msg)
+ self.pusher.send(
+ msg,
+ timeout=timeout_s,
+ tries=self.pusher_send_tries,
+ )
except Exception as exc: # pylint: disable=broad-except
log.debug(
"Publisher send failed with exception: %s",
@@ -851,7 +863,12 @@ class SaltEvent:
)
raise
else:
- self.io_loop.spawn_callback(self.pusher.send, msg)
+ self.io_loop.spawn_callback(
+ self.pusher.send,
+ msg,
+ timeout=timeout_s,
+ tries=self.pusher_send_tries,
+ )
return True
def fire_master(self, data, tag, timeout=1000):
diff --git a/tests/pytests/unit/utils/event/test_event.py b/tests/pytests/unit/utils/event/test_event.py
index 3eadfaf6ba..fa9e420a93 100644
--- a/tests/pytests/unit/utils/event/test_event.py
+++ b/tests/pytests/unit/utils/event/test_event.py
@@ -447,3 +447,46 @@ def test_event_fire_ret_load():
)
assert mock_log_error.mock_calls[0].args[1] == "minion_id.example.org"
assert mock_log_error.mock_calls[0].args[2] == "".join(test_traceback)
+
+
+@pytest.mark.slow_test
+def test_event_single_timeout_tries(sock_dir):
+ """Test an event is sent with timout and tries"""
+
+ write_calls_count = 0
+ real_stream_write = None
+
+ @salt.ext.tornado.gen.coroutine
+ def write_mock(pack):
+ nonlocal write_calls_count
+ nonlocal real_stream_write
+ write_calls_count += 1
+ if write_calls_count > 3:
+ yield real_stream_write(pack)
+ else:
+ raise salt.ext.tornado.iostream.StreamClosedError()
+
+ with eventpublisher_process(str(sock_dir)), salt.utils.event.MasterEvent(
+ str(sock_dir), listen=True
+ ) as me:
+ me.fire_event({"data": "foo1"}, "evt1")
+ evt1 = me.get_event(tag="evt1")
+ _assert_got_event(evt1, {"data": "foo1"})
+ real_stream_write = me.pusher.stream.write
+ with patch.object(
+ me.pusher,
+ "connected",
+ side_effect=[True, True, False, False, True, True],
+ ), patch.object(
+ me.pusher,
+ "connect",
+ side_effect=salt.ext.tornado.iostream.StreamClosedError,
+ ), patch.object(
+ me.pusher.stream,
+ "write",
+ write_mock,
+ ):
+ me.fire_event({"data": "bar2"}, "evt2", timeout=5000)
+ evt2 = me.get_event(tag="evt2")
+ _assert_got_event(evt2, {"data": "bar2"})
+ assert write_calls_count == 4
--
2.45.0

View File

@ -0,0 +1,33 @@
From d2b4c8170d7ff30bf33623fcbbb6ebb6d7af934e Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Pablo=20Su=C3=A1rez=20Hern=C3=A1ndez?=
<psuarezhernandez@suse.com>
Date: Wed, 25 Mar 2020 13:09:52 +0000
Subject: [PATCH] Make setup.py script to not require setuptools > 9.1
---
setup.py | 8 --------
1 file changed, 8 deletions(-)
diff --git a/setup.py b/setup.py
index e60f1b7085..8ca8a66d45 100755
--- a/setup.py
+++ b/setup.py
@@ -632,14 +632,6 @@ class Install(install):
install.finalize_options(self)
def run(self):
- if LooseVersion(setuptools.__version__) < LooseVersion("9.1"):
- sys.stderr.write(
- "\n\nInstalling Salt requires setuptools >= 9.1\n"
- "Available setuptools version is {}\n\n".format(setuptools.__version__)
- )
- sys.stderr.flush()
- sys.exit(1)
-
# Let's set the running_salt_install attribute so we can add
# _version.txt in the build command
self.distribution.running_salt_install = True
--
2.39.2

View File

@ -0,0 +1,204 @@
From 5ea4add5c8e2bed50b9825edfff7565e5f6124f3 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Pablo=20Su=C3=A1rez=20Hern=C3=A1ndez?=
<psuarezhernandez@suse.com>
Date: Tue, 22 Aug 2023 12:57:44 +0100
Subject: [PATCH] Make sure configured user is properly set by Salt
(bsc#1210994) (#596)
* Make sure Salt user and env are validated before daemon init
* Ensure HOME is always present in env and set according to pwuser
* Set User to salt in salt-master.service files
* Return proper exitcode if user is not valid
* Fix environment also for salt-ssh command
* Increase start_timeout to avoid test to be flaky
---
pkg/common/salt-master.service | 1 +
pkg/old/deb/salt-master.service | 1 +
pkg/old/suse/salt-master.service | 1 +
salt/cli/daemons.py | 27 +++++++++++++++++++
salt/cli/ssh.py | 8 ++++++
salt/utils/verify.py | 4 +--
.../integration/cli/test_salt_minion.py | 4 +--
7 files changed, 42 insertions(+), 4 deletions(-)
diff --git a/pkg/common/salt-master.service b/pkg/common/salt-master.service
index 377c87afeb..257ecc283f 100644
--- a/pkg/common/salt-master.service
+++ b/pkg/common/salt-master.service
@@ -8,6 +8,7 @@ LimitNOFILE=100000
Type=notify
NotifyAccess=all
ExecStart=/usr/bin/salt-master
+User=salt
[Install]
WantedBy=multi-user.target
diff --git a/pkg/old/deb/salt-master.service b/pkg/old/deb/salt-master.service
index b5d0cdd22c..f9dca296b4 100644
--- a/pkg/old/deb/salt-master.service
+++ b/pkg/old/deb/salt-master.service
@@ -7,6 +7,7 @@ LimitNOFILE=16384
Type=notify
NotifyAccess=all
ExecStart=/usr/bin/salt-master
+User=salt
[Install]
WantedBy=multi-user.target
diff --git a/pkg/old/suse/salt-master.service b/pkg/old/suse/salt-master.service
index 9e002d16ca..caabca511c 100644
--- a/pkg/old/suse/salt-master.service
+++ b/pkg/old/suse/salt-master.service
@@ -8,6 +8,7 @@ LimitNOFILE=100000
Type=simple
ExecStart=/usr/bin/salt-master
TasksMax=infinity
+User=salt
[Install]
WantedBy=multi-user.target
diff --git a/salt/cli/daemons.py b/salt/cli/daemons.py
index ecc05c919e..c9ee9ced91 100644
--- a/salt/cli/daemons.py
+++ b/salt/cli/daemons.py
@@ -7,6 +7,7 @@ import logging
import os
import warnings
+import salt.defaults.exitcodes
import salt.utils.kinds as kinds
from salt.exceptions import SaltClientError, SaltSystemExit, get_error_message
from salt.utils import migrations
@@ -73,6 +74,16 @@ class DaemonsMixin: # pylint: disable=no-init
self.__class__.__name__,
)
+ def verify_user(self):
+ """
+ Verify Salt configured user for Salt and shutdown daemon if not valid.
+
+ :return:
+ """
+ if not check_user(self.config["user"]):
+ self.action_log_info("Cannot switch to configured user for Salt. Exiting")
+ self.shutdown(salt.defaults.exitcodes.EX_NOUSER)
+
def action_log_info(self, action):
"""
Say daemon starting.
@@ -178,6 +189,10 @@ class Master(
self.config["interface"] = ip_bracket(self.config["interface"])
migrations.migrate_paths(self.config)
+ # Ensure configured user is valid and environment is properly set
+ # before initializing the rest of the stack.
+ self.verify_user()
+
# Late import so logging works correctly
import salt.master
@@ -290,6 +305,10 @@ class Minion(
transport = self.config.get("transport").lower()
+ # Ensure configured user is valid and environment is properly set
+ # before initializing the rest of the stack.
+ self.verify_user()
+
try:
# Late import so logging works correctly
import salt.minion
@@ -478,6 +497,10 @@ class ProxyMinion(
self.action_log_info("An instance is already running. Exiting")
self.shutdown(1)
+ # Ensure configured user is valid and environment is properly set
+ # before initializing the rest of the stack.
+ self.verify_user()
+
# TODO: AIO core is separate from transport
# Late import so logging works correctly
import salt.minion
@@ -576,6 +599,10 @@ class Syndic(
self.action_log_info('Setting up "{}"'.format(self.config["id"]))
+ # Ensure configured user is valid and environment is properly set
+ # before initializing the rest of the stack.
+ self.verify_user()
+
# Late import so logging works correctly
import salt.minion
diff --git a/salt/cli/ssh.py b/salt/cli/ssh.py
index 6048cb5f58..672f32b8c0 100644
--- a/salt/cli/ssh.py
+++ b/salt/cli/ssh.py
@@ -1,7 +1,9 @@
import sys
import salt.client.ssh
+import salt.defaults.exitcodes
import salt.utils.parsers
+from salt.utils.verify import check_user
class SaltSSH(salt.utils.parsers.SaltSSHOptionParser):
@@ -15,5 +17,11 @@ class SaltSSH(salt.utils.parsers.SaltSSHOptionParser):
# that won't be used anyways with -H or --hosts
self.parse_args()
+ if not check_user(self.config["user"]):
+ self.exit(
+ salt.defaults.exitcodes.EX_NOUSER,
+ "Cannot switch to configured user for Salt. Exiting",
+ )
+
ssh = salt.client.ssh.SSH(self.config)
ssh.run()
diff --git a/salt/utils/verify.py b/salt/utils/verify.py
index 879128f231..7899fbe538 100644
--- a/salt/utils/verify.py
+++ b/salt/utils/verify.py
@@ -335,8 +335,8 @@ def check_user(user):
# We could just reset the whole environment but let's just override
# the variables we can get from pwuser
- if "HOME" in os.environ:
- os.environ["HOME"] = pwuser.pw_dir
+ # We ensure HOME is always present and set according to pwuser
+ os.environ["HOME"] = pwuser.pw_dir
if "SHELL" in os.environ:
os.environ["SHELL"] = pwuser.pw_shell
diff --git a/tests/pytests/integration/cli/test_salt_minion.py b/tests/pytests/integration/cli/test_salt_minion.py
index c0d6013474..bde2dd51d7 100644
--- a/tests/pytests/integration/cli/test_salt_minion.py
+++ b/tests/pytests/integration/cli/test_salt_minion.py
@@ -41,7 +41,7 @@ def test_exit_status_unknown_user(salt_master, minion_id):
factory = salt_master.salt_minion_daemon(
minion_id, overrides={"user": "unknown-user"}
)
- factory.start(start_timeout=10, max_start_attempts=1)
+ factory.start(start_timeout=30, max_start_attempts=1)
assert exc.value.process_result.returncode == salt.defaults.exitcodes.EX_NOUSER
assert "The user is not available." in exc.value.process_result.stderr
@@ -53,7 +53,7 @@ def test_exit_status_unknown_argument(salt_master, minion_id):
"""
with pytest.raises(FactoryNotStarted) as exc:
factory = salt_master.salt_minion_daemon(minion_id)
- factory.start("--unknown-argument", start_timeout=10, max_start_attempts=1)
+ factory.start("--unknown-argument", start_timeout=30, max_start_attempts=1)
assert exc.value.process_result.returncode == salt.defaults.exitcodes.EX_USAGE
assert "Usage" in exc.value.process_result.stderr
--
2.41.0

View File

@ -0,0 +1,850 @@
From a1fc5287d501a1ecdbd259e5bbdd4f7d5d06dd13 Mon Sep 17 00:00:00 2001
From: Alexander Graul <agraul@suse.com>
Date: Fri, 28 Apr 2023 09:41:28 +0200
Subject: [PATCH] Make sure the file client is destroyed upon use
Backport of https://github.com/saltstack/salt/pull/64113
---
salt/client/ssh/wrapper/saltcheck.py | 108 +++----
salt/fileclient.py | 11 -
salt/modules/dockermod.py | 17 +-
salt/pillar/__init__.py | 6 +-
salt/states/ansiblegate.py | 11 +-
salt/utils/asynchronous.py | 2 +-
salt/utils/jinja.py | 53 ++-
salt/utils/mako.py | 7 +
salt/utils/templates.py | 303 +++++++++---------
.../integration/states/test_include.py | 40 +++
.../utils/jinja/test_salt_cache_loader.py | 47 ++-
11 files changed, 330 insertions(+), 275 deletions(-)
create mode 100644 tests/pytests/integration/states/test_include.py
diff --git a/salt/client/ssh/wrapper/saltcheck.py b/salt/client/ssh/wrapper/saltcheck.py
index d47b5cf6883..b0b94593809 100644
--- a/salt/client/ssh/wrapper/saltcheck.py
+++ b/salt/client/ssh/wrapper/saltcheck.py
@@ -9,6 +9,7 @@ import tarfile
import tempfile
from contextlib import closing
+import salt.fileclient
import salt.utils.files
import salt.utils.json
import salt.utils.url
@@ -28,65 +29,62 @@ def update_master_cache(states, saltenv="base"):
# Setup for copying states to gendir
gendir = tempfile.mkdtemp()
trans_tar = salt.utils.files.mkstemp()
- if "cp.fileclient_{}".format(id(__opts__)) not in __context__:
- __context__[
- "cp.fileclient_{}".format(id(__opts__))
- ] = salt.fileclient.get_file_client(__opts__)
-
- # generate cp.list_states output and save to gendir
- cp_output = salt.utils.json.dumps(__salt__["cp.list_states"]())
- cp_output_file = os.path.join(gendir, "cp_output.txt")
- with salt.utils.files.fopen(cp_output_file, "w") as fp:
- fp.write(cp_output)
-
- # cp state directories to gendir
- already_processed = []
- sls_list = salt.utils.args.split_input(states)
- for state_name in sls_list:
- # generate low data for each state and save to gendir
- state_low_file = os.path.join(gendir, state_name + ".low")
- state_low_output = salt.utils.json.dumps(
- __salt__["state.show_low_sls"](state_name)
- )
- with salt.utils.files.fopen(state_low_file, "w") as fp:
- fp.write(state_low_output)
-
- state_name = state_name.replace(".", os.sep)
- if state_name in already_processed:
- log.debug("Already cached state for %s", state_name)
- else:
- file_copy_file = os.path.join(gendir, state_name + ".copy")
- log.debug("copying %s to %s", state_name, gendir)
- qualified_name = salt.utils.url.create(state_name, saltenv)
- # Duplicate cp.get_dir to gendir
- copy_result = __context__["cp.fileclient_{}".format(id(__opts__))].get_dir(
- qualified_name, gendir, saltenv
+ with salt.fileclient.get_file_client(__opts__) as cp_fileclient:
+
+ # generate cp.list_states output and save to gendir
+ cp_output = salt.utils.json.dumps(__salt__["cp.list_states"]())
+ cp_output_file = os.path.join(gendir, "cp_output.txt")
+ with salt.utils.files.fopen(cp_output_file, "w") as fp:
+ fp.write(cp_output)
+
+ # cp state directories to gendir
+ already_processed = []
+ sls_list = salt.utils.args.split_input(states)
+ for state_name in sls_list:
+ # generate low data for each state and save to gendir
+ state_low_file = os.path.join(gendir, state_name + ".low")
+ state_low_output = salt.utils.json.dumps(
+ __salt__["state.show_low_sls"](state_name)
)
- if copy_result:
- copy_result = [dir.replace(gendir, state_cache) for dir in copy_result]
- copy_result_output = salt.utils.json.dumps(copy_result)
- with salt.utils.files.fopen(file_copy_file, "w") as fp:
- fp.write(copy_result_output)
- already_processed.append(state_name)
+ with salt.utils.files.fopen(state_low_file, "w") as fp:
+ fp.write(state_low_output)
+
+ state_name = state_name.replace(".", os.sep)
+ if state_name in already_processed:
+ log.debug("Already cached state for %s", state_name)
else:
- # If files were not copied, assume state.file.sls was given and just copy state
- state_name = os.path.dirname(state_name)
file_copy_file = os.path.join(gendir, state_name + ".copy")
- if state_name in already_processed:
- log.debug("Already cached state for %s", state_name)
+ log.debug("copying %s to %s", state_name, gendir)
+ qualified_name = salt.utils.url.create(state_name, saltenv)
+ # Duplicate cp.get_dir to gendir
+ copy_result = cp_fileclient.get_dir(qualified_name, gendir, saltenv)
+ if copy_result:
+ copy_result = [
+ dir.replace(gendir, state_cache) for dir in copy_result
+ ]
+ copy_result_output = salt.utils.json.dumps(copy_result)
+ with salt.utils.files.fopen(file_copy_file, "w") as fp:
+ fp.write(copy_result_output)
+ already_processed.append(state_name)
else:
- qualified_name = salt.utils.url.create(state_name, saltenv)
- copy_result = __context__[
- "cp.fileclient_{}".format(id(__opts__))
- ].get_dir(qualified_name, gendir, saltenv)
- if copy_result:
- copy_result = [
- dir.replace(gendir, state_cache) for dir in copy_result
- ]
- copy_result_output = salt.utils.json.dumps(copy_result)
- with salt.utils.files.fopen(file_copy_file, "w") as fp:
- fp.write(copy_result_output)
- already_processed.append(state_name)
+ # If files were not copied, assume state.file.sls was given and just copy state
+ state_name = os.path.dirname(state_name)
+ file_copy_file = os.path.join(gendir, state_name + ".copy")
+ if state_name in already_processed:
+ log.debug("Already cached state for %s", state_name)
+ else:
+ qualified_name = salt.utils.url.create(state_name, saltenv)
+ copy_result = cp_fileclient.get_dir(
+ qualified_name, gendir, saltenv
+ )
+ if copy_result:
+ copy_result = [
+ dir.replace(gendir, state_cache) for dir in copy_result
+ ]
+ copy_result_output = salt.utils.json.dumps(copy_result)
+ with salt.utils.files.fopen(file_copy_file, "w") as fp:
+ fp.write(copy_result_output)
+ already_processed.append(state_name)
# turn gendir into tarball and remove gendir
try:
diff --git a/salt/fileclient.py b/salt/fileclient.py
index fef5154a0be..f01a86dd0d4 100644
--- a/salt/fileclient.py
+++ b/salt/fileclient.py
@@ -849,7 +849,6 @@ class Client:
kwargs.pop("env")
kwargs["saltenv"] = saltenv
- url_data = urllib.parse.urlparse(url)
sfn = self.cache_file(url, saltenv, cachedir=cachedir)
if not sfn or not os.path.exists(sfn):
return ""
@@ -1165,13 +1164,8 @@ class RemoteClient(Client):
if not salt.utils.platform.is_windows():
hash_server, stat_server = self.hash_and_stat_file(path, saltenv)
- try:
- mode_server = stat_server[0]
- except (IndexError, TypeError):
- mode_server = None
else:
hash_server = self.hash_file(path, saltenv)
- mode_server = None
# Check if file exists on server, before creating files and
# directories
@@ -1214,13 +1208,8 @@ class RemoteClient(Client):
if dest2check and os.path.isfile(dest2check):
if not salt.utils.platform.is_windows():
hash_local, stat_local = self.hash_and_stat_file(dest2check, saltenv)
- try:
- mode_local = stat_local[0]
- except (IndexError, TypeError):
- mode_local = None
else:
hash_local = self.hash_file(dest2check, saltenv)
- mode_local = None
if hash_local == hash_server:
return dest2check
diff --git a/salt/modules/dockermod.py b/salt/modules/dockermod.py
index f7344b66ac6..69b722f0c95 100644
--- a/salt/modules/dockermod.py
+++ b/salt/modules/dockermod.py
@@ -6667,14 +6667,6 @@ def script_retcode(
)["retcode"]
-def _mk_fileclient():
- """
- Create a file client and add it to the context.
- """
- if "cp.fileclient" not in __context__:
- __context__["cp.fileclient"] = salt.fileclient.get_file_client(__opts__)
-
-
def _generate_tmp_path():
return os.path.join("/tmp", "salt.docker.{}".format(uuid.uuid4().hex[:6]))
@@ -6688,11 +6680,10 @@ def _prepare_trans_tar(name, sls_opts, mods=None, pillar=None, extra_filerefs=""
# reuse it from salt.ssh, however this function should
# be somewhere else
refs = salt.client.ssh.state.lowstate_file_refs(chunks, extra_filerefs)
- _mk_fileclient()
- trans_tar = salt.client.ssh.state.prep_trans_tar(
- __context__["cp.fileclient"], chunks, refs, pillar, name
- )
- return trans_tar
+ with salt.fileclient.get_file_client(__opts__) as fileclient:
+ return salt.client.ssh.state.prep_trans_tar(
+ fileclient, chunks, refs, pillar, name
+ )
def _compile_state(sls_opts, mods=None):
diff --git a/salt/pillar/__init__.py b/salt/pillar/__init__.py
index 0dfab4cc579..26312b3bd53 100644
--- a/salt/pillar/__init__.py
+++ b/salt/pillar/__init__.py
@@ -9,7 +9,6 @@ import logging
import os
import sys
import traceback
-import uuid
import salt.channel.client
import salt.ext.tornado.gen
@@ -1351,6 +1350,11 @@ class Pillar:
if hasattr(self, "_closing") and self._closing:
return
self._closing = True
+ if self.client:
+ try:
+ self.client.destroy()
+ except AttributeError:
+ pass
# pylint: disable=W1701
def __del__(self):
diff --git a/salt/states/ansiblegate.py b/salt/states/ansiblegate.py
index 7fd4deb6c2a..9abd418c42c 100644
--- a/salt/states/ansiblegate.py
+++ b/salt/states/ansiblegate.py
@@ -32,12 +32,10 @@ state:
- state: installed
"""
-
import logging
import os
import sys
-# Import salt modules
import salt.fileclient
import salt.utils.decorators.path
from salt.utils.decorators import depends
@@ -108,13 +106,6 @@ def __virtual__():
return __virtualname__
-def _client():
- """
- Get a fileclient
- """
- return salt.fileclient.get_file_client(__opts__)
-
-
def _changes(plays):
"""
Find changes in ansible return data
@@ -171,7 +162,7 @@ def playbooks(name, rundir=None, git_repo=None, git_kwargs=None, ansible_kwargs=
}
if git_repo:
if not isinstance(rundir, str) or not os.path.isdir(rundir):
- with _client() as client:
+ with salt.fileclient.get_file_client(__opts__) as client:
rundir = client._extrn_path(git_repo, "base")
log.trace("rundir set to %s", rundir)
if not isinstance(git_kwargs, dict):
diff --git a/salt/utils/asynchronous.py b/salt/utils/asynchronous.py
index 2a858feee98..0c645bbc3bb 100644
--- a/salt/utils/asynchronous.py
+++ b/salt/utils/asynchronous.py
@@ -131,7 +131,7 @@ class SyncWrapper:
result = io_loop.run_sync(lambda: getattr(self.obj, key)(*args, **kwargs))
results.append(True)
results.append(result)
- except Exception as exc: # pylint: disable=broad-except
+ except Exception: # pylint: disable=broad-except
results.append(False)
results.append(sys.exc_info())
diff --git a/salt/utils/jinja.py b/salt/utils/jinja.py
index fcc5aec497e..a6a8a279605 100644
--- a/salt/utils/jinja.py
+++ b/salt/utils/jinja.py
@@ -58,19 +58,6 @@ class SaltCacheLoader(BaseLoader):
and only loaded once per loader instance.
"""
- _cached_pillar_client = None
- _cached_client = None
-
- @classmethod
- def shutdown(cls):
- for attr in ("_cached_client", "_cached_pillar_client"):
- client = getattr(cls, attr, None)
- if client is not None:
- # PillarClient and LocalClient objects do not have a destroy method
- if hasattr(client, "destroy"):
- client.destroy()
- setattr(cls, attr, None)
-
def __init__(
self,
opts,
@@ -93,8 +80,7 @@ class SaltCacheLoader(BaseLoader):
log.debug("Jinja search path: %s", self.searchpath)
self.cached = []
self._file_client = _file_client
- # Instantiate the fileclient
- self.file_client()
+ self._close_file_client = _file_client is None
def file_client(self):
"""
@@ -108,18 +94,10 @@ class SaltCacheLoader(BaseLoader):
or not hasattr(self._file_client, "opts")
or self._file_client.opts["file_roots"] != self.opts["file_roots"]
):
- attr = "_cached_pillar_client" if self.pillar_rend else "_cached_client"
- cached_client = getattr(self, attr, None)
- if (
- cached_client is None
- or not hasattr(cached_client, "opts")
- or cached_client.opts["file_roots"] != self.opts["file_roots"]
- ):
- cached_client = salt.fileclient.get_file_client(
- self.opts, self.pillar_rend
- )
- setattr(SaltCacheLoader, attr, cached_client)
- self._file_client = cached_client
+ self._file_client = salt.fileclient.get_file_client(
+ self.opts, self.pillar_rend
+ )
+ self._close_file_client = True
return self._file_client
def cache_file(self, template):
@@ -221,6 +199,27 @@ class SaltCacheLoader(BaseLoader):
# there is no template file within searchpaths
raise TemplateNotFound(template)
+ def destroy(self):
+ if self._close_file_client is False:
+ return
+ if self._file_client is None:
+ return
+ file_client = self._file_client
+ self._file_client = None
+
+ try:
+ file_client.destroy()
+ except AttributeError:
+ # PillarClient and LocalClient objects do not have a destroy method
+ pass
+
+ def __enter__(self):
+ self.file_client()
+ return self
+
+ def __exit__(self, *args):
+ self.destroy()
+
class PrintableDict(OrderedDict):
"""
diff --git a/salt/utils/mako.py b/salt/utils/mako.py
index 69618de9837..037d5d86deb 100644
--- a/salt/utils/mako.py
+++ b/salt/utils/mako.py
@@ -97,3 +97,10 @@ if HAS_MAKO:
self.cache[fpath] = self.file_client().get_file(
fpath, "", True, self.saltenv
)
+
+ def destroy(self):
+ if self.client:
+ try:
+ self.client.destroy()
+ except AttributeError:
+ pass
diff --git a/salt/utils/templates.py b/salt/utils/templates.py
index 4947b820a36..4a8adf2a14f 100644
--- a/salt/utils/templates.py
+++ b/salt/utils/templates.py
@@ -362,163 +362,169 @@ def render_jinja_tmpl(tmplstr, context, tmplpath=None):
elif tmplstr.endswith("\n"):
newline = "\n"
- if not saltenv:
- if tmplpath:
- loader = jinja2.FileSystemLoader(os.path.dirname(tmplpath))
- else:
- loader = salt.utils.jinja.SaltCacheLoader(
- opts,
- saltenv,
- pillar_rend=context.get("_pillar_rend", False),
- _file_client=file_client,
- )
+ try:
+ if not saltenv:
+ if tmplpath:
+ loader = jinja2.FileSystemLoader(os.path.dirname(tmplpath))
+ else:
+ loader = salt.utils.jinja.SaltCacheLoader(
+ opts,
+ saltenv,
+ pillar_rend=context.get("_pillar_rend", False),
+ _file_client=file_client,
+ )
- env_args = {"extensions": [], "loader": loader}
-
- if hasattr(jinja2.ext, "with_"):
- env_args["extensions"].append("jinja2.ext.with_")
- if hasattr(jinja2.ext, "do"):
- env_args["extensions"].append("jinja2.ext.do")
- if hasattr(jinja2.ext, "loopcontrols"):
- env_args["extensions"].append("jinja2.ext.loopcontrols")
- env_args["extensions"].append(salt.utils.jinja.SerializerExtension)
-
- opt_jinja_env = opts.get("jinja_env", {})
- opt_jinja_sls_env = opts.get("jinja_sls_env", {})
-
- opt_jinja_env = opt_jinja_env if isinstance(opt_jinja_env, dict) else {}
- opt_jinja_sls_env = opt_jinja_sls_env if isinstance(opt_jinja_sls_env, dict) else {}
-
- # Pass through trim_blocks and lstrip_blocks Jinja parameters
- # trim_blocks removes newlines around Jinja blocks
- # lstrip_blocks strips tabs and spaces from the beginning of
- # line to the start of a block.
- if opts.get("jinja_trim_blocks", False):
- log.debug("Jinja2 trim_blocks is enabled")
- log.warning(
- "jinja_trim_blocks is deprecated and will be removed in a future release,"
- " please use jinja_env and/or jinja_sls_env instead"
- )
- opt_jinja_env["trim_blocks"] = True
- opt_jinja_sls_env["trim_blocks"] = True
- if opts.get("jinja_lstrip_blocks", False):
- log.debug("Jinja2 lstrip_blocks is enabled")
- log.warning(
- "jinja_lstrip_blocks is deprecated and will be removed in a future release,"
- " please use jinja_env and/or jinja_sls_env instead"
- )
- opt_jinja_env["lstrip_blocks"] = True
- opt_jinja_sls_env["lstrip_blocks"] = True
-
- def opt_jinja_env_helper(opts, optname):
- for k, v in opts.items():
- k = k.lower()
- if hasattr(jinja2.defaults, k.upper()):
- log.debug("Jinja2 environment %s was set to %s by %s", k, v, optname)
- env_args[k] = v
- else:
- log.warning("Jinja2 environment %s is not recognized", k)
+ env_args = {"extensions": [], "loader": loader}
- if "sls" in context and context["sls"] != "":
- opt_jinja_env_helper(opt_jinja_sls_env, "jinja_sls_env")
- else:
- opt_jinja_env_helper(opt_jinja_env, "jinja_env")
+ if hasattr(jinja2.ext, "with_"):
+ env_args["extensions"].append("jinja2.ext.with_")
+ if hasattr(jinja2.ext, "do"):
+ env_args["extensions"].append("jinja2.ext.do")
+ if hasattr(jinja2.ext, "loopcontrols"):
+ env_args["extensions"].append("jinja2.ext.loopcontrols")
+ env_args["extensions"].append(salt.utils.jinja.SerializerExtension)
- if opts.get("allow_undefined", False):
- jinja_env = jinja2.sandbox.SandboxedEnvironment(**env_args)
- else:
- jinja_env = jinja2.sandbox.SandboxedEnvironment(
- undefined=jinja2.StrictUndefined, **env_args
- )
+ opt_jinja_env = opts.get("jinja_env", {})
+ opt_jinja_sls_env = opts.get("jinja_sls_env", {})
- indent_filter = jinja_env.filters.get("indent")
- jinja_env.tests.update(JinjaTest.salt_jinja_tests)
- jinja_env.filters.update(JinjaFilter.salt_jinja_filters)
- if salt.utils.jinja.JINJA_VERSION >= Version("2.11"):
- # Use the existing indent filter on Jinja versions where it's not broken
- jinja_env.filters["indent"] = indent_filter
- jinja_env.globals.update(JinjaGlobal.salt_jinja_globals)
-
- # globals
- jinja_env.globals["odict"] = OrderedDict
- jinja_env.globals["show_full_context"] = salt.utils.jinja.show_full_context
-
- jinja_env.tests["list"] = salt.utils.data.is_list
-
- decoded_context = {}
- for key, value in context.items():
- if not isinstance(value, str):
- if isinstance(value, NamedLoaderContext):
- decoded_context[key] = value.value()
- else:
- decoded_context[key] = value
- continue
+ opt_jinja_env = opt_jinja_env if isinstance(opt_jinja_env, dict) else {}
+ opt_jinja_sls_env = (
+ opt_jinja_sls_env if isinstance(opt_jinja_sls_env, dict) else {}
+ )
- try:
- decoded_context[key] = salt.utils.stringutils.to_unicode(
- value, encoding=SLS_ENCODING
+ # Pass through trim_blocks and lstrip_blocks Jinja parameters
+ # trim_blocks removes newlines around Jinja blocks
+ # lstrip_blocks strips tabs and spaces from the beginning of
+ # line to the start of a block.
+ if opts.get("jinja_trim_blocks", False):
+ log.debug("Jinja2 trim_blocks is enabled")
+ log.warning(
+ "jinja_trim_blocks is deprecated and will be removed in a future release,"
+ " please use jinja_env and/or jinja_sls_env instead"
)
- except UnicodeDecodeError as ex:
- log.debug(
- "Failed to decode using default encoding (%s), trying system encoding",
- SLS_ENCODING,
+ opt_jinja_env["trim_blocks"] = True
+ opt_jinja_sls_env["trim_blocks"] = True
+ if opts.get("jinja_lstrip_blocks", False):
+ log.debug("Jinja2 lstrip_blocks is enabled")
+ log.warning(
+ "jinja_lstrip_blocks is deprecated and will be removed in a future release,"
+ " please use jinja_env and/or jinja_sls_env instead"
)
- decoded_context[key] = salt.utils.data.decode(value)
+ opt_jinja_env["lstrip_blocks"] = True
+ opt_jinja_sls_env["lstrip_blocks"] = True
+
+ def opt_jinja_env_helper(opts, optname):
+ for k, v in opts.items():
+ k = k.lower()
+ if hasattr(jinja2.defaults, k.upper()):
+ log.debug(
+ "Jinja2 environment %s was set to %s by %s", k, v, optname
+ )
+ env_args[k] = v
+ else:
+ log.warning("Jinja2 environment %s is not recognized", k)
- jinja_env.globals.update(decoded_context)
- try:
- template = jinja_env.from_string(tmplstr)
- output = template.render(**decoded_context)
- except jinja2.exceptions.UndefinedError as exc:
- trace = traceback.extract_tb(sys.exc_info()[2])
- line, out = _get_jinja_error(trace, context=decoded_context)
- if not line:
- tmplstr = ""
- raise SaltRenderError("Jinja variable {}{}".format(exc, out), line, tmplstr)
- except (
- jinja2.exceptions.TemplateRuntimeError,
- jinja2.exceptions.TemplateSyntaxError,
- jinja2.exceptions.SecurityError,
- ) as exc:
- trace = traceback.extract_tb(sys.exc_info()[2])
- line, out = _get_jinja_error(trace, context=decoded_context)
- if not line:
- tmplstr = ""
- raise SaltRenderError(
- "Jinja syntax error: {}{}".format(exc, out), line, tmplstr
- )
- except (SaltInvocationError, CommandExecutionError) as exc:
- trace = traceback.extract_tb(sys.exc_info()[2])
- line, out = _get_jinja_error(trace, context=decoded_context)
- if not line:
- tmplstr = ""
- raise SaltRenderError(
- "Problem running salt function in Jinja template: {}{}".format(exc, out),
- line,
- tmplstr,
- )
- except Exception as exc: # pylint: disable=broad-except
- tracestr = traceback.format_exc()
- trace = traceback.extract_tb(sys.exc_info()[2])
- line, out = _get_jinja_error(trace, context=decoded_context)
- if not line:
- tmplstr = ""
+ if "sls" in context and context["sls"] != "":
+ opt_jinja_env_helper(opt_jinja_sls_env, "jinja_sls_env")
else:
- tmplstr += "\n{}".format(tracestr)
- log.debug("Jinja Error")
- log.debug("Exception:", exc_info=True)
- log.debug("Out: %s", out)
- log.debug("Line: %s", line)
- log.debug("TmplStr: %s", tmplstr)
- log.debug("TraceStr: %s", tracestr)
+ opt_jinja_env_helper(opt_jinja_env, "jinja_env")
- raise SaltRenderError(
- "Jinja error: {}{}".format(exc, out), line, tmplstr, trace=tracestr
- )
+ if opts.get("allow_undefined", False):
+ jinja_env = jinja2.sandbox.SandboxedEnvironment(**env_args)
+ else:
+ jinja_env = jinja2.sandbox.SandboxedEnvironment(
+ undefined=jinja2.StrictUndefined, **env_args
+ )
+
+ indent_filter = jinja_env.filters.get("indent")
+ jinja_env.tests.update(JinjaTest.salt_jinja_tests)
+ jinja_env.filters.update(JinjaFilter.salt_jinja_filters)
+ if salt.utils.jinja.JINJA_VERSION >= Version("2.11"):
+ # Use the existing indent filter on Jinja versions where it's not broken
+ jinja_env.filters["indent"] = indent_filter
+ jinja_env.globals.update(JinjaGlobal.salt_jinja_globals)
+
+ # globals
+ jinja_env.globals["odict"] = OrderedDict
+ jinja_env.globals["show_full_context"] = salt.utils.jinja.show_full_context
+
+ jinja_env.tests["list"] = salt.utils.data.is_list
+
+ decoded_context = {}
+ for key, value in context.items():
+ if not isinstance(value, str):
+ if isinstance(value, NamedLoaderContext):
+ decoded_context[key] = value.value()
+ else:
+ decoded_context[key] = value
+ continue
+
+ try:
+ decoded_context[key] = salt.utils.stringutils.to_unicode(
+ value, encoding=SLS_ENCODING
+ )
+ except UnicodeDecodeError:
+ log.debug(
+ "Failed to decode using default encoding (%s), trying system encoding",
+ SLS_ENCODING,
+ )
+ decoded_context[key] = salt.utils.data.decode(value)
+
+ jinja_env.globals.update(decoded_context)
+ try:
+ template = jinja_env.from_string(tmplstr)
+ output = template.render(**decoded_context)
+ except jinja2.exceptions.UndefinedError as exc:
+ trace = traceback.extract_tb(sys.exc_info()[2])
+ line, out = _get_jinja_error(trace, context=decoded_context)
+ if not line:
+ tmplstr = ""
+ raise SaltRenderError("Jinja variable {}{}".format(exc, out), line, tmplstr)
+ except (
+ jinja2.exceptions.TemplateRuntimeError,
+ jinja2.exceptions.TemplateSyntaxError,
+ jinja2.exceptions.SecurityError,
+ ) as exc:
+ trace = traceback.extract_tb(sys.exc_info()[2])
+ line, out = _get_jinja_error(trace, context=decoded_context)
+ if not line:
+ tmplstr = ""
+ raise SaltRenderError(
+ "Jinja syntax error: {}{}".format(exc, out), line, tmplstr
+ )
+ except (SaltInvocationError, CommandExecutionError) as exc:
+ trace = traceback.extract_tb(sys.exc_info()[2])
+ line, out = _get_jinja_error(trace, context=decoded_context)
+ if not line:
+ tmplstr = ""
+ raise SaltRenderError(
+ "Problem running salt function in Jinja template: {}{}".format(
+ exc, out
+ ),
+ line,
+ tmplstr,
+ )
+ except Exception as exc: # pylint: disable=broad-except
+ tracestr = traceback.format_exc()
+ trace = traceback.extract_tb(sys.exc_info()[2])
+ line, out = _get_jinja_error(trace, context=decoded_context)
+ if not line:
+ tmplstr = ""
+ else:
+ tmplstr += "\n{}".format(tracestr)
+ log.debug("Jinja Error")
+ log.debug("Exception:", exc_info=True)
+ log.debug("Out: %s", out)
+ log.debug("Line: %s", line)
+ log.debug("TmplStr: %s", tmplstr)
+ log.debug("TraceStr: %s", tracestr)
+
+ raise SaltRenderError(
+ "Jinja error: {}{}".format(exc, out), line, tmplstr, trace=tracestr
+ )
finally:
- if loader and hasattr(loader, "_file_client"):
- if hasattr(loader._file_client, "destroy"):
- loader._file_client.destroy()
+ if loader and isinstance(loader, salt.utils.jinja.SaltCacheLoader):
+ loader.destroy()
# Workaround a bug in Jinja that removes the final newline
# (https://github.com/mitsuhiko/jinja2/issues/75)
@@ -569,9 +575,8 @@ def render_mako_tmpl(tmplstr, context, tmplpath=None):
except Exception: # pylint: disable=broad-except
raise SaltRenderError(mako.exceptions.text_error_template().render())
finally:
- if lookup and hasattr(lookup, "_file_client"):
- if hasattr(lookup._file_client, "destroy"):
- lookup._file_client.destroy()
+ if lookup and isinstance(lookup, SaltMakoTemplateLookup):
+ lookup.destroy()
def render_wempy_tmpl(tmplstr, context, tmplpath=None):
diff --git a/tests/pytests/integration/states/test_include.py b/tests/pytests/integration/states/test_include.py
new file mode 100644
index 00000000000..f814328c5e4
--- /dev/null
+++ b/tests/pytests/integration/states/test_include.py
@@ -0,0 +1,40 @@
+"""
+Integration tests for the jinja includes in states
+"""
+import logging
+
+import pytest
+
+log = logging.getLogger(__name__)
+
+
+@pytest.mark.slow_test
+def test_issue_64111(salt_master, salt_minion, salt_call_cli):
+ # This needs to be an integration test. A functional test does not trigger
+ # the issue fixed.
+
+ macros_jinja = """
+ {% macro a_jinja_macro(arg) -%}
+ {{ arg }}
+ {%- endmacro %}
+ """
+
+ init_sls = """
+ include:
+ - common.file1
+ """
+
+ file1_sls = """
+ {% from 'common/macros.jinja' import a_jinja_macro with context %}
+
+ a state id:
+ cmd.run:
+ - name: echo {{ a_jinja_macro("hello world") }}
+ """
+ tf = salt_master.state_tree.base.temp_file
+
+ with tf("common/macros.jinja", macros_jinja):
+ with tf("common/init.sls", init_sls):
+ with tf("common/file1.sls", file1_sls):
+ ret = salt_call_cli.run("state.apply", "common")
+ assert ret.returncode == 0
diff --git a/tests/pytests/unit/utils/jinja/test_salt_cache_loader.py b/tests/pytests/unit/utils/jinja/test_salt_cache_loader.py
index 38c5ce5b724..e0f5fa158ff 100644
--- a/tests/pytests/unit/utils/jinja/test_salt_cache_loader.py
+++ b/tests/pytests/unit/utils/jinja/test_salt_cache_loader.py
@@ -15,7 +15,7 @@ import salt.utils.json # pylint: disable=unused-import
import salt.utils.stringutils # pylint: disable=unused-import
import salt.utils.yaml # pylint: disable=unused-import
from salt.utils.jinja import SaltCacheLoader
-from tests.support.mock import Mock, patch
+from tests.support.mock import Mock, call, patch
@pytest.fixture
@@ -224,14 +224,45 @@ def test_file_client_kwarg(minion_opts, mock_file_client):
assert loader._file_client is mock_file_client
-def test_cache_loader_shutdown(minion_opts, mock_file_client):
+def test_cache_loader_passed_file_client(minion_opts, mock_file_client):
"""
The shudown method can be called without raising an exception when the
file_client does not have a destroy method
"""
- assert not hasattr(mock_file_client, "destroy")
- mock_file_client.opts = minion_opts
- loader = SaltCacheLoader(minion_opts, _file_client=mock_file_client)
- assert loader._file_client is mock_file_client
- # Shutdown method should not raise any exceptions
- loader.shutdown()
+ # Test SaltCacheLoader creating and destroying the file client created
+ file_client = Mock()
+ with patch("salt.fileclient.get_file_client", return_value=file_client):
+ loader = SaltCacheLoader(minion_opts)
+ assert loader._file_client is None
+ with loader:
+ assert loader._file_client is file_client
+ assert loader._file_client is None
+ assert file_client.mock_calls == [call.destroy()]
+
+ # Test SaltCacheLoader reusing the file client passed
+ file_client = Mock()
+ file_client.opts = {"file_roots": minion_opts["file_roots"]}
+ with patch("salt.fileclient.get_file_client", return_value=Mock()):
+ loader = SaltCacheLoader(minion_opts, _file_client=file_client)
+ assert loader._file_client is file_client
+ with loader:
+ assert loader._file_client is file_client
+ assert loader._file_client is file_client
+ assert file_client.mock_calls == []
+
+ # Test SaltCacheLoader creating a client even though a file client was
+ # passed because the "file_roots" option is different, and, as such,
+ # the destroy method on the new file client is called, but not on the
+ # file client passed in.
+ file_client = Mock()
+ file_client.opts = {"file_roots": ""}
+ new_file_client = Mock()
+ with patch("salt.fileclient.get_file_client", return_value=new_file_client):
+ loader = SaltCacheLoader(minion_opts, _file_client=file_client)
+ assert loader._file_client is file_client
+ with loader:
+ assert loader._file_client is not file_client
+ assert loader._file_client is new_file_client
+ assert loader._file_client is None
+ assert file_client.mock_calls == []
+ assert new_file_client.mock_calls == [call.destroy()]
--
2.40.0

View File

@ -0,0 +1,480 @@
From c1408333364ac25ff5d316afa9674f7687217b0c Mon Sep 17 00:00:00 2001
From: Dominik Gedon <dgedon@suse.de>
Date: Thu, 3 Aug 2023 11:08:21 +0200
Subject: [PATCH] Mark Salt 3006 as released (#586)
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit
* Mark Salt 3006 as released
Without this, commands like
```
salt '*' salt_version.equal 'Sulfur'
```
will not work properly and return False although Salt 3006 is used.
Signed-off-by: Dominik Gedon <dominik.gedon@suse.com>
* Fix detection of Salt codename by salt_version module
* Fix mess with version detection bad version definition
* Add some new and fix unit tests
* Fix SaltStackVersion string for new versions format
* Do not crash when passing numbers to 'salt_version.get_release_number'
* Fix salt_version execution module documentation
---------
Signed-off-by: Dominik Gedon <dominik.gedon@suse.com>
Co-authored-by: Pablo Suárez Hernández <psuarezhernandez@suse.com>
---
salt/modules/salt_version.py | 8 +-
salt/version.py | 218 +++++++++---------
.../pytests/unit/modules/test_salt_version.py | 55 ++++-
tests/pytests/unit/test_version.py | 10 +-
4 files changed, 176 insertions(+), 115 deletions(-)
diff --git a/salt/modules/salt_version.py b/salt/modules/salt_version.py
index 1b5421fee4..99dae5f61a 100644
--- a/salt/modules/salt_version.py
+++ b/salt/modules/salt_version.py
@@ -20,7 +20,7 @@ A simple example might be something like the following:
.. code-block:: jinja
{# a boolean check #}
- {% set option_deprecated = salt['salt_version.less_than']("3001") %}
+ {% set option_deprecated = salt['salt_version.less_than']("Sodium") %}
{% if option_deprecated %}
<use old syntax>
@@ -35,6 +35,7 @@ import logging
import salt.utils.versions
import salt.version
+from salt.exceptions import CommandExecutionError
log = logging.getLogger(__name__)
@@ -51,7 +52,7 @@ def __virtual__():
def get_release_number(name):
"""
Returns the release number of a given release code name in a
- ``MAJOR.PATCH`` format.
+ ``MAJOR.PATCH`` format (for Salt versions < 3000) or ``MAJOR`` for newer Salt versions.
If the release name has not been given an assigned release number, the
function returns a string. If the release cannot be found, it returns
@@ -66,6 +67,9 @@ def get_release_number(name):
salt '*' salt_version.get_release_number 'Oxygen'
"""
+ if not isinstance(name, str):
+ raise CommandExecutionError("'name' argument must be a string")
+
name = name.lower()
version_map = salt.version.SaltStackVersion.LNAMES
version = version_map.get(name)
diff --git a/salt/version.py b/salt/version.py
index 67719bd020..44372830b2 100644
--- a/salt/version.py
+++ b/salt/version.py
@@ -77,109 +77,109 @@ class SaltVersionsInfo(type):
ALUMINIUM = SaltVersion("Aluminium" , info=3003, released=True)
SILICON = SaltVersion("Silicon" , info=3004, released=True)
PHOSPHORUS = SaltVersion("Phosphorus" , info=3005, released=True)
- SULFUR = SaltVersion("Sulfur" , info=(3006, 0))
- CHLORINE = SaltVersion("Chlorine" , info=(3007, 0))
- ARGON = SaltVersion("Argon" , info=(3008, 0))
- POTASSIUM = SaltVersion("Potassium" , info=(3009, 0))
- CALCIUM = SaltVersion("Calcium" , info=(3010, 0))
- SCANDIUM = SaltVersion("Scandium" , info=(3011, 0))
- TITANIUM = SaltVersion("Titanium" , info=(3012, 0))
- VANADIUM = SaltVersion("Vanadium" , info=(3013, 0))
- CHROMIUM = SaltVersion("Chromium" , info=(3014, 0))
- MANGANESE = SaltVersion("Manganese" , info=(3015, 0))
- IRON = SaltVersion("Iron" , info=(3016, 0))
- COBALT = SaltVersion("Cobalt" , info=(3017, 0))
- NICKEL = SaltVersion("Nickel" , info=(3018, 0))
- COPPER = SaltVersion("Copper" , info=(3019, 0))
- ZINC = SaltVersion("Zinc" , info=(3020, 0))
- GALLIUM = SaltVersion("Gallium" , info=(3021, 0))
- GERMANIUM = SaltVersion("Germanium" , info=(3022, 0))
- ARSENIC = SaltVersion("Arsenic" , info=(3023, 0))
- SELENIUM = SaltVersion("Selenium" , info=(3024, 0))
- BROMINE = SaltVersion("Bromine" , info=(3025, 0))
- KRYPTON = SaltVersion("Krypton" , info=(3026, 0))
- RUBIDIUM = SaltVersion("Rubidium" , info=(3027, 0))
- STRONTIUM = SaltVersion("Strontium" , info=(3028, 0))
- YTTRIUM = SaltVersion("Yttrium" , info=(3029, 0))
- ZIRCONIUM = SaltVersion("Zirconium" , info=(3030, 0))
- NIOBIUM = SaltVersion("Niobium" , info=(3031, 0))
- MOLYBDENUM = SaltVersion("Molybdenum" , info=(3032, 0))
- TECHNETIUM = SaltVersion("Technetium" , info=(3033, 0))
- RUTHENIUM = SaltVersion("Ruthenium" , info=(3034, 0))
- RHODIUM = SaltVersion("Rhodium" , info=(3035, 0))
- PALLADIUM = SaltVersion("Palladium" , info=(3036, 0))
- SILVER = SaltVersion("Silver" , info=(3037, 0))
- CADMIUM = SaltVersion("Cadmium" , info=(3038, 0))
- INDIUM = SaltVersion("Indium" , info=(3039, 0))
- TIN = SaltVersion("Tin" , info=(3040, 0))
- ANTIMONY = SaltVersion("Antimony" , info=(3041, 0))
- TELLURIUM = SaltVersion("Tellurium" , info=(3042, 0))
- IODINE = SaltVersion("Iodine" , info=(3043, 0))
- XENON = SaltVersion("Xenon" , info=(3044, 0))
- CESIUM = SaltVersion("Cesium" , info=(3045, 0))
- BARIUM = SaltVersion("Barium" , info=(3046, 0))
- LANTHANUM = SaltVersion("Lanthanum" , info=(3047, 0))
- CERIUM = SaltVersion("Cerium" , info=(3048, 0))
- PRASEODYMIUM = SaltVersion("Praseodymium" , info=(3049, 0))
- NEODYMIUM = SaltVersion("Neodymium" , info=(3050, 0))
- PROMETHIUM = SaltVersion("Promethium" , info=(3051, 0))
- SAMARIUM = SaltVersion("Samarium" , info=(3052, 0))
- EUROPIUM = SaltVersion("Europium" , info=(3053, 0))
- GADOLINIUM = SaltVersion("Gadolinium" , info=(3054, 0))
- TERBIUM = SaltVersion("Terbium" , info=(3055, 0))
- DYSPROSIUM = SaltVersion("Dysprosium" , info=(3056, 0))
- HOLMIUM = SaltVersion("Holmium" , info=(3057, 0))
- ERBIUM = SaltVersion("Erbium" , info=(3058, 0))
- THULIUM = SaltVersion("Thulium" , info=(3059, 0))
- YTTERBIUM = SaltVersion("Ytterbium" , info=(3060, 0))
- LUTETIUM = SaltVersion("Lutetium" , info=(3061, 0))
- HAFNIUM = SaltVersion("Hafnium" , info=(3062, 0))
- TANTALUM = SaltVersion("Tantalum" , info=(3063, 0))
- TUNGSTEN = SaltVersion("Tungsten" , info=(3064, 0))
- RHENIUM = SaltVersion("Rhenium" , info=(3065, 0))
- OSMIUM = SaltVersion("Osmium" , info=(3066, 0))
- IRIDIUM = SaltVersion("Iridium" , info=(3067, 0))
- PLATINUM = SaltVersion("Platinum" , info=(3068, 0))
- GOLD = SaltVersion("Gold" , info=(3069, 0))
- MERCURY = SaltVersion("Mercury" , info=(3070, 0))
- THALLIUM = SaltVersion("Thallium" , info=(3071, 0))
- LEAD = SaltVersion("Lead" , info=(3072, 0))
- BISMUTH = SaltVersion("Bismuth" , info=(3073, 0))
- POLONIUM = SaltVersion("Polonium" , info=(3074, 0))
- ASTATINE = SaltVersion("Astatine" , info=(3075, 0))
- RADON = SaltVersion("Radon" , info=(3076, 0))
- FRANCIUM = SaltVersion("Francium" , info=(3077, 0))
- RADIUM = SaltVersion("Radium" , info=(3078, 0))
- ACTINIUM = SaltVersion("Actinium" , info=(3079, 0))
- THORIUM = SaltVersion("Thorium" , info=(3080, 0))
- PROTACTINIUM = SaltVersion("Protactinium" , info=(3081, 0))
- URANIUM = SaltVersion("Uranium" , info=(3082, 0))
- NEPTUNIUM = SaltVersion("Neptunium" , info=(3083, 0))
- PLUTONIUM = SaltVersion("Plutonium" , info=(3084, 0))
- AMERICIUM = SaltVersion("Americium" , info=(3085, 0))
- CURIUM = SaltVersion("Curium" , info=(3086, 0))
- BERKELIUM = SaltVersion("Berkelium" , info=(3087, 0))
- CALIFORNIUM = SaltVersion("Californium" , info=(3088, 0))
- EINSTEINIUM = SaltVersion("Einsteinium" , info=(3089, 0))
- FERMIUM = SaltVersion("Fermium" , info=(3090, 0))
- MENDELEVIUM = SaltVersion("Mendelevium" , info=(3091, 0))
- NOBELIUM = SaltVersion("Nobelium" , info=(3092, 0))
- LAWRENCIUM = SaltVersion("Lawrencium" , info=(3093, 0))
- RUTHERFORDIUM = SaltVersion("Rutherfordium", info=(3094, 0))
- DUBNIUM = SaltVersion("Dubnium" , info=(3095, 0))
- SEABORGIUM = SaltVersion("Seaborgium" , info=(3096, 0))
- BOHRIUM = SaltVersion("Bohrium" , info=(3097, 0))
- HASSIUM = SaltVersion("Hassium" , info=(3098, 0))
- MEITNERIUM = SaltVersion("Meitnerium" , info=(3099, 0))
- DARMSTADTIUM = SaltVersion("Darmstadtium" , info=(3100, 0))
- ROENTGENIUM = SaltVersion("Roentgenium" , info=(3101, 0))
- COPERNICIUM = SaltVersion("Copernicium" , info=(3102, 0))
- NIHONIUM = SaltVersion("Nihonium" , info=(3103, 0))
- FLEROVIUM = SaltVersion("Flerovium" , info=(3104, 0))
- MOSCOVIUM = SaltVersion("Moscovium" , info=(3105, 0))
- LIVERMORIUM = SaltVersion("Livermorium" , info=(3106, 0))
- TENNESSINE = SaltVersion("Tennessine" , info=(3107, 0))
- OGANESSON = SaltVersion("Oganesson" , info=(3108, 0))
+ SULFUR = SaltVersion("Sulfur" , info=3006, released=True)
+ CHLORINE = SaltVersion("Chlorine" , info=3007)
+ ARGON = SaltVersion("Argon" , info=3008)
+ POTASSIUM = SaltVersion("Potassium" , info=3009)
+ CALCIUM = SaltVersion("Calcium" , info=3010)
+ SCANDIUM = SaltVersion("Scandium" , info=3011)
+ TITANIUM = SaltVersion("Titanium" , info=3012)
+ VANADIUM = SaltVersion("Vanadium" , info=3013)
+ CHROMIUM = SaltVersion("Chromium" , info=3014)
+ MANGANESE = SaltVersion("Manganese" , info=3015)
+ IRON = SaltVersion("Iron" , info=3016)
+ COBALT = SaltVersion("Cobalt" , info=3017)
+ NICKEL = SaltVersion("Nickel" , info=3018)
+ COPPER = SaltVersion("Copper" , info=3019)
+ ZINC = SaltVersion("Zinc" , info=3020)
+ GALLIUM = SaltVersion("Gallium" , info=3021)
+ GERMANIUM = SaltVersion("Germanium" , info=3022)
+ ARSENIC = SaltVersion("Arsenic" , info=3023)
+ SELENIUM = SaltVersion("Selenium" , info=3024)
+ BROMINE = SaltVersion("Bromine" , info=3025)
+ KRYPTON = SaltVersion("Krypton" , info=3026)
+ RUBIDIUM = SaltVersion("Rubidium" , info=3027)
+ STRONTIUM = SaltVersion("Strontium" , info=3028)
+ YTTRIUM = SaltVersion("Yttrium" , info=3029)
+ ZIRCONIUM = SaltVersion("Zirconium" , info=3030)
+ NIOBIUM = SaltVersion("Niobium" , info=3031)
+ MOLYBDENUM = SaltVersion("Molybdenum" , info=3032)
+ TECHNETIUM = SaltVersion("Technetium" , info=3033)
+ RUTHENIUM = SaltVersion("Ruthenium" , info=3034)
+ RHODIUM = SaltVersion("Rhodium" , info=3035)
+ PALLADIUM = SaltVersion("Palladium" , info=3036)
+ SILVER = SaltVersion("Silver" , info=3037)
+ CADMIUM = SaltVersion("Cadmium" , info=3038)
+ INDIUM = SaltVersion("Indium" , info=3039)
+ TIN = SaltVersion("Tin" , info=3040)
+ ANTIMONY = SaltVersion("Antimony" , info=3041)
+ TELLURIUM = SaltVersion("Tellurium" , info=3042)
+ IODINE = SaltVersion("Iodine" , info=3043)
+ XENON = SaltVersion("Xenon" , info=3044)
+ CESIUM = SaltVersion("Cesium" , info=3045)
+ BARIUM = SaltVersion("Barium" , info=3046)
+ LANTHANUM = SaltVersion("Lanthanum" , info=3047)
+ CERIUM = SaltVersion("Cerium" , info=3048)
+ PRASEODYMIUM = SaltVersion("Praseodymium" , info=3049)
+ NEODYMIUM = SaltVersion("Neodymium" , info=3050)
+ PROMETHIUM = SaltVersion("Promethium" , info=3051)
+ SAMARIUM = SaltVersion("Samarium" , info=3052)
+ EUROPIUM = SaltVersion("Europium" , info=3053)
+ GADOLINIUM = SaltVersion("Gadolinium" , info=3054)
+ TERBIUM = SaltVersion("Terbium" , info=3055)
+ DYSPROSIUM = SaltVersion("Dysprosium" , info=3056)
+ HOLMIUM = SaltVersion("Holmium" , info=3057)
+ ERBIUM = SaltVersion("Erbium" , info=3058)
+ THULIUM = SaltVersion("Thulium" , info=3059)
+ YTTERBIUM = SaltVersion("Ytterbium" , info=3060)
+ LUTETIUM = SaltVersion("Lutetium" , info=3061)
+ HAFNIUM = SaltVersion("Hafnium" , info=3062)
+ TANTALUM = SaltVersion("Tantalum" , info=3063)
+ TUNGSTEN = SaltVersion("Tungsten" , info=3064)
+ RHENIUM = SaltVersion("Rhenium" , info=3065)
+ OSMIUM = SaltVersion("Osmium" , info=3066)
+ IRIDIUM = SaltVersion("Iridium" , info=3067)
+ PLATINUM = SaltVersion("Platinum" , info=3068)
+ GOLD = SaltVersion("Gold" , info=3069)
+ MERCURY = SaltVersion("Mercury" , info=3070)
+ THALLIUM = SaltVersion("Thallium" , info=3071)
+ LEAD = SaltVersion("Lead" , info=3072)
+ BISMUTH = SaltVersion("Bismuth" , info=3073)
+ POLONIUM = SaltVersion("Polonium" , info=3074)
+ ASTATINE = SaltVersion("Astatine" , info=3075)
+ RADON = SaltVersion("Radon" , info=3076)
+ FRANCIUM = SaltVersion("Francium" , info=3077)
+ RADIUM = SaltVersion("Radium" , info=3078)
+ ACTINIUM = SaltVersion("Actinium" , info=3079)
+ THORIUM = SaltVersion("Thorium" , info=3080)
+ PROTACTINIUM = SaltVersion("Protactinium" , info=3081)
+ URANIUM = SaltVersion("Uranium" , info=3082)
+ NEPTUNIUM = SaltVersion("Neptunium" , info=3083)
+ PLUTONIUM = SaltVersion("Plutonium" , info=3084)
+ AMERICIUM = SaltVersion("Americium" , info=3085)
+ CURIUM = SaltVersion("Curium" , info=3086)
+ BERKELIUM = SaltVersion("Berkelium" , info=3087)
+ CALIFORNIUM = SaltVersion("Californium" , info=3088)
+ EINSTEINIUM = SaltVersion("Einsteinium" , info=3089)
+ FERMIUM = SaltVersion("Fermium" , info=3090)
+ MENDELEVIUM = SaltVersion("Mendelevium" , info=3091)
+ NOBELIUM = SaltVersion("Nobelium" , info=3092)
+ LAWRENCIUM = SaltVersion("Lawrencium" , info=3093)
+ RUTHERFORDIUM = SaltVersion("Rutherfordium", info=3094)
+ DUBNIUM = SaltVersion("Dubnium" , info=3095)
+ SEABORGIUM = SaltVersion("Seaborgium" , info=3096)
+ BOHRIUM = SaltVersion("Bohrium" , info=3097)
+ HASSIUM = SaltVersion("Hassium" , info=3098)
+ MEITNERIUM = SaltVersion("Meitnerium" , info=3099)
+ DARMSTADTIUM = SaltVersion("Darmstadtium" , info=3100)
+ ROENTGENIUM = SaltVersion("Roentgenium" , info=3101)
+ COPERNICIUM = SaltVersion("Copernicium" , info=3102)
+ NIHONIUM = SaltVersion("Nihonium" , info=3103)
+ FLEROVIUM = SaltVersion("Flerovium" , info=3104)
+ MOSCOVIUM = SaltVersion("Moscovium" , info=3105)
+ LIVERMORIUM = SaltVersion("Livermorium" , info=3106)
+ TENNESSINE = SaltVersion("Tennessine" , info=3107)
+ OGANESSON = SaltVersion("Oganesson" , info=3108)
# <---- Please refrain from fixing whitespace -----------------------------------
# The idea is to keep this readable.
# -------------------------------------------------------------------------------
@@ -323,9 +323,7 @@ class SaltStackVersion:
self.mbugfix = mbugfix
self.pre_type = pre_type
self.pre_num = pre_num
- if self.can_have_dot_zero(major):
- vnames_key = (major, 0)
- elif self.new_version(major):
+ if self.new_version(major):
vnames_key = (major,)
else:
vnames_key = (major, minor)
@@ -476,8 +474,12 @@ class SaltStackVersion:
version_string = self.string
if self.sse:
version_string += " Enterprise"
- if (self.major, self.minor) in self.RMATCH:
- version_string += " ({})".format(self.RMATCH[(self.major, self.minor)])
+ if self.new_version(self.major):
+ rmatch_key = (self.major,)
+ else:
+ rmatch_key = (self.major, self.minor)
+ if rmatch_key in self.RMATCH:
+ version_string += " ({})".format(self.RMATCH[rmatch_key])
return version_string
@property
diff --git a/tests/pytests/unit/modules/test_salt_version.py b/tests/pytests/unit/modules/test_salt_version.py
index 6d734f6a76..4b7a7cd073 100644
--- a/tests/pytests/unit/modules/test_salt_version.py
+++ b/tests/pytests/unit/modules/test_salt_version.py
@@ -2,8 +2,11 @@
Unit tests for salt/modules/salt_version.py
"""
+import pytest
+
import salt.modules.salt_version as salt_version
import salt.version
+from salt.exceptions import CommandExecutionError
from tests.support.mock import MagicMock, patch
@@ -21,7 +24,7 @@ def test_mocked_objects():
for k, v in salt.version.SaltStackVersion.LNAMES.items():
assert k == k.lower()
assert isinstance(v, tuple)
- if sv.new_version(major=v[0]) and not sv.can_have_dot_zero(major=v[0]):
+ if sv.new_version(major=v[0]):
assert len(v) == 1
else:
assert len(v) == 2
@@ -64,6 +67,13 @@ def test_get_release_number_success_new_version():
assert salt_version.get_release_number("Neon") == "3000"
+def test_get_release_number_success_new_version_with_dot():
+ """
+ Test that a version is returned for new versioning (3006)
+ """
+ assert salt_version.get_release_number("Sulfur") == "3006"
+
+
def test_equal_success():
"""
Test that the current version is equal to the codename
@@ -83,6 +93,16 @@ def test_equal_success_new_version():
assert salt_version.equal("foo") is True
+def test_equal_success_new_version_with_dot():
+ """
+ Test that the current version is equal to the codename
+ while using the new versioning
+ """
+ with patch("salt.version.SaltStackVersion", MagicMock(return_value="3006.1")):
+ with patch("salt.version.SaltStackVersion.LNAMES", {"foo": (3006,)}):
+ assert salt_version.equal("foo") is True
+
+
def test_equal_older_codename():
"""
Test that when an older codename is passed in, the function returns False.
@@ -142,6 +162,17 @@ def test_greater_than_success_new_version():
assert salt_version.greater_than("Nitrogen") is True
+def test_greater_than_success_new_version_with_dot():
+ """
+ Test that the current version is newer than the codename
+ """
+ with patch(
+ "salt.modules.salt_version.get_release_number", MagicMock(return_value="3000")
+ ):
+ with patch("salt.version.SaltStackVersion", MagicMock(return_value="3006.0")):
+ assert salt_version.greater_than("Neon") is True
+
+
def test_greater_than_with_equal_codename():
"""
Test that when an equal codename is passed in, the function returns False.
@@ -200,6 +231,28 @@ def test_less_than_success_new_version():
assert salt_version.less_than("Fluorine") is True
+def test_less_than_success_new_version_with_dot():
+ """
+ Test that when a newer codename is passed in, the function returns True
+ using new version
+ """
+ with patch("salt.version.SaltStackVersion", MagicMock(return_value="2018.3.2")):
+ with patch(
+ "salt.modules.salt_version.get_release_number",
+ MagicMock(return_value="3006"),
+ ):
+ assert salt_version.less_than("Fluorine") is True
+
+
+def test_less_than_do_not_crash_when_input_is_a_number():
+ """
+ Test that less_than do not crash when unexpected inputs
+ """
+ with patch("salt.version.SaltStackVersion", MagicMock(return_value="2018.3.2")):
+ with pytest.raises(CommandExecutionError):
+ salt_version.less_than(1234)
+
+
def test_less_than_with_equal_codename():
"""
Test that when an equal codename is passed in, the function returns False.
diff --git a/tests/pytests/unit/test_version.py b/tests/pytests/unit/test_version.py
index 73befea4cf..1cb94c619c 100644
--- a/tests/pytests/unit/test_version.py
+++ b/tests/pytests/unit/test_version.py
@@ -187,7 +187,7 @@ def test_string_new_version_minor():
ver = SaltStackVersion(major=maj_ver, minor=min_ver)
assert ver.minor == min_ver
assert not ver.bugfix
- assert ver.string == "{}.{}".format(maj_ver, min_ver)
+ assert ver.string == f"{maj_ver}.{min_ver}"
def test_string_new_version_minor_as_string():
@@ -201,13 +201,13 @@ def test_string_new_version_minor_as_string():
ver = SaltStackVersion(major=maj_ver, minor=min_ver)
assert ver.minor == int(min_ver)
assert not ver.bugfix
- assert ver.string == "{}.{}".format(maj_ver, min_ver)
+ assert ver.string == f"{maj_ver}.{min_ver}"
# This only seems to happen on a cloned repo without its tags
maj_ver = "3000"
min_ver = ""
ver = SaltStackVersion(major=maj_ver, minor=min_ver)
- assert ver.minor is None, "{!r} is not {!r}".format(ver.minor, min_ver)
+ assert ver.minor is None, f"{ver.minor!r} is not {min_ver!r}"
assert not ver.bugfix
assert ver.string == maj_ver
@@ -222,7 +222,7 @@ def test_string_old_version():
min_ver = "2"
ver = SaltStackVersion(major=maj_ver, minor=min_ver)
assert ver.bugfix == 0
- assert ver.string == "{}.{}.0".format(maj_ver, min_ver)
+ assert ver.string == f"{maj_ver}.{min_ver}.0"
@pytest.mark.parametrize(
@@ -537,6 +537,8 @@ def test_versions_report_no_extensions_available():
("3000.1", "3000.1", "Neon"),
("3005", "3005", "Phosphorus"),
("3006", "3006.0", "Sulfur"),
+ ("3006.0", "3006.0", "Sulfur"),
+ ("3006.1", "3006.1", "Sulfur"),
("3015.1", "3015.1", "Manganese"),
("3109.3", "3109.3", None),
],
--
2.41.0

View File

@ -0,0 +1,31 @@
From b76b74bd9640adf3b6798e4de4b89aaa7af62c9f Mon Sep 17 00:00:00 2001
From: Victor Zhestkov <vzhestkov@suse.com>
Date: Mon, 2 Oct 2023 13:24:43 +0200
Subject: [PATCH] Only call native_str on curl_debug message in tornado
when needed
Co-authored-by: Ben Darnell <ben@bendarnell.com>
---
salt/ext/tornado/curl_httpclient.py | 3 ++-
1 file changed, 2 insertions(+), 1 deletion(-)
diff --git a/salt/ext/tornado/curl_httpclient.py b/salt/ext/tornado/curl_httpclient.py
index 8652343cf7..9e4133fd13 100644
--- a/salt/ext/tornado/curl_httpclient.py
+++ b/salt/ext/tornado/curl_httpclient.py
@@ -494,10 +494,11 @@ class CurlAsyncHTTPClient(AsyncHTTPClient):
def _curl_debug(self, debug_type, debug_msg):
debug_types = ('I', '<', '>', '<', '>')
- debug_msg = native_str(debug_msg)
if debug_type == 0:
+ debug_msg = native_str(debug_msg)
curl_log.debug('%s', debug_msg.strip())
elif debug_type in (1, 2):
+ debug_msg = native_str(debug_msg)
for line in debug_msg.splitlines():
curl_log.debug('%s %s', debug_types[debug_type], line)
elif debug_type == 4:
--
2.42.0

View File

@ -0,0 +1,276 @@
From bd671b53de8933732e2108624d7dfb6f9b183f38 Mon Sep 17 00:00:00 2001
From: Victor Zhestkov <Victor.Zhestkov@suse.com>
Date: Fri, 28 Oct 2022 13:20:13 +0300
Subject: [PATCH] Pass the context to pillar ext modules
* Pass __context__ to ext pillar
* Add test for passing the context to pillar ext module
* Align the test and pillar to prevent failing test
---
salt/master.py | 7 ++-
salt/pillar/__init__.py | 16 +++++-
tests/pytests/unit/test_master.py | 91 ++++++++++++++++++++++++++++++-
3 files changed, 108 insertions(+), 6 deletions(-)
diff --git a/salt/master.py b/salt/master.py
index a0552fa232..da1eb8cef5 100644
--- a/salt/master.py
+++ b/salt/master.py
@@ -964,6 +964,7 @@ class MWorker(salt.utils.process.SignalHandlingProcess):
self.k_mtime = 0
self.stats = collections.defaultdict(lambda: {"mean": 0, "runs": 0})
self.stat_clock = time.time()
+ self.context = {}
# We need __setstate__ and __getstate__ to also pickle 'SMaster.secrets'.
# Otherwise, 'SMaster.secrets' won't be copied over to the spawned process
@@ -1151,7 +1152,7 @@ class MWorker(salt.utils.process.SignalHandlingProcess):
self.key,
)
self.clear_funcs.connect()
- self.aes_funcs = AESFuncs(self.opts)
+ self.aes_funcs = AESFuncs(self.opts, context=self.context)
salt.utils.crypt.reinit_crypto()
self.__bind()
@@ -1214,7 +1215,7 @@ class AESFuncs(TransportMethods):
"_file_envs",
)
- def __init__(self, opts):
+ def __init__(self, opts, context=None):
"""
Create a new AESFuncs
@@ -1224,6 +1225,7 @@ class AESFuncs(TransportMethods):
:returns: Instance for handling AES operations
"""
self.opts = opts
+ self.context = context
self.event = salt.utils.event.get_master_event(
self.opts, self.opts["sock_dir"], listen=False
)
@@ -1611,6 +1613,7 @@ class AESFuncs(TransportMethods):
pillarenv=load.get("pillarenv"),
extra_minion_data=load.get("extra_minion_data"),
clean_cache=load.get("clean_cache"),
+ context=self.context,
)
data = pillar.compile_pillar()
self.fs_.update_opts()
diff --git a/salt/pillar/__init__.py b/salt/pillar/__init__.py
index 5a3f5388b4..0dfab4cc57 100644
--- a/salt/pillar/__init__.py
+++ b/salt/pillar/__init__.py
@@ -46,6 +46,7 @@ def get_pillar(
pillarenv=None,
extra_minion_data=None,
clean_cache=False,
+ context=None,
):
"""
Return the correct pillar driver based on the file_client option
@@ -82,6 +83,7 @@ def get_pillar(
pillarenv=pillarenv,
clean_cache=clean_cache,
extra_minion_data=extra_minion_data,
+ context=context,
)
return ptype(
opts,
@@ -93,6 +95,7 @@ def get_pillar(
pillar_override=pillar_override,
pillarenv=pillarenv,
extra_minion_data=extra_minion_data,
+ context=context,
)
@@ -281,7 +284,7 @@ class AsyncRemotePillar(RemotePillarMixin):
raise salt.ext.tornado.gen.Return(ret_pillar)
def destroy(self):
- if self._closing:
+ if hasattr(self, "_closing") and self._closing:
return
self._closing = True
@@ -310,6 +313,7 @@ class RemotePillar(RemotePillarMixin):
pillar_override=None,
pillarenv=None,
extra_minion_data=None,
+ context=None,
):
self.opts = opts
self.opts["saltenv"] = saltenv
@@ -334,6 +338,7 @@ class RemotePillar(RemotePillarMixin):
merge_lists=True,
)
self._closing = False
+ self.context = context
def compile_pillar(self):
"""
@@ -407,6 +412,7 @@ class PillarCache:
pillarenv=None,
extra_minion_data=None,
clean_cache=False,
+ context=None,
):
# Yes, we need all of these because we need to route to the Pillar object
# if we have no cache. This is another refactor target.
@@ -434,6 +440,8 @@ class PillarCache:
minion_cache_path=self._minion_cache_path(minion_id),
)
+ self.context = context
+
def _minion_cache_path(self, minion_id):
"""
Return the path to the cache file for the minion.
@@ -458,6 +466,7 @@ class PillarCache:
pillar_override=self.pillar_override,
pillarenv=self.pillarenv,
extra_minion_data=self.extra_minion_data,
+ context=self.context,
)
return fresh_pillar.compile_pillar()
@@ -533,6 +542,7 @@ class Pillar:
pillar_override=None,
pillarenv=None,
extra_minion_data=None,
+ context=None,
):
self.minion_id = minion_id
self.ext = ext
@@ -571,7 +581,7 @@ class Pillar:
if opts.get("pillar_source_merging_strategy"):
self.merge_strategy = opts["pillar_source_merging_strategy"]
- self.ext_pillars = salt.loader.pillars(ext_pillar_opts, self.functions)
+ self.ext_pillars = salt.loader.pillars(ext_pillar_opts, self.functions, context=context)
self.ignored_pillars = {}
self.pillar_override = pillar_override or {}
if not isinstance(self.pillar_override, dict):
@@ -1338,7 +1348,7 @@ class Pillar:
"""
This method exist in order to be API compatible with RemotePillar
"""
- if self._closing:
+ if hasattr(self, "_closing") and self._closing:
return
self._closing = True
diff --git a/tests/pytests/unit/test_master.py b/tests/pytests/unit/test_master.py
index cd11d217c7..98c796912a 100644
--- a/tests/pytests/unit/test_master.py
+++ b/tests/pytests/unit/test_master.py
@@ -4,7 +4,7 @@ import pytest
import salt.master
import salt.utils.platform
-from tests.support.mock import patch
+from tests.support.mock import MagicMock, patch
@pytest.fixture
@@ -160,3 +160,92 @@ def test_when_syndic_return_processes_load_then_correct_values_should_be_returne
with patch.object(encrypted_requests, "_return", autospec=True) as fake_return:
encrypted_requests._syndic_return(payload)
fake_return.assert_called_with(expected_return)
+
+
+def test_mworker_pass_context():
+ """
+ Test of passing the __context__ to pillar ext module loader
+ """
+ req_channel_mock = MagicMock()
+ local_client_mock = MagicMock()
+
+ opts = {
+ "req_server_niceness": None,
+ "mworker_niceness": None,
+ "sock_dir": "/tmp",
+ "conf_file": "/tmp/fake_conf",
+ "transport": "zeromq",
+ "fileserver_backend": ["roots"],
+ "file_client": "local",
+ "pillar_cache": False,
+ "state_top": "top.sls",
+ "pillar_roots": {},
+ }
+
+ data = {
+ "id": "MINION_ID",
+ "grains": {},
+ "saltenv": None,
+ "pillarenv": None,
+ "pillar_override": {},
+ "extra_minion_data": {},
+ "ver": "2",
+ "cmd": "_pillar",
+ }
+
+ test_context = {"testing": 123}
+
+ def mworker_bind_mock():
+ mworker.aes_funcs.run_func(data["cmd"], data)
+
+ with patch("salt.client.get_local_client", local_client_mock), patch(
+ "salt.master.ClearFuncs", MagicMock()
+ ), patch("salt.minion.MasterMinion", MagicMock()), patch(
+ "salt.utils.verify.valid_id", return_value=True
+ ), patch(
+ "salt.loader.matchers", MagicMock()
+ ), patch(
+ "salt.loader.render", MagicMock()
+ ), patch(
+ "salt.loader.utils", MagicMock()
+ ), patch(
+ "salt.loader.fileserver", MagicMock()
+ ), patch(
+ "salt.loader.minion_mods", MagicMock()
+ ), patch(
+ "salt.loader._module_dirs", MagicMock()
+ ), patch(
+ "salt.loader.LazyLoader", MagicMock()
+ ) as loadler_pillars_mock:
+ mworker = salt.master.MWorker(opts, {}, {}, [req_channel_mock])
+
+ with patch.object(mworker, "_MWorker__bind", mworker_bind_mock), patch.dict(
+ mworker.context, test_context
+ ):
+ mworker.run()
+ assert (
+ loadler_pillars_mock.call_args_list[0][1].get("pack").get("__context__")
+ == test_context
+ )
+
+ loadler_pillars_mock.reset_mock()
+
+ opts.update(
+ {
+ "pillar_cache": True,
+ "pillar_cache_backend": "file",
+ "pillar_cache_ttl": 1000,
+ "cachedir": "/tmp",
+ }
+ )
+
+ mworker = salt.master.MWorker(opts, {}, {}, [req_channel_mock])
+
+ with patch.object(mworker, "_MWorker__bind", mworker_bind_mock), patch.dict(
+ mworker.context, test_context
+ ), patch("salt.utils.cache.CacheFactory.factory", MagicMock()):
+ mworker.run()
+ assert (
+ loadler_pillars_mock.call_args_list[0][1].get("pack").get("__context__")
+ == test_context
+ )
--
2.39.2

View File

@ -0,0 +1,135 @@
From 107de57586f0b0f784771543b942dfb6bb70453a Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Yeray=20Guti=C3=A9rrez=20Cedr=C3=A9s?=
<yeray.gutierrez@suse.com>
Date: Wed, 13 Dec 2023 11:03:45 +0000
Subject: [PATCH] Prefer unittest.mock for Python versions that are
sufficient
---
requirements/pytest.txt | 2 +-
.../unit/cloud/clouds/test_dimensiondata.py | 4 +-
tests/pytests/unit/cloud/clouds/test_gce.py | 4 +-
tests/support/mock.py | 48 +++++++++----------
4 files changed, 25 insertions(+), 33 deletions(-)
diff --git a/requirements/pytest.txt b/requirements/pytest.txt
index 5b67583a3d..0bead83f5b 100644
--- a/requirements/pytest.txt
+++ b/requirements/pytest.txt
@@ -1,4 +1,4 @@
-mock >= 3.0.0
+mock >= 3.0.0; python_version < '3.8'
# PyTest
pytest >= 7.0.1; python_version <= "3.6"
pytest >= 7.2.0; python_version > "3.6"
diff --git a/tests/pytests/unit/cloud/clouds/test_dimensiondata.py b/tests/pytests/unit/cloud/clouds/test_dimensiondata.py
index e196805004..aab2e686f2 100644
--- a/tests/pytests/unit/cloud/clouds/test_dimensiondata.py
+++ b/tests/pytests/unit/cloud/clouds/test_dimensiondata.py
@@ -11,7 +11,6 @@ from salt.cloud.clouds import dimensiondata
from salt.exceptions import SaltCloudSystemExit
from salt.utils.versions import Version
from tests.support.mock import MagicMock
-from tests.support.mock import __version__ as mock_version
from tests.support.mock import patch
try:
@@ -144,8 +143,7 @@ def test_import():
with patch("salt.config.check_driver_dependencies", return_value=True) as p:
get_deps = dimensiondata.get_dependencies()
assert get_deps is True
- if Version(mock_version) >= Version("2.0.0"):
- assert p.call_count >= 1
+ assert p.call_count >= 1
def test_provider_matches():
diff --git a/tests/pytests/unit/cloud/clouds/test_gce.py b/tests/pytests/unit/cloud/clouds/test_gce.py
index 265818016e..ec1346a978 100644
--- a/tests/pytests/unit/cloud/clouds/test_gce.py
+++ b/tests/pytests/unit/cloud/clouds/test_gce.py
@@ -13,7 +13,6 @@ from salt.cloud.clouds import gce
from salt.exceptions import SaltCloudSystemExit
from salt.utils.versions import Version
from tests.support.mock import MagicMock
-from tests.support.mock import __version__ as mock_version
from tests.support.mock import call, patch
@@ -281,8 +280,7 @@ def test_import():
with patch("salt.config.check_driver_dependencies", return_value=True) as p:
get_deps = gce.get_dependencies()
assert get_deps is True
- if Version(mock_version) >= Version("2.0.0"):
- p.assert_called_once()
+ p.assert_called_once()
@pytest.mark.parametrize(
diff --git a/tests/support/mock.py b/tests/support/mock.py
index 2256ad8f5d..59e5fcbc8e 100644
--- a/tests/support/mock.py
+++ b/tests/support/mock.py
@@ -18,37 +18,33 @@ import copy
import errno
import fnmatch
import sys
-
-# By these days, we should blowup if mock is not available
-import mock # pylint: disable=blacklisted-external-import
-
-# pylint: disable=no-name-in-module,no-member
-from mock import (
- ANY,
- DEFAULT,
- FILTER_DIR,
- MagicMock,
- Mock,
- NonCallableMagicMock,
- NonCallableMock,
- PropertyMock,
- __version__,
- call,
- create_autospec,
- patch,
- sentinel,
-)
+import importlib
+
+current_version = (sys.version_info.major, sys.version_info.minor)
+
+# Prefer unittest.mock for Python versions that are sufficient
+if current_version >= (3,8):
+ mock = importlib.import_module('unittest.mock')
+else:
+ mock = importlib.import_module('mock')
+
+ANY = mock.ANY
+DEFAULT = mock.DEFAULT
+FILTER_DIR = mock.FILTER_DIR
+MagicMock = mock.MagicMock
+Mock = mock.Mock
+NonCallableMagicMock = mock.NonCallableMagicMock
+NonCallableMock = mock.NonCallableMock
+PropertyMock = mock.PropertyMock
+call = mock.call
+create_autospec = mock.create_autospec
+patch = mock.patch
+sentinel = mock.sentinel
import salt.utils.stringutils
# pylint: disable=no-name-in-module,no-member
-
-__mock_version = tuple(
- int(part) for part in mock.__version__.split(".") if part.isdigit()
-) # pylint: disable=no-member
-
-
class MockFH:
def __init__(self, filename, read_data, *args, **kwargs):
self.filename = filename
--
2.41.0

View File

@ -0,0 +1,240 @@
From 90236c844cfce7da8beb7a570be19a8677c60820 Mon Sep 17 00:00:00 2001
From: Victor Zhestkov <vzhestkov@suse.com>
Date: Tue, 12 Apr 2022 10:06:43 +0300
Subject: [PATCH] Prevent affection of SSH.opts with LazyLoader
(bsc#1197637)
* Prevent affection of SSH.opts with LazyLoader
* Restore parsed targets
* Fix test_ssh unit tests
Adjust unit tests
---
salt/client/ssh/__init__.py | 19 +++++++++-------
.../pytests/unit/client/ssh/test_password.py | 4 +++-
.../unit/client/ssh/test_return_events.py | 2 +-
tests/pytests/unit/client/ssh/test_ssh.py | 22 +++++++++----------
4 files changed, 26 insertions(+), 21 deletions(-)
diff --git a/salt/client/ssh/__init__.py b/salt/client/ssh/__init__.py
index a527c03de6..d5a679821e 100644
--- a/salt/client/ssh/__init__.py
+++ b/salt/client/ssh/__init__.py
@@ -224,15 +224,16 @@ class SSH(MultiprocessingStateMixin):
ROSTER_UPDATE_FLAG = "#__needs_update"
def __init__(self, opts, context=None):
+ self.opts = copy.deepcopy(opts)
+ self.sopts = copy.deepcopy(self.opts)
self.__parsed_rosters = {SSH.ROSTER_UPDATE_FLAG: True}
- pull_sock = os.path.join(opts["sock_dir"], "master_event_pull.ipc")
+ pull_sock = os.path.join(self.opts["sock_dir"], "master_event_pull.ipc")
if os.path.exists(pull_sock) and zmq:
self.event = salt.utils.event.get_event(
- "master", opts["sock_dir"], opts=opts, listen=False
+ "master", self.opts["sock_dir"], opts=self.opts, listen=False
)
else:
self.event = None
- self.opts = opts
if self.opts["regen_thin"]:
self.opts["ssh_wipe"] = True
if not salt.utils.path.which("ssh"):
@@ -243,7 +244,7 @@ class SSH(MultiprocessingStateMixin):
" to run. Exiting."
),
)
- self.opts["_ssh_version"] = ssh_version()
+ self.sopts["_ssh_version"] = ssh_version()
self.tgt_type = (
self.opts["selected_target_option"]
if self.opts["selected_target_option"]
@@ -339,6 +340,9 @@ class SSH(MultiprocessingStateMixin):
self.opts["cachedir"], "salt-ssh.session.lock"
)
self.ssh_session_grace_time = int(self.opts.get("ssh_session_grace_time", 1))
+ self.sopts["tgt"] = copy.deepcopy(self.opts["tgt"])
+ self.sopts["ssh_cli_tgt"] = copy.deepcopy(self.opts["ssh_cli_tgt"])
+ self.opts = self.sopts
# __setstate__ and __getstate__ are only used on spawning platforms.
def __setstate__(self, state):
@@ -607,7 +611,6 @@ class SSH(MultiprocessingStateMixin):
Spin up the needed threads or processes and execute the subsequent
routines
"""
- opts = copy.deepcopy(self.opts)
que = multiprocessing.Queue()
running = {}
targets_queue = deque(self.targets.keys())
@@ -618,7 +621,7 @@ class SSH(MultiprocessingStateMixin):
if not self.targets:
log.error("No matching targets found in roster.")
break
- if len(running) < opts.get("ssh_max_procs", 25) and not init:
+ if len(running) < self.opts.get("ssh_max_procs", 25) and not init:
if targets_queue:
host = targets_queue.popleft()
else:
@@ -682,7 +685,7 @@ class SSH(MultiprocessingStateMixin):
continue
args = (
que,
- opts,
+ self.opts,
host,
self.targets[host],
mine,
@@ -776,7 +779,7 @@ class SSH(MultiprocessingStateMixin):
if len(rets) >= len(self.targets):
break
# Sleep when limit or all threads started
- if len(running) >= opts.get("ssh_max_procs", 25) or len(
+ if len(running) >= self.opts.get("ssh_max_procs", 25) or len(
self.targets
) >= len(running):
time.sleep(0.1)
diff --git a/tests/pytests/unit/client/ssh/test_password.py b/tests/pytests/unit/client/ssh/test_password.py
index 8a7794d2f4..0ca28d022e 100644
--- a/tests/pytests/unit/client/ssh/test_password.py
+++ b/tests/pytests/unit/client/ssh/test_password.py
@@ -27,6 +27,8 @@ def test_password_failure(temp_salt_master, tmp_path):
opts["argv"] = ["test.ping"]
opts["selected_target_option"] = "glob"
opts["tgt"] = "localhost"
+ opts["ssh_cli_tgt"] = "localhost"
+ opts["_ssh_version"] = "foobar"
opts["arg"] = []
roster = str(tmp_path / "roster")
handle_ssh_ret = [
@@ -44,7 +46,7 @@ def test_password_failure(temp_salt_master, tmp_path):
"salt.client.ssh.SSH.handle_ssh", MagicMock(return_value=handle_ssh_ret)
), patch("salt.client.ssh.SSH.key_deploy", MagicMock(return_value=expected)), patch(
"salt.output.display_output", display_output
- ):
+ ), patch("salt.client.ssh.ssh_version", MagicMock(return_value="foobar")):
client = ssh.SSH(opts)
ret = next(client.run_iter())
with pytest.raises(SystemExit):
diff --git a/tests/pytests/unit/client/ssh/test_return_events.py b/tests/pytests/unit/client/ssh/test_return_events.py
index 1f0b0dbf33..18714741b9 100644
--- a/tests/pytests/unit/client/ssh/test_return_events.py
+++ b/tests/pytests/unit/client/ssh/test_return_events.py
@@ -43,7 +43,7 @@ def test_not_missing_fun_calling_wfuncs(temp_salt_master, tmp_path):
assert "localhost" in ret
assert "fun" in ret["localhost"]
client.run()
- display_output.assert_called_once_with(expected, "nested", opts)
+ display_output.assert_called_once_with(expected, "nested", client.opts)
assert ret is handle_ssh_ret[0]
assert len(client.event.fire_event.call_args_list) == 2
assert "fun" in client.event.fire_event.call_args_list[0][0][0]
diff --git a/tests/pytests/unit/client/ssh/test_ssh.py b/tests/pytests/unit/client/ssh/test_ssh.py
index 2be96ab195..377aad9998 100644
--- a/tests/pytests/unit/client/ssh/test_ssh.py
+++ b/tests/pytests/unit/client/ssh/test_ssh.py
@@ -148,7 +148,7 @@ def test_expand_target_ip_address(opts, roster):
MagicMock(return_value=salt.utils.yaml.safe_load(roster)),
):
client._expand_target()
- assert opts["tgt"] == host
+ assert client.opts["tgt"] == host
def test_expand_target_no_host(opts, tmp_path):
@@ -171,7 +171,7 @@ def test_expand_target_no_host(opts, tmp_path):
assert opts["tgt"] == user + host
with patch("salt.roster.get_roster_file", MagicMock(return_value=roster_file)):
client._expand_target()
- assert opts["tgt"] == host
+ assert client.opts["tgt"] == host
def test_expand_target_dns(opts, roster):
@@ -192,7 +192,7 @@ def test_expand_target_dns(opts, roster):
MagicMock(return_value=salt.utils.yaml.safe_load(roster)),
):
client._expand_target()
- assert opts["tgt"] == host
+ assert client.opts["tgt"] == host
def test_expand_target_no_user(opts, roster):
@@ -204,7 +204,7 @@ def test_expand_target_no_user(opts, roster):
with patch("salt.utils.network.is_reachable_host", MagicMock(return_value=False)):
client = ssh.SSH(opts)
- assert opts["tgt"] == host
+ assert client.opts["tgt"] == host
with patch(
"salt.roster.get_roster_file", MagicMock(return_value="/etc/salt/roster")
@@ -213,7 +213,7 @@ def test_expand_target_no_user(opts, roster):
MagicMock(return_value=salt.utils.yaml.safe_load(roster)),
):
client._expand_target()
- assert opts["tgt"] == host
+ assert client.opts["tgt"] == host
def test_update_targets_ip_address(opts):
@@ -228,7 +228,7 @@ def test_update_targets_ip_address(opts):
client = ssh.SSH(opts)
assert opts["tgt"] == user + host
client._update_targets()
- assert opts["tgt"] == host
+ assert client.opts["tgt"] == host
assert client.targets[host]["user"] == user.split("@")[0]
@@ -244,7 +244,7 @@ def test_update_targets_dns(opts):
client = ssh.SSH(opts)
assert opts["tgt"] == user + host
client._update_targets()
- assert opts["tgt"] == host
+ assert client.opts["tgt"] == host
assert client.targets[host]["user"] == user.split("@")[0]
@@ -259,7 +259,7 @@ def test_update_targets_no_user(opts):
client = ssh.SSH(opts)
assert opts["tgt"] == host
client._update_targets()
- assert opts["tgt"] == host
+ assert client.opts["tgt"] == host
def test_update_expand_target_dns(opts, roster):
@@ -281,7 +281,7 @@ def test_update_expand_target_dns(opts, roster):
):
client._expand_target()
client._update_targets()
- assert opts["tgt"] == host
+ assert client.opts["tgt"] == host
assert client.targets[host]["user"] == user.split("@")[0]
@@ -299,7 +299,7 @@ def test_parse_tgt(opts):
client = ssh.SSH(opts)
assert client.parse_tgt["hostname"] == host
assert client.parse_tgt["user"] == user.split("@")[0]
- assert opts.get("ssh_cli_tgt") == user + host
+ assert client.opts.get("ssh_cli_tgt") == user + host
def test_parse_tgt_no_user(opts):
@@ -316,7 +316,7 @@ def test_parse_tgt_no_user(opts):
client = ssh.SSH(opts)
assert client.parse_tgt["hostname"] == host
assert client.parse_tgt["user"] == opts["ssh_user"]
- assert opts.get("ssh_cli_tgt") == host
+ assert client.opts.get("ssh_cli_tgt") == host
def test_extra_filerefs(tmp_path, opts):
--
2.39.2

File diff suppressed because it is too large Load Diff

View File

@ -0,0 +1,102 @@
From 4240f0d5ffbc46c557885c5a28d1f2fd0b4c5e48 Mon Sep 17 00:00:00 2001
From: Victor Zhestkov <35733135+vzhestkov@users.noreply.github.com>
Date: Mon, 8 Nov 2021 17:42:36 +0300
Subject: [PATCH] Prevent pkg plugins errors on missing cookie path
(bsc#1186738) - 3002.2 (#415)
* Prevent pkg plugins errors on missing cookie path (bsc#1186738)
* Narrowing down exception handling
* Modify for Python 3 only
* Fix yumnotify
---
scripts/suse/yum/plugins/README.md | 2 +-
scripts/suse/yum/plugins/yumnotify.py | 17 +++++++++++++----
scripts/suse/zypper/plugins/commit/zyppnotify | 18 ++++++++++++------
3 files changed, 26 insertions(+), 11 deletions(-)
diff --git a/scripts/suse/yum/plugins/README.md b/scripts/suse/yum/plugins/README.md
index cb3abd2260..3515845b31 100644
--- a/scripts/suse/yum/plugins/README.md
+++ b/scripts/suse/yum/plugins/README.md
@@ -11,7 +11,7 @@ Configuration files are going to:
Plugin itself goes to:
- `/usr/share/yum-plugins/[name].conf`
+ `/usr/share/yum-plugins/[name].py`
## Permissions
diff --git a/scripts/suse/yum/plugins/yumnotify.py b/scripts/suse/yum/plugins/yumnotify.py
index 4e137191a0..0d117e8946 100644
--- a/scripts/suse/yum/plugins/yumnotify.py
+++ b/scripts/suse/yum/plugins/yumnotify.py
@@ -5,6 +5,7 @@
import hashlib
import os
+import sys
from yum.plugins import TYPE_CORE
@@ -51,7 +52,15 @@ def posttrans_hook(conduit):
"""
# Integrate Yum with Salt
if "SALT_RUNNING" not in os.environ:
- with open(CK_PATH, "w") as ck_fh:
- ck_fh.write(
- "{chksum} {mtime}\n".format(chksum=_get_checksum(), mtime=_get_mtime())
- )
+ try:
+ ck_dir = os.path.dirname(CK_PATH)
+ if not os.path.exists(ck_dir):
+ os.makedirs(ck_dir)
+ with open(CK_PATH, "w") as ck_fh:
+ ck_fh.write(
+ "{chksum} {mtime}\n".format(
+ chksum=_get_checksum(), mtime=_get_mtime()
+ )
+ )
+ except OSError as e:
+ print("Unable to save the cookie file: %s" % (e), file=sys.stderr)
diff --git a/scripts/suse/zypper/plugins/commit/zyppnotify b/scripts/suse/zypper/plugins/commit/zyppnotify
index bacbc8b97e..e3528e87a9 100755
--- a/scripts/suse/zypper/plugins/commit/zyppnotify
+++ b/scripts/suse/zypper/plugins/commit/zyppnotify
@@ -1,4 +1,4 @@
-#!/usr/bin/python
+#!/usr/bin/python3
#
# Copyright (c) 2016 SUSE Linux LLC
# All Rights Reserved.
@@ -55,12 +55,18 @@ class DriftDetector(Plugin):
Hook when plugin closes Zypper's transaction.
"""
if "SALT_RUNNING" not in os.environ:
- with open(self.ck_path, "w") as ck_fh:
- ck_fh.write(
- "{chksum} {mtime}\n".format(
- chksum=self._get_checksum(), mtime=self._get_mtime()
+ try:
+ ck_dir = os.path.dirname(self.ck_path)
+ if not os.path.exists(ck_dir):
+ os.makedirs(ck_dir)
+ with open(self.ck_path, "w") as ck_fh:
+ ck_fh.write(
+ "{chksum} {mtime}\n".format(
+ chksum=self._get_checksum(), mtime=self._get_mtime()
+ )
)
- )
+ except OSError as e:
+ print("Unable to save the cookie file: %s" % (e), file=sys.stderr)
self.ack()
--
2.39.2

View File

@ -0,0 +1,37 @@
From 859be3e8213d4b5814a18fa6e9f3f17bf65b3183 Mon Sep 17 00:00:00 2001
From: Victor Zhestkov <vzhestkov@suse.com>
Date: Wed, 15 May 2024 09:45:26 +0200
Subject: [PATCH] Prevent possible exception in
tornado.concurrent.Future._set_done
---
salt/ext/tornado/concurrent.py | 13 +++++++------
1 file changed, 7 insertions(+), 6 deletions(-)
diff --git a/salt/ext/tornado/concurrent.py b/salt/ext/tornado/concurrent.py
index bea09ba125..011808ed27 100644
--- a/salt/ext/tornado/concurrent.py
+++ b/salt/ext/tornado/concurrent.py
@@ -330,12 +330,13 @@ class Future(object):
def _set_done(self):
self._done = True
- for cb in self._callbacks:
- try:
- cb(self)
- except Exception:
- app_log.exception("Exception in callback %r for %r", cb, self)
- self._callbacks = None
+ if self._callbacks:
+ for cb in self._callbacks:
+ try:
+ cb(self)
+ except Exception:
+ app_log.exception("Exception in callback %r for %r", cb, self)
+ self._callbacks = None
# On Python 3.3 or older, objects with a destructor part of a reference
# cycle are never destroyed. It's no longer the case on Python 3.4 thanks to
--
2.45.0

View File

@ -0,0 +1,68 @@
From 4ea91a61abbb6ef001f057685370454c85b72c3a Mon Sep 17 00:00:00 2001
From: Victor Zhestkov <vzhestkov@suse.com>
Date: Mon, 21 Aug 2023 13:04:32 +0200
Subject: [PATCH] Prevent possible exceptions on
salt.utils.user.get_group_dict (bsc#1212794)
* Prevent KeyError on calling grp.getgrnam in case of improper group
* Add test of calling salt.utils.user.get_group_dict
for the user having improper duplicate group
* Update tests/pytests/functional/utils/user/test_get_group_dict.py
Co-authored-by: Pedro Algarvio <pedro@algarvio.me>
---------
Co-authored-by: Pedro Algarvio <pedro@algarvio.me>
---
salt/utils/user.py | 6 +++++-
.../utils/user/test_get_group_dict.py | 17 +++++++++++++++++
2 files changed, 22 insertions(+), 1 deletion(-)
create mode 100644 tests/pytests/functional/utils/user/test_get_group_dict.py
diff --git a/salt/utils/user.py b/salt/utils/user.py
index 9763667443..2f1ca65cf9 100644
--- a/salt/utils/user.py
+++ b/salt/utils/user.py
@@ -352,7 +352,11 @@ def get_group_dict(user=None, include_default=True):
group_dict = {}
group_names = get_group_list(user, include_default=include_default)
for group in group_names:
- group_dict.update({group: grp.getgrnam(group).gr_gid})
+ try:
+ group_dict.update({group: grp.getgrnam(group).gr_gid})
+ except KeyError:
+ # In case an improper duplicate group was returned by get_group_list
+ pass
return group_dict
diff --git a/tests/pytests/functional/utils/user/test_get_group_dict.py b/tests/pytests/functional/utils/user/test_get_group_dict.py
new file mode 100644
index 0000000000..653d664607
--- /dev/null
+++ b/tests/pytests/functional/utils/user/test_get_group_dict.py
@@ -0,0 +1,17 @@
+import logging
+
+import pytest
+
+import salt.utils.platform
+import salt.utils.user
+from tests.support.mock import patch
+
+log = logging.getLogger(__name__)
+
+pytestmark = [
+ pytest.mark.skip_unless_on_linux(reason="Should only run in platforms which have duplicate GID support"),
+]
+def test_get_group_dict_with_improper_duplicate_root_group():
+ with patch("salt.utils.user.get_group_list", return_value=["+", "root"]):
+ group_list = salt.utils.user.get_group_dict("root")
+ assert group_list == {"root": 0}
--
2.41.0

View File

@ -0,0 +1,33 @@
From 1b4e382856e1d5d8ef95890aec5a8e5e07254708 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Pablo=20Su=C3=A1rez=20Hern=C3=A1ndez?=
<psuarezhernandez@suse.com>
Date: Mon, 28 Feb 2022 14:25:43 +0000
Subject: [PATCH] Prevent shell injection via pre_flight_script_args
(#497)
Add tests around preflight script args
Readjust logic to validate script args
Use RLock to prevent issues in single threads
---
salt/_logging/impl.py | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/salt/_logging/impl.py b/salt/_logging/impl.py
index e050f43caf..2d1a276cb8 100644
--- a/salt/_logging/impl.py
+++ b/salt/_logging/impl.py
@@ -107,7 +107,7 @@ DFLT_LOG_FMT_LOGFILE = "%(asctime)s,%(msecs)03d [%(name)-17s:%(lineno)-4d][%(lev
# LOG_LOCK is used to prevent deadlocks on using logging
# in combination with multiprocessing with salt-api
-LOG_LOCK = threading.Lock()
+LOG_LOCK = threading.RLock()
class SaltLogRecord(logging.LogRecord):
--
2.39.2

View File

@ -0,0 +1,43 @@
From e3809178f7f6db4b0a5dcca48441100cec45c69d Mon Sep 17 00:00:00 2001
From: Marek Czernek <marek.czernek@suse.com>
Date: Thu, 6 Jun 2024 10:11:26 +0200
Subject: [PATCH] Provide systemd timer unit
---
pkg/common/venv-salt-minion-postinstall.service | 7 +++++++
pkg/common/venv-salt-minion-postinstall.timer | 9 +++++++++
2 files changed, 16 insertions(+)
create mode 100644 pkg/common/venv-salt-minion-postinstall.service
create mode 100644 pkg/common/venv-salt-minion-postinstall.timer
diff --git a/pkg/common/venv-salt-minion-postinstall.service b/pkg/common/venv-salt-minion-postinstall.service
new file mode 100644
index 00000000000..b122d7d6eab
--- /dev/null
+++ b/pkg/common/venv-salt-minion-postinstall.service
@@ -0,0 +1,7 @@
+[Unit]
+Description=Clean old environment for venv-salt-minion
+
+[Service]
+ExecStart=/bin/sh -c '/usr/lib/venv-salt-minion/bin/post_start_cleanup.sh || :'
+Type=oneshot
+
diff --git a/pkg/common/venv-salt-minion-postinstall.timer b/pkg/common/venv-salt-minion-postinstall.timer
new file mode 100644
index 00000000000..e6bd86d86e7
--- /dev/null
+++ b/pkg/common/venv-salt-minion-postinstall.timer
@@ -0,0 +1,9 @@
+[Unit]
+Description=Clean old venv-salt-minion environment in 60 seconds
+
+[Timer]
+OnActiveSec=60
+
+[Install]
+WantedBy=timers.target
+
--
2.45.1

Some files were not shown because too many files have changed in this diff Show More