Accepting request 1108398 from systemsmanagement:saltstack

OBS-URL: https://build.opensuse.org/request/show/1108398
OBS-URL: https://build.opensuse.org/package/show/openSUSE:Factory/salt?expand=0&rev=140

This commit is contained in: commit 778f7d2163

@@ -1 +1 @@
-9c59105c906c6b5eb9345c79e71e7d33f119ed22
+27bd69d21f942f677c6f257ae1b69a6e6ac6ec39

fixed-gitfs-cachedir_basename-to-avoid-hash-collisio.patch (new file, 833 lines)
@@ -0,0 +1,833 @@
From 7051f86bb48dbd618a7422d469f3aae4c6f18008 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Pablo=20Su=C3=A1rez=20Hern=C3=A1ndez?=
 <psuarezhernandez@suse.com>
Date: Thu, 31 Aug 2023 10:41:53 +0100
Subject: [PATCH] Fixed gitfs cachedir_basename to avoid hash collisions
 (#599)

(bsc#1193948, bsc#1214797, CVE-2023-20898)

Fix gitfs tests

It's `gitfs` not `gtfs`, plus some code fixes and cleanup

Signed-off-by: Pedro Algarvio <palgarvio@vmware.com>

fix doc

wrap sha in base64

clean up cache name

stop branch collision

run pre

Co-authored-by: cmcmarrow <charles.mcmarrow.4@gmail.com>
---
 changelog/cve-2023-20898.security.md | 1 +
 salt/utils/gitfs.py | 83 ++++++-
 tests/pytests/unit/utils/test_gitfs.py | 255 +++++++++++++++++++++
 tests/unit/utils/test_gitfs.py | 305 ++++++-------------------
 4 files changed, 403 insertions(+), 241 deletions(-)
 create mode 100644 changelog/cve-2023-20898.security.md
 create mode 100644 tests/pytests/unit/utils/test_gitfs.py

diff --git a/changelog/cve-2023-20898.security.md b/changelog/cve-2023-20898.security.md
new file mode 100644
index 0000000000..44f1729192
--- /dev/null
+++ b/changelog/cve-2023-20898.security.md
@@ -0,0 +1 @@
+Fixed gitfs cachedir_basename to avoid hash collisions. Added MP Lock to gitfs. These changes should stop race conditions.
diff --git a/salt/utils/gitfs.py b/salt/utils/gitfs.py
index 38e84f38aa..af61aa0dda 100644
--- a/salt/utils/gitfs.py
+++ b/salt/utils/gitfs.py
@@ -3,6 +3,7 @@ Classes which provide the shared base for GitFS, git_pillar, and winrepo
"""

+import base64
import contextlib
import copy
import errno
@@ -11,10 +12,12 @@ import glob
import hashlib
import io
import logging
+import multiprocessing
import os
import shlex
import shutil
import stat
+import string
import subprocess
import time
import weakref
@@ -22,6 +25,7 @@ from datetime import datetime

import salt.ext.tornado.ioloop
import salt.fileserver
+import salt.syspaths
import salt.utils.configparser
import salt.utils.data
import salt.utils.files
@@ -34,7 +38,6 @@ import salt.utils.stringutils
import salt.utils.url
import salt.utils.user
import salt.utils.versions
-import salt.syspaths
from salt.config import DEFAULT_MASTER_OPTS as _DEFAULT_MASTER_OPTS
from salt.exceptions import FileserverConfigError, GitLockError, get_error_message
from salt.utils.event import tagify
@@ -226,6 +229,10 @@ class GitProvider:
invoking the parent class' __init__.
"""

+ # master lock should only be locked for very short periods of times "seconds"
+ # the master lock should be used when ever git provider reads or writes to one if it locks
+ _master_lock = multiprocessing.Lock()
+
def __init__(
self,
opts,
@@ -452,13 +459,44 @@ class GitProvider:
failhard(self.role)

hash_type = getattr(hashlib, self.opts.get("hash_type", "md5"))
+ # Generate full id.
+ # Full id helps decrease the chances of collections in the gitfs cache.
+ try:
+ target = str(self.get_checkout_target())
+ except AttributeError:
+ target = ""
+ self._full_id = "-".join(
+ [
+ getattr(self, "name", ""),
+ self.id,
+ getattr(self, "env", ""),
+ getattr(self, "_root", ""),
+ self.role,
+ getattr(self, "base", ""),
+ getattr(self, "branch", ""),
+ target,
+ ]
+ )
# We loaded this data from yaml configuration files, so, its safe
# to use UTF-8
- self.hash = hash_type(self.id.encode("utf-8")).hexdigest()
- self.cachedir_basename = getattr(self, "name", self.hash)
+ base64_hash = str(
+ base64.b64encode(hash_type(self._full_id.encode("utf-8")).digest()),
+ encoding="ascii", # base64 only outputs ascii
+ ).replace(
+ "/", "_"
+ ) # replace "/" with "_" to not cause trouble with file system
+
+ # limit name length to 19, so we don't eat up all the path length for windows
+ # this is due to pygit2 limitations
+ # replace any unknown char with "_" to not cause trouble with file system
+ name_chars = string.ascii_letters + string.digits + "-"
+ cache_name = "".join(
+ c if c in name_chars else "_" for c in getattr(self, "name", "")[:19]
+ )
+
+ self.cachedir_basename = f"{cache_name}-{base64_hash}"
self.cachedir = salt.utils.path.join(cache_root, self.cachedir_basename)
self.linkdir = salt.utils.path.join(cache_root, "links", self.cachedir_basename)
-
if not os.path.isdir(self.cachedir):
os.makedirs(self.cachedir)

@@ -473,6 +511,12 @@ class GitProvider:
log.critical(msg, exc_info=True)
failhard(self.role)

+ def full_id(self):
+ return self._full_id
+
+ def get_cachedir_basename(self):
+ return self.cachedir_basename
+
def _get_envs_from_ref_paths(self, refs):
"""
Return the names of remote refs (stripped of the remote name) and tags
@@ -663,6 +707,19 @@ class GitProvider:
"""
Clear update.lk
"""
+ if self.__class__._master_lock.acquire(timeout=60) is False:
+ # if gitfs works right we should never see this timeout error.
+ log.error("gitfs master lock timeout!")
+ raise TimeoutError("gitfs master lock timeout!")
+ try:
+ return self._clear_lock(lock_type)
+ finally:
+ self.__class__._master_lock.release()
+
+ def _clear_lock(self, lock_type="update"):
+ """
+ Clear update.lk without MultiProcessing locks
+ """
lock_file = self._get_lock_file(lock_type=lock_type)

def _add_error(errlist, exc):
@@ -838,6 +895,20 @@ class GitProvider:
"""
Place a lock file if (and only if) it does not already exist.
"""
+ if self.__class__._master_lock.acquire(timeout=60) is False:
+ # if gitfs works right we should never see this timeout error.
+ log.error("gitfs master lock timeout!")
+ raise TimeoutError("gitfs master lock timeout!")
+ try:
+ return self.__lock(lock_type, failhard)
+ finally:
+ self.__class__._master_lock.release()
+
+ def __lock(self, lock_type="update", failhard=False):
+ """
+ Place a lock file if (and only if) it does not already exist.
+ Without MultiProcessing locks.
+ """
try:
fh_ = os.open(
self._get_lock_file(lock_type), os.O_CREAT | os.O_EXCL | os.O_WRONLY
@@ -904,9 +975,9 @@ class GitProvider:
lock_type,
lock_file,
)
- success, fail = self.clear_lock()
+ success, fail = self._clear_lock()
if success:
- return self._lock(lock_type="update", failhard=failhard)
+ return self.__lock(lock_type="update", failhard=failhard)
elif failhard:
raise
return
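For reference, a minimal standalone sketch of how the new cachedir_basename is derived from the full remote id in the hunk above (the hashlib/base64 handling mirrors the patch; the function name, the SHA-1 choice and the example id are illustrative only, not the exact GitProvider code):

    import base64
    import hashlib
    import string

    def cachedir_basename(name, full_id):
        # Hash the full id and base64-encode the raw digest; "/" is swapped
        # for "_" so the result is a safe directory name.
        digest = hashlib.sha1(full_id.encode("utf-8")).digest()
        b64 = base64.b64encode(digest).decode("ascii").replace("/", "_")
        # Keep at most 19 characters of the remote name and replace anything
        # outside [A-Za-z0-9-] with "_" (Windows path length / pygit2 limits).
        allowed = string.ascii_letters + string.digits + "-"
        safe_name = "".join(c if c in allowed else "_" for c in name[:19])
        return "{}-{}".format(safe_name, b64)

    # e.g. cachedir_basename("repo2", "repo2-file://repo2.git---gitfs-master--")

Because the full id now includes the remote name, id, environment, root, role, base, branch and checkout target, two differently configured remotes can no longer collapse onto the same cache directory.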
diff --git a/tests/pytests/unit/utils/test_gitfs.py b/tests/pytests/unit/utils/test_gitfs.py
new file mode 100644
index 0000000000..e9915de412
--- /dev/null
+++ b/tests/pytests/unit/utils/test_gitfs.py
@@ -0,0 +1,255 @@
+import os
+import string
+import time
+
+import pytest
+
+import salt.fileserver.gitfs
+import salt.utils.gitfs
+from salt.exceptions import FileserverConfigError
+from tests.support.helpers import patched_environ
+from tests.support.mock import MagicMock, patch
+
+try:
+ HAS_PYGIT2 = (
+ salt.utils.gitfs.PYGIT2_VERSION
+ and salt.utils.gitfs.PYGIT2_VERSION >= salt.utils.gitfs.PYGIT2_MINVER
+ and salt.utils.gitfs.LIBGIT2_VERSION
+ and salt.utils.gitfs.LIBGIT2_VERSION >= salt.utils.gitfs.LIBGIT2_MINVER
+ )
+except AttributeError:
+ HAS_PYGIT2 = False
+
+
+if HAS_PYGIT2:
+ import pygit2
+
+
+@pytest.mark.parametrize(
+ "role_name,role_class",
+ (
+ ("gitfs", salt.utils.gitfs.GitFS),
+ ("git_pillar", salt.utils.gitfs.GitPillar),
+ ("winrepo", salt.utils.gitfs.WinRepo),
+ ),
+)
+def test_provider_case_insensitive_gitfs_provider(minion_opts, role_name, role_class):
+ """
+ Ensure that both lowercase and non-lowercase values are supported
+ """
+ provider = "GitPython"
+ key = "{}_provider".format(role_name)
+ with patch.object(role_class, "verify_gitpython", MagicMock(return_value=True)):
+ with patch.object(role_class, "verify_pygit2", MagicMock(return_value=False)):
+ args = [minion_opts, {}]
+ kwargs = {"init_remotes": False}
+ if role_name == "winrepo":
+ kwargs["cache_root"] = "/tmp/winrepo-dir"
+ with patch.dict(minion_opts, {key: provider}):
+ # Try to create an instance with uppercase letters in
+ # provider name. If it fails then a
+ # FileserverConfigError will be raised, so no assert is
+ # necessary.
+ role_class(*args, **kwargs)
+ # Now try to instantiate an instance with all lowercase
+ # letters. Again, no need for an assert here.
+ role_class(*args, **kwargs)
+
+
+@pytest.mark.parametrize(
+ "role_name,role_class",
+ (
+ ("gitfs", salt.utils.gitfs.GitFS),
+ ("git_pillar", salt.utils.gitfs.GitPillar),
+ ("winrepo", salt.utils.gitfs.WinRepo),
+ ),
+)
+def test_valid_provider_gitfs_provider(minion_opts, role_name, role_class):
+ """
+ Ensure that an invalid provider is not accepted, raising a
+ FileserverConfigError.
+ """
+
+ def _get_mock(verify, provider):
+ """
+ Return a MagicMock with the desired return value
+ """
+ return MagicMock(return_value=verify.endswith(provider))
+
+ key = "{}_provider".format(role_name)
+ for provider in salt.utils.gitfs.GIT_PROVIDERS:
+ verify = "verify_gitpython"
+ mock1 = _get_mock(verify, provider)
+ with patch.object(role_class, verify, mock1):
+ verify = "verify_pygit2"
+ mock2 = _get_mock(verify, provider)
+ with patch.object(role_class, verify, mock2):
+ args = [minion_opts, {}]
+ kwargs = {"init_remotes": False}
+ if role_name == "winrepo":
+ kwargs["cache_root"] = "/tmp/winrepo-dir"
+ with patch.dict(minion_opts, {key: provider}):
+ role_class(*args, **kwargs)
+ with patch.dict(minion_opts, {key: "foo"}):
+ # Set the provider name to a known invalid provider
+ # and make sure it raises an exception.
+ with pytest.raises(FileserverConfigError):
+ role_class(*args, **kwargs)
+
+
+@pytest.fixture
+def _prepare_remote_repository_pygit2(tmp_path):
+ remote = os.path.join(tmp_path, "pygit2-repo")
+ filecontent = "This is an empty README file"
+ filename = "README"
+ signature = pygit2.Signature(
+ "Dummy Commiter", "dummy@dummy.com", int(time.time()), 0
+ )
+ repository = pygit2.init_repository(remote, False)
+ builder = repository.TreeBuilder()
+ tree = builder.write()
+ commit = repository.create_commit(
+ "HEAD", signature, signature, "Create master branch", tree, []
+ )
+ repository.create_reference("refs/tags/simple_tag", commit)
+ with salt.utils.files.fopen(
+ os.path.join(repository.workdir, filename), "w"
+ ) as file:
+ file.write(filecontent)
+ blob = repository.create_blob_fromworkdir(filename)
+ builder = repository.TreeBuilder()
+ builder.insert(filename, blob, pygit2.GIT_FILEMODE_BLOB)
+ tree = builder.write()
+ repository.index.read()
+ repository.index.add(filename)
+ repository.index.write()
+ commit = repository.create_commit(
+ "HEAD",
+ signature,
+ signature,
+ "Added a README",
+ tree,
+ [repository.head.target],
+ )
+ repository.create_tag(
+ "annotated_tag", commit, pygit2.GIT_OBJ_COMMIT, signature, "some message"
+ )
+ return remote
+
+
+@pytest.fixture
+def _prepare_provider(tmp_path, minion_opts, _prepare_remote_repository_pygit2):
+ cache = tmp_path / "pygit2-repo-cache"
+ minion_opts.update(
+ {
+ "cachedir": str(cache),
+ "gitfs_disable_saltenv_mapping": False,
+ "gitfs_base": "master",
+ "gitfs_insecure_auth": False,
+ "gitfs_mountpoint": "",
+ "gitfs_passphrase": "",
+ "gitfs_password": "",
+ "gitfs_privkey": "",
+ "gitfs_provider": "pygit2",
+ "gitfs_pubkey": "",
+ "gitfs_ref_types": ["branch", "tag", "sha"],
+ "gitfs_refspecs": [
+ "+refs/heads/*:refs/remotes/origin/*",
+ "+refs/tags/*:refs/tags/*",
+ ],
+ "gitfs_root": "",
+ "gitfs_saltenv_blacklist": [],
+ "gitfs_saltenv_whitelist": [],
+ "gitfs_ssl_verify": True,
+ "gitfs_update_interval": 3,
+ "gitfs_user": "",
+ "verified_gitfs_provider": "pygit2",
+ }
+ )
+ per_remote_defaults = {
+ "base": "master",
+ "disable_saltenv_mapping": False,
+ "insecure_auth": False,
+ "ref_types": ["branch", "tag", "sha"],
+ "passphrase": "",
+ "mountpoint": "",
+ "password": "",
+ "privkey": "",
+ "pubkey": "",
+ "refspecs": [
+ "+refs/heads/*:refs/remotes/origin/*",
+ "+refs/tags/*:refs/tags/*",
+ ],
+ "root": "",
+ "saltenv_blacklist": [],
+ "saltenv_whitelist": [],
+ "ssl_verify": True,
+ "update_interval": 60,
+ "user": "",
+ }
+ per_remote_only = ("all_saltenvs", "name", "saltenv")
+ override_params = tuple(per_remote_defaults)
+ cache_root = cache / "gitfs"
+ role = "gitfs"
+ provider = salt.utils.gitfs.Pygit2(
+ minion_opts,
+ _prepare_remote_repository_pygit2,
+ per_remote_defaults,
+ per_remote_only,
+ override_params,
+ str(cache_root),
+ role,
+ )
+ return provider
+
+
+@pytest.mark.skipif(not HAS_PYGIT2, reason="This host lacks proper pygit2 support")
+@pytest.mark.skip_on_windows(
+ reason="Skip Pygit2 on windows, due to pygit2 access error on windows"
+)
+def test_checkout_pygit2(_prepare_provider):
+ provider = _prepare_provider
+ provider.remotecallbacks = None
+ provider.credentials = None
+ provider.init_remote()
+ provider.fetch()
+ provider.branch = "master"
+ assert provider.cachedir in provider.checkout()
+ provider.branch = "simple_tag"
+ assert provider.cachedir in provider.checkout()
+ provider.branch = "annotated_tag"
+ assert provider.cachedir in provider.checkout()
+ provider.branch = "does_not_exist"
+ assert provider.checkout() is None
+
+
+@pytest.mark.skipif(not HAS_PYGIT2, reason="This host lacks proper pygit2 support")
+@pytest.mark.skip_on_windows(
+ reason="Skip Pygit2 on windows, due to pygit2 access error on windows"
+)
+def test_checkout_pygit2_with_home_env_unset(_prepare_provider):
+ provider = _prepare_provider
+ provider.remotecallbacks = None
+ provider.credentials = None
+ with patched_environ(__cleanup__=["HOME"]):
+ assert "HOME" not in os.environ
+ provider.init_remote()
+ provider.fetch()
+ assert "HOME" in os.environ
+
+
+def test_full_id_pygit2(_prepare_provider):
+ assert _prepare_provider.full_id().startswith("-")
+ assert _prepare_provider.full_id().endswith("/pygit2-repo---gitfs-master--")
+
+
+@pytest.mark.skipif(not HAS_PYGIT2, reason="This host lacks proper pygit2 support")
+@pytest.mark.skip_on_windows(
+ reason="Skip Pygit2 on windows, due to pygit2 access error on windows"
+)
+def test_get_cachedir_basename_pygit2(_prepare_provider):
+ basename = _prepare_provider.get_cachedir_basename()
+ assert len(basename) == 45
+ assert basename[0] == "-"
+ # check that a valid base64 is given '/' -> '_'
+ assert all(c in string.ascii_letters + string.digits + "+_=" for c in basename[1:])
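A note on the length assertion in test_get_cachedir_basename_pygit2 above: assuming the test opts carry the usual sha256 hash_type (a 32-byte digest), base64 needs 4*ceil(32/3) = 44 characters including padding, and the leading "-" separator in front of the empty remote name brings the basename to 45 characters, e.g.:

    import base64
    import hashlib

    digest = hashlib.sha256(b"some full id").digest()   # 32 bytes
    encoded = base64.b64encode(digest).decode("ascii")  # 44 chars, "=" padded
    assert len("-" + encoded) == 45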
diff --git a/tests/unit/utils/test_gitfs.py b/tests/unit/utils/test_gitfs.py
index 7c400b69af..6d8e97a239 100644
--- a/tests/unit/utils/test_gitfs.py
+++ b/tests/unit/utils/test_gitfs.py
@@ -2,37 +2,20 @@
These only test the provider selection and verification logic, they do not init
any remotes.
"""
-import os
-import shutil
-from time import time
+
+import tempfile

import pytest

+import salt.ext.tornado.ioloop
import salt.fileserver.gitfs
import salt.utils.files
import salt.utils.gitfs
+import salt.utils.path
import salt.utils.platform
-import tests.support.paths
-from salt.exceptions import FileserverConfigError
-from tests.support.helpers import patched_environ
from tests.support.mixins import AdaptedConfigurationTestCaseMixin
-from tests.support.mock import MagicMock, patch
from tests.support.unit import TestCase

-try:
- HAS_PYGIT2 = (
- salt.utils.gitfs.PYGIT2_VERSION
- and salt.utils.gitfs.PYGIT2_VERSION >= salt.utils.gitfs.PYGIT2_MINVER
- and salt.utils.gitfs.LIBGIT2_VERSION
- and salt.utils.gitfs.LIBGIT2_VERSION >= salt.utils.gitfs.LIBGIT2_MINVER
- )
-except AttributeError:
- HAS_PYGIT2 = False
-
-
-if HAS_PYGIT2:
- import pygit2
-

def _clear_instance_map():
try:
@@ -45,6 +28,9 @@ def _clear_instance_map():

class TestGitBase(TestCase, AdaptedConfigurationTestCaseMixin):
def setUp(self):
+ self._tmp_dir = tempfile.TemporaryDirectory()
+ tmp_name = self._tmp_dir.name
+
class MockedProvider(
salt.utils.gitfs.GitProvider
): # pylint: disable=abstract-method
@@ -71,6 +57,7 @@ class TestGitBase(TestCase, AdaptedConfigurationTestCaseMixin):
)

def init_remote(self):
+ self.gitdir = salt.utils.path.join(tmp_name, ".git")
self.repo = True
new = False
return new
@@ -107,6 +94,7 @@ class TestGitBase(TestCase, AdaptedConfigurationTestCaseMixin):
for remote in self.main_class.remotes:
remote.fetched = False
del self.main_class
+ self._tmp_dir.cleanup()

def test_update_all(self):
self.main_class.update()
@@ -126,226 +114,73 @@ class TestGitBase(TestCase, AdaptedConfigurationTestCaseMixin):
self.assertTrue(self.main_class.remotes[0].fetched)
self.assertFalse(self.main_class.remotes[1].fetched)

-
-class TestGitFSProvider(TestCase):
- def setUp(self):
- self.opts = {"cachedir": "/tmp/gitfs-test-cache"}
-
- def tearDown(self):
- self.opts = None
-
- def test_provider_case_insensitive(self):
- """
- Ensure that both lowercase and non-lowercase values are supported
- """
- provider = "GitPython"
- for role_name, role_class in (
- ("gitfs", salt.utils.gitfs.GitFS),
- ("git_pillar", salt.utils.gitfs.GitPillar),
- ("winrepo", salt.utils.gitfs.WinRepo),
- ):
-
- key = "{}_provider".format(role_name)
- with patch.object(
- role_class, "verify_gitpython", MagicMock(return_value=True)
- ):
- with patch.object(
- role_class, "verify_pygit2", MagicMock(return_value=False)
- ):
- args = [self.opts, {}]
- kwargs = {"init_remotes": False}
- if role_name == "winrepo":
- kwargs["cache_root"] = "/tmp/winrepo-dir"
- with patch.dict(self.opts, {key: provider}):
- # Try to create an instance with uppercase letters in
- # provider name. If it fails then a
- # FileserverConfigError will be raised, so no assert is
- # necessary.
- role_class(*args, **kwargs)
- # Now try to instantiate an instance with all lowercase
- # letters. Again, no need for an assert here.
- role_class(*args, **kwargs)
-
- def test_valid_provider(self):
- """
- Ensure that an invalid provider is not accepted, raising a
- FileserverConfigError.
- """
-
- def _get_mock(verify, provider):
- """
- Return a MagicMock with the desired return value
- """
- return MagicMock(return_value=verify.endswith(provider))
-
- for role_name, role_class in (
- ("gitfs", salt.utils.gitfs.GitFS),
- ("git_pillar", salt.utils.gitfs.GitPillar),
- ("winrepo", salt.utils.gitfs.WinRepo),
- ):
- key = "{}_provider".format(role_name)
- for provider in salt.utils.gitfs.GIT_PROVIDERS:
- verify = "verify_gitpython"
- mock1 = _get_mock(verify, provider)
- with patch.object(role_class, verify, mock1):
- verify = "verify_pygit2"
- mock2 = _get_mock(verify, provider)
- with patch.object(role_class, verify, mock2):
- args = [self.opts, {}]
- kwargs = {"init_remotes": False}
- if role_name == "winrepo":
- kwargs["cache_root"] = "/tmp/winrepo-dir"
-
- with patch.dict(self.opts, {key: provider}):
- role_class(*args, **kwargs)
-
- with patch.dict(self.opts, {key: "foo"}):
- # Set the provider name to a known invalid provider
- # and make sure it raises an exception.
- self.assertRaises(
- FileserverConfigError, role_class, *args, **kwargs
- )
-
-
-@pytest.mark.skipif(not HAS_PYGIT2, reason="This host lacks proper pygit2 support")
-@pytest.mark.skip_on_windows(
- reason="Skip Pygit2 on windows, due to pygit2 access error on windows"
-)
-class TestPygit2(TestCase):
- def _prepare_remote_repository(self, path):
- shutil.rmtree(path, ignore_errors=True)
-
- filecontent = "This is an empty README file"
- filename = "README"
-
- signature = pygit2.Signature(
- "Dummy Commiter", "dummy@dummy.com", int(time()), 0
+ def test_full_id(self):
+ self.assertEqual(
+ self.main_class.remotes[0].full_id(), "-file://repo1.git---gitfs-master--"
)

- repository = pygit2.init_repository(path, False)
- builder = repository.TreeBuilder()
- tree = builder.write()
- commit = repository.create_commit(
- "HEAD", signature, signature, "Create master branch", tree, []
+ def test_full_id_with_name(self):
+ self.assertEqual(
+ self.main_class.remotes[1].full_id(),
+ "repo2-file://repo2.git---gitfs-master--",
)
- repository.create_reference("refs/tags/simple_tag", commit)

- with salt.utils.files.fopen(
- os.path.join(repository.workdir, filename), "w"
- ) as file:
- file.write(filecontent)
-
- blob = repository.create_blob_fromworkdir(filename)
- builder = repository.TreeBuilder()
- builder.insert(filename, blob, pygit2.GIT_FILEMODE_BLOB)
- tree = builder.write()
-
- repository.index.read()
- repository.index.add(filename)
- repository.index.write()
-
- commit = repository.create_commit(
- "HEAD",
- signature,
- signature,
- "Added a README",
- tree,
- [repository.head.target],
- )
- repository.create_tag(
- "annotated_tag", commit, pygit2.GIT_OBJ_COMMIT, signature, "some message"
+ def test_get_cachedir_basename(self):
+ self.assertEqual(
+ self.main_class.remotes[0].get_cachedir_basename(),
+ "-jXhnbGDemchtZwTwaD2s6VOaVvs98a7w+AtiYlmOVb0=",
)

- def _prepare_cache_repository(self, remote, cache):
- opts = {
- "cachedir": cache,
- "__role": "minion",
- "gitfs_disable_saltenv_mapping": False,
- "gitfs_base": "master",
- "gitfs_insecure_auth": False,
- "gitfs_mountpoint": "",
- "gitfs_passphrase": "",
- "gitfs_password": "",
- "gitfs_privkey": "",
- "gitfs_provider": "pygit2",
- "gitfs_pubkey": "",
- "gitfs_ref_types": ["branch", "tag", "sha"],
- "gitfs_refspecs": [
- "+refs/heads/*:refs/remotes/origin/*",
- "+refs/tags/*:refs/tags/*",
- ],
- "gitfs_root": "",
- "gitfs_saltenv_blacklist": [],
- "gitfs_saltenv_whitelist": [],
- "gitfs_ssl_verify": True,
- "gitfs_update_interval": 3,
- "gitfs_user": "",
- "verified_gitfs_provider": "pygit2",
- }
- per_remote_defaults = {
- "base": "master",
- "disable_saltenv_mapping": False,
- "insecure_auth": False,
- "ref_types": ["branch", "tag", "sha"],
- "passphrase": "",
- "mountpoint": "",
- "password": "",
- "privkey": "",
- "pubkey": "",
- "refspecs": [
- "+refs/heads/*:refs/remotes/origin/*",
- "+refs/tags/*:refs/tags/*",
- ],
- "root": "",
- "saltenv_blacklist": [],
- "saltenv_whitelist": [],
- "ssl_verify": True,
- "update_interval": 60,
- "user": "",
- }
- per_remote_only = ("all_saltenvs", "name", "saltenv")
- override_params = tuple(per_remote_defaults.keys())
- cache_root = os.path.join(cache, "gitfs")
- role = "gitfs"
- shutil.rmtree(cache_root, ignore_errors=True)
- provider = salt.utils.gitfs.Pygit2(
- opts,
- remote,
- per_remote_defaults,
- per_remote_only,
- override_params,
- cache_root,
- role,
+ def test_get_cachedir_base_with_name(self):
+ self.assertEqual(
+ self.main_class.remotes[1].get_cachedir_basename(),
+ "repo2-nuezpiDtjQRFC0ZJDByvi+F6Vb8ZhfoH41n_KFxTGsU=",
)
- return provider

- def test_checkout(self):
- remote = os.path.join(tests.support.paths.TMP, "pygit2-repo")
- cache = os.path.join(tests.support.paths.TMP, "pygit2-repo-cache")
- self._prepare_remote_repository(remote)
- provider = self._prepare_cache_repository(remote, cache)
- provider.remotecallbacks = None
- provider.credentials = None
- provider.init_remote()
- provider.fetch()
- provider.branch = "master"
- self.assertIn(provider.cachedir, provider.checkout())
- provider.branch = "simple_tag"
- self.assertIn(provider.cachedir, provider.checkout())
- provider.branch = "annotated_tag"
- self.assertIn(provider.cachedir, provider.checkout())
- provider.branch = "does_not_exist"
- self.assertIsNone(provider.checkout())
+ def test_git_provider_mp_lock(self):
+ """
+ Check that lock is released after provider.lock()
+ """
+ provider = self.main_class.remotes[0]
+ provider.lock()
+ # check that lock has been released
+ self.assertTrue(provider._master_lock.acquire(timeout=5))
+ provider._master_lock.release()

- def test_checkout_with_home_env_unset(self):
- remote = os.path.join(tests.support.paths.TMP, "pygit2-repo")
- cache = os.path.join(tests.support.paths.TMP, "pygit2-repo-cache")
- self._prepare_remote_repository(remote)
- provider = self._prepare_cache_repository(remote, cache)
- provider.remotecallbacks = None
- provider.credentials = None
- with patched_environ(__cleanup__=["HOME"]):
- self.assertTrue("HOME" not in os.environ)
- provider.init_remote()
- provider.fetch()
- self.assertTrue("HOME" in os.environ)
+ def test_git_provider_mp_clear_lock(self):
+ """
+ Check that lock is released after provider.clear_lock()
+ """
+ provider = self.main_class.remotes[0]
+ provider.clear_lock()
+ # check that lock has been released
+ self.assertTrue(provider._master_lock.acquire(timeout=5))
+ provider._master_lock.release()
+
+ @pytest.mark.slow_test
+ def test_git_provider_mp_lock_timeout(self):
+ """
+ Check that lock will time out if master lock is locked.
+ """
+ provider = self.main_class.remotes[0]
+ # Hijack the lock so git provider is fooled into thinking another instance is doing somthing.
+ self.assertTrue(provider._master_lock.acquire(timeout=5))
+ try:
+ # git provider should raise timeout error to avoid lock race conditions
+ self.assertRaises(TimeoutError, provider.lock)
+ finally:
+ provider._master_lock.release()
+
+ @pytest.mark.slow_test
+ def test_git_provider_mp_clear_lock_timeout(self):
+ """
+ Check that clear lock will time out if master lock is locked.
+ """
+ provider = self.main_class.remotes[0]
+ # Hijack the lock so git provider is fooled into thinking another instance is doing somthing.
+ self.assertTrue(provider._master_lock.acquire(timeout=5))
+ try:
+ # git provider should raise timeout error to avoid lock race conditions
+ self.assertRaises(TimeoutError, provider.clear_lock)
+ finally:
+ provider._master_lock.release()
--
2.41.0
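The reworked tests/unit/utils/test_gitfs.py above pins down the multiprocessing master lock behaviour added to GitProvider. A condensed sketch of the same acquire-with-timeout pattern, outside of Salt (names here are illustrative only):

    import logging
    import multiprocessing

    log = logging.getLogger(__name__)
    _master_lock = multiprocessing.Lock()

    def run_locked(func, *args, **kwargs):
        # Serialize short critical sections across processes; a 60 second
        # timeout turns a stuck peer into a loud TimeoutError instead of a
        # silent deadlock, which is what the *_timeout tests assert.
        if _master_lock.acquire(timeout=60) is False:
            log.error("master lock timeout!")
            raise TimeoutError("master lock timeout!")
        try:
            return func(*args, **kwargs)
        finally:
            _master_lock.release()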
revert-usage-of-long-running-req-channel-bsc-1213960.patch (new file, 418 lines)
@@ -0,0 +1,418 @@
From 3cc2aee2290bd9a4fba9e0cebe3b65370aa76af6 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Pablo=20Su=C3=A1rez=20Hern=C3=A1ndez?=
 <psuarezhernandez@suse.com>
Date: Fri, 1 Sep 2023 08:22:44 +0100
Subject: [PATCH] Revert usage of long running REQ channel (bsc#1213960,
 bsc#1213630, bsc#1213257)

* Revert usage of long running REQ channel (bsc#1213960, bsc#1213630, bsc#1213257)

This reverts commits:
https://github.com/saltstack/salt/commit/a99ffb557b8a1142225d4925aba4a5e493923d2f
https://github.com/saltstack/salt/commit/80ae5188807550e7592fa12d8661ee83c4313ec8
https://github.com/saltstack/salt/commit/3c7e1ec1f08abd7cd1ba78ad7880acb6ba6fdce7
https://github.com/saltstack/salt/commit/171926cc57618b51bf3fdc042b62212e681180fc

From this PR: https://github.com/saltstack/salt/pull/61468

See: https://github.com/saltstack/salt/issues/62959#issuecomment-1658335432

* Revert "Fix linter"

This reverts commit d09d2d3f31e06c554b4ed869b7dc4f8b8bce5dc9.

* Revert "add a regression test"

This reverts commit b2c32be0a80c92585a9063409c42895357bb0dbe.

* Fix failing tests after reverting commits
---
 doc/topics/development/architecture.rst | 8 +-
 salt/channel/client.py | 9 +--
 salt/minion.py | 47 +++--------
 salt/transport/ipc.py | 5 +-
 salt/utils/asynchronous.py | 2 +-
 .../transport/server/test_req_channel.py | 16 ++--
 tests/pytests/unit/test_minion.py | 79 ++++---------------
 7 files changed, 39 insertions(+), 127 deletions(-)

diff --git a/doc/topics/development/architecture.rst b/doc/topics/development/architecture.rst
index 17400db001..1c717092f8 100644
--- a/doc/topics/development/architecture.rst
+++ b/doc/topics/development/architecture.rst
@@ -220,15 +220,11 @@ the received message.
4) The new minion thread is created. The _thread_return() function starts up
and actually calls out to the requested function contained in the job.
5) The requested function runs and returns a result. [Still in thread.]
-6) The result of the function that's run is published on the minion's local event bus with event
-tag "__master_req_channel_payload" [Still in thread.]
+6) The result of the function that's run is encrypted and returned to the
+master's ReqServer (TCP 4506 on master). [Still in thread.]
7) Thread exits. Because the main thread was only blocked for the time that it
took to initialize the worker thread, many other requests could have been
received and processed during this time.
-8) Minion event handler gets the event with tag "__master_req_channel_payload"
-and sends the payload to master's ReqServer (TCP 4506 on master), via the long-running async request channel
-that was opened when minion first started up.
-

A Note on ClearFuncs vs. AESFuncs
diff --git a/salt/channel/client.py b/salt/channel/client.py
index e5b073ccdb..76d7a8e5b9 100644
--- a/salt/channel/client.py
+++ b/salt/channel/client.py
@@ -98,7 +98,6 @@ class AsyncReqChannel:
"_crypted_transfer",
"_uncrypted_transfer",
"send",
- "connect",
]
close_methods = [
"close",
@@ -128,7 +127,7 @@ class AsyncReqChannel:
else:
auth = None

- transport = salt.transport.request_client(opts, io_loop=io_loop)
+ transport = salt.transport.request_client(opts, io_loop)
return cls(opts, transport, auth)

def __init__(self, opts, transport, auth, **kwargs):
@@ -271,10 +270,6 @@ class AsyncReqChannel:

raise salt.ext.tornado.gen.Return(ret)

- @salt.ext.tornado.gen.coroutine
- def connect(self):
- yield self.transport.connect()
-
@salt.ext.tornado.gen.coroutine
def send(self, load, tries=3, timeout=60, raw=False):
"""
@@ -295,7 +290,7 @@ class AsyncReqChannel:
ret = yield self._crypted_transfer(load, timeout=timeout, raw=raw)
break
except Exception as exc: # pylint: disable=broad-except
- log.trace("Failed to send msg %r", exc)
+ log.error("Failed to send msg %r", exc)
if _try >= tries:
raise
else:
diff --git a/salt/minion.py b/salt/minion.py
index c3b65f16c3..9597d6e63a 100644
--- a/salt/minion.py
+++ b/salt/minion.py
@@ -1361,30 +1361,11 @@ class Minion(MinionBase):
"""
Return a future which will complete when you are connected to a master
"""
- # Consider refactoring so that eval_master does not have a subtle side-effect on the contents of the opts array
master, self.pub_channel = yield self.eval_master(
self.opts, self.timeout, self.safe, failed
)
-
- # a long-running req channel
- self.req_channel = salt.transport.client.AsyncReqChannel.factory(
- self.opts, io_loop=self.io_loop
- )
-
- if hasattr(
- self.req_channel, "connect"
- ): # TODO: consider generalizing this for all channels
- log.debug("Connecting minion's long-running req channel")
- yield self.req_channel.connect()
-
yield self._post_master_init(master)

- @salt.ext.tornado.gen.coroutine
- def handle_payload(self, payload, reply_func):
- self.payloads.append(payload)
- yield reply_func(payload)
- self.payload_ack.notify()
-
# TODO: better name...
@salt.ext.tornado.gen.coroutine
def _post_master_init(self, master):
@@ -1599,6 +1580,7 @@ class Minion(MinionBase):
return functions, returners, errors, executors

def _send_req_sync(self, load, timeout):
+
if self.opts["minion_sign_messages"]:
log.trace("Signing event to be published onto the bus.")
minion_privkey_path = os.path.join(self.opts["pki_dir"], "minion.pem")
@@ -1607,11 +1589,9 @@ class Minion(MinionBase):
)
load["sig"] = sig

- with salt.utils.event.get_event(
- "minion", opts=self.opts, listen=False
- ) as event:
- return event.fire_event(
- load, "__master_req_channel_payload", timeout=timeout
+ with salt.channel.client.ReqChannel.factory(self.opts) as channel:
+ return channel.send(
+ load, timeout=timeout, tries=self.opts["return_retry_tries"]
)

@salt.ext.tornado.gen.coroutine
@@ -1624,11 +1604,9 @@ class Minion(MinionBase):
)
load["sig"] = sig

- with salt.utils.event.get_event(
- "minion", opts=self.opts, listen=False
- ) as event:
- ret = yield event.fire_event_async(
- load, "__master_req_channel_payload", timeout=timeout
+ with salt.channel.client.AsyncReqChannel.factory(self.opts) as channel:
+ ret = yield channel.send(
+ load, timeout=timeout, tries=self.opts["return_retry_tries"]
)
raise salt.ext.tornado.gen.Return(ret)

@@ -2055,7 +2033,7 @@ class Minion(MinionBase):
minion_instance._return_pub(ret)

# Add default returners from minion config
- # Should have been converted to comma-delimited string already
+ # Should have been coverted to comma-delimited string already
if isinstance(opts.get("return"), str):
if data["ret"]:
data["ret"] = ",".join((data["ret"], opts["return"]))
@@ -2662,7 +2640,6 @@ class Minion(MinionBase):
"""
Send mine data to the master
"""
- # Consider using a long-running req channel to send mine data
with salt.channel.client.ReqChannel.factory(self.opts) as channel:
data["tok"] = self.tok
try:
@@ -2699,12 +2676,6 @@ class Minion(MinionBase):
force_refresh=data.get("force_refresh", False),
notify=data.get("notify", False),
)
- elif tag.startswith("__master_req_channel_payload"):
- yield _minion.req_channel.send(
- data,
- timeout=_minion._return_retry_timer(),
- tries=_minion.opts["return_retry_tries"],
- )
elif tag.startswith("pillar_refresh"):
yield _minion.pillar_refresh(
force_refresh=data.get("force_refresh", False),
@@ -3175,7 +3146,7 @@ class Minion(MinionBase):
if self._target_load(payload["load"]):
self._handle_decoded_payload(payload["load"])
elif self.opts["zmq_filtering"]:
- # In the filtering enabled case, we'd like to know when minion sees something it shouldn't
+ # In the filtering enabled case, we'd like to know when minion sees something it shouldnt
log.trace(
"Broadcast message received not for this minion, Load: %s",
payload["load"],
diff --git a/salt/transport/ipc.py b/salt/transport/ipc.py
index 3a3f0c7a5f..cee100b086 100644
--- a/salt/transport/ipc.py
+++ b/salt/transport/ipc.py
@@ -208,10 +208,7 @@ class IPCServer:
log.error("Exception occurred while handling stream: %s", exc)

def handle_connection(self, connection, address):
- log.trace(
- "IPCServer: Handling connection to address: %s",
- address if address else connection,
- )
+ log.trace("IPCServer: Handling connection to address: %s", address)
try:
with salt.utils.asynchronous.current_ioloop(self.io_loop):
stream = IOStream(
diff --git a/salt/utils/asynchronous.py b/salt/utils/asynchronous.py
index 0c645bbc3b..88596a4a20 100644
--- a/salt/utils/asynchronous.py
+++ b/salt/utils/asynchronous.py
@@ -34,7 +34,7 @@ class SyncWrapper:
This is uses as a simple wrapper, for example:

asynchronous = AsyncClass()
- # this method would regularly return a future
+ # this method would reguarly return a future
future = asynchronous.async_method()

sync = SyncWrapper(async_factory_method, (arg1, arg2), {'kwarg1': 'val'})
diff --git a/tests/pytests/functional/transport/server/test_req_channel.py b/tests/pytests/functional/transport/server/test_req_channel.py
index 4a74802a0d..555c040c1c 100644
--- a/tests/pytests/functional/transport/server/test_req_channel.py
+++ b/tests/pytests/functional/transport/server/test_req_channel.py
@@ -124,7 +124,7 @@ def req_channel_crypt(request):

@pytest.fixture
-def push_channel(req_server_channel, salt_minion, req_channel_crypt):
+def req_channel(req_server_channel, salt_minion, req_channel_crypt):
with salt.channel.client.ReqChannel.factory(
salt_minion.config, crypt=req_channel_crypt
) as _req_channel:
@@ -135,7 +135,7 @@ def push_channel(req_server_channel, salt_minion, req_channel_crypt):
_req_channel.obj._refcount = 0

-def test_basic(push_channel):
+def test_basic(req_channel):
"""
Test a variety of messages, make sure we get the expected responses
"""
@@ -145,11 +145,11 @@ def test_basic(push_channel):
{"baz": "qux", "list": [1, 2, 3]},
]
for msg in msgs:
- ret = push_channel.send(dict(msg), timeout=5, tries=1)
+ ret = req_channel.send(dict(msg), timeout=5, tries=1)
assert ret["load"] == msg

-def test_normalization(push_channel):
+def test_normalization(req_channel):
"""
Since we use msgpack, we need to test that list types are converted to lists
"""
@@ -160,21 +160,21 @@ def test_normalization(push_channel):
{"list": tuple([1, 2, 3])},
]
for msg in msgs:
- ret = push_channel.send(msg, timeout=5, tries=1)
+ ret = req_channel.send(msg, timeout=5, tries=1)
for key, value in ret["load"].items():
assert types[key] == type(value)

-def test_badload(push_channel, req_channel_crypt):
+def test_badload(req_channel, req_channel_crypt):
"""
Test a variety of bad requests, make sure that we get some sort of error
"""
msgs = ["", [], tuple()]
if req_channel_crypt == "clear":
for msg in msgs:
- ret = push_channel.send(msg, timeout=5, tries=1)
+ ret = req_channel.send(msg, timeout=5, tries=1)
assert ret == "payload and load must be a dict"
else:
for msg in msgs:
with pytest.raises(salt.exceptions.AuthenticationError):
- push_channel.send(msg, timeout=5, tries=1)
+ req_channel.send(msg, timeout=5, tries=1)
diff --git a/tests/pytests/unit/test_minion.py b/tests/pytests/unit/test_minion.py
index 1cee025a48..4508eaee95 100644
--- a/tests/pytests/unit/test_minion.py
+++ b/tests/pytests/unit/test_minion.py
@@ -55,27 +55,26 @@ def test_minion_load_grains_default():

@pytest.mark.parametrize(
- "event",
+ "req_channel",
[
(
- "fire_event",
- lambda data, tag, cb=None, timeout=60: True,
+ "salt.channel.client.AsyncReqChannel.factory",
+ lambda load, timeout, tries: salt.ext.tornado.gen.maybe_future(tries),
),
(
- "fire_event_async",
- lambda data, tag, cb=None, timeout=60: salt.ext.tornado.gen.maybe_future(
- True
- ),
+ "salt.channel.client.ReqChannel.factory",
+ lambda load, timeout, tries: tries,
),
],
)
-def test_send_req_fires_completion_event(event, minion_opts):
- event_enter = MagicMock()
- event_enter.send.side_effect = event[1]
- event = MagicMock()
- event.__enter__.return_value = event_enter
+def test_send_req_tries(req_channel, minion_opts):
+ channel_enter = MagicMock()
+ channel_enter.send.side_effect = req_channel[1]
+ channel = MagicMock()
+ channel.__enter__.return_value = channel_enter

- with patch("salt.utils.event.get_event", return_value=event):
+ with patch(req_channel[0], return_value=channel):
+ minion_opts = salt.config.DEFAULT_MINION_OPTS.copy()
minion_opts["random_startup_delay"] = 0
minion_opts["return_retry_tries"] = 30
minion_opts["grains"] = {}
@@ -85,62 +84,16 @@ def test_send_req_fires_completion_event(event, minion_opts):
load = {"load": "value"}
timeout = 60

- # XXX This is buggy because "async" in event[0] will never evaluate
- # to True and if it *did* evaluate to true the test would fail
- # because you Mock isn't a co-routine.
- if "async" in event[0]:
+ if "Async" in req_channel[0]:
rtn = minion._send_req_async(load, timeout).result()
else:
rtn = minion._send_req_sync(load, timeout)

- # get the
- for idx, call in enumerate(event.mock_calls, 1):
- if "fire_event" in call[0]:
- condition_event_tag = (
- len(call.args) > 1
- and call.args[1] == "__master_req_channel_payload"
- )
- condition_event_tag_error = "{} != {}; Call(number={}): {}".format(
- idx, call, call.args[1], "__master_req_channel_payload"
- )
- condition_timeout = (
- len(call.kwargs) == 1 and call.kwargs["timeout"] == timeout
- )
- condition_timeout_error = "{} != {}; Call(number={}): {}".format(
- idx, call, call.kwargs["timeout"], timeout
- )
-
- fire_event_called = True
- assert condition_event_tag, condition_event_tag_error
- assert condition_timeout, condition_timeout_error
-
- assert fire_event_called
- assert rtn
-
-
-async def test_send_req_async_regression_62453(minion_opts):
- event_enter = MagicMock()
- event_enter.send.side_effect = (
- lambda data, tag, cb=None, timeout=60: salt.ext.tornado.gen.maybe_future(True)
- )
- event = MagicMock()
- event.__enter__.return_value = event_enter
-
- minion_opts["random_startup_delay"] = 0
- minion_opts["return_retry_tries"] = 30
- minion_opts["grains"] = {}
- with patch("salt.loader.grains"):
- minion = salt.minion.Minion(minion_opts)
-
- load = {"load": "value"}
- timeout = 60
-
- # We are just validating no exception is raised
- rtn = await minion._send_req_async(load, timeout)
- assert rtn is False
+ assert rtn == 30

-def test_mine_send_tries():
+@patch("salt.channel.client.ReqChannel.factory")
+def test_mine_send_tries(req_channel_factory):
channel_enter = MagicMock()
channel_enter.send.side_effect = lambda load, timeout, tries: tries
channel = MagicMock()
--
2.41.0
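On the minion side, the net effect of this revert is that each request is sent over a freshly created REQ channel again, instead of being relayed to a long-running channel via the local event bus. A condensed sketch of the reverted _send_req_sync shape (simplified from the salt/minion.py hunk above; message signing and error handling omitted):

    import salt.channel.client

    def send_req_sync(opts, load, timeout):
        # One ReqChannel per call, talking to the master's ReqServer,
        # retried according to return_retry_tries.
        with salt.channel.client.ReqChannel.factory(opts) as channel:
            return channel.send(
                load, timeout=timeout, tries=opts["return_retry_tries"]
            )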
salt.changes (21 lines changed)
@@ -1,8 +1,27 @@
-------------------------------------------------------------------
Fri Sep 1 07:33:18 UTC 2023 - Victor Zhestkov <vzhestkov@suse.com>

- Revert usage of long running REQ channel to prevent possible
  missing responses on requests and dublicated responses
  (bsc#1213960, bsc#1213630, bsc#1213257)

- Added:
  * revert-usage-of-long-running-req-channel-bsc-1213960.patch

-------------------------------------------------------------------
Thu Aug 31 09:51:20 UTC 2023 - Pablo Suárez Hernández <pablo.suarezhernandez@suse.com>

- Fix gitfs cachedir basename to avoid hash collisions
  (bsc#1193948, bsc#1214797, CVE-2023-20898)

- Added:
  * fixed-gitfs-cachedir_basename-to-avoid-hash-collisio.patch

-------------------------------------------------------------------
Tue Aug 22 12:03:21 UTC 2023 - Pablo Suárez Hernández <pablo.suarezhernandez@suse.com>

- Make sure configured user is properly set by Salt (bsc#1210994)
- Do not fail on bad message pack message (bsc#1213441, CVE-2023-20897)
- Do not fail on bad message pack message (bsc#1213441, CVE-2023-20897, bsc#1214796)
- Fix broken tests to make them running in the testsuite
- Prevent possible exceptions on salt.utils.user.get_group_dict (bsc#1212794)
salt.spec (50 lines changed)
@@ -249,55 +249,59 @@ Patch53: add-support-for-gpgautoimport-539.patch
# PATCH-FIX_UPSTREAM: https://github.com/saltstack/salt/pull/62519
Patch54: change-the-delimeters-to-prevent-possible-tracebacks.patch
# PATCH-FIX_UPSTREAM: https://github.com/saltstack/salt/pull/62898
Patch55: pass-the-context-to-pillar-ext-modules.patch
Patch55: pass-the-context-to-pillar-ext-modules.patch
# PATCH-FIX_OPENSUSE: https://github.com/openSUSE/salt/commit/c6be36eeea49ee0d0641da272087305f79c32c99 (not yet upstream)
# Fix problem caused by: https://github.com/openSUSE/salt/pull/493 (Patch47) affecting only 3005.1.
Patch56: use-rlock-to-avoid-deadlocks-in-salt-ssh.patch
Patch56: use-rlock-to-avoid-deadlocks-in-salt-ssh.patch
# PATCH-FIX_UPSTREAM: https://github.com/saltstack/salt/pull/61064
# PATCH-FIX_OPENSUSE: https://github.com/openSUSE/salt/commit/5e3ff4d662321c237ddd5b2c5c83f35a84af594c (not PR to master yet)
Patch57: fixes-for-python-3.10-502.patch
Patch57: fixes-for-python-3.10-502.patch
# PATCH-FIX-OPENSUSE: https://github.com/openSUSE/salt/pull/571
Patch58: control-the-collection-of-lvm-grains-via-config.patch
Patch58: control-the-collection-of-lvm-grains-via-config.patch
# PATCH-FIX_UPSTREAM: https://github.com/saltstack/salt/pull/63460
Patch59: 3005.1-implement-zypper-removeptf-573.patch
Patch59: 3005.1-implement-zypper-removeptf-573.patch
# PATCH-FIX_UPSTREAM: https://github.com/saltstack/salt/pull/63460
# PATCH-FIX_OPENSUSE: https://github.com/openSUSE/salt/pull/578
Patch60: skip-package-names-without-colon-bsc-1208691-578.patch
Patch60: skip-package-names-without-colon-bsc-1208691-578.patch
# PATCH-FIX_OPENSUSE: https://github.com/openSUSE/salt/commit/c0fae09e5a4f6997a60007d970c7c6a5614d9102
Patch61: fix-version-detection-and-avoid-building-and-testing.patch
Patch61: fix-version-detection-and-avoid-building-and-testing.patch
# PATCH-FIX_UPSTREAM: https://github.com/saltstack/salt/pull/64113
Patch62: make-sure-the-file-client-is-destroyed-upon-used.patch
Patch62: make-sure-the-file-client-is-destroyed-upon-used.patch
# PATCH-FIX_OPENSUSE: https://github.com/openSUSE/salt/pull/581
Patch63: avoid-conflicts-with-dependencies-versions-bsc-12116.patch
Patch63: avoid-conflicts-with-dependencies-versions-bsc-12116.patch
# PATCH-FIX_UPSTREAM: https://github.com/saltstack/salt/pull/64369
Patch64: define-__virtualname__-for-transactional_update-modu.patch
Patch64: define-__virtualname__-for-transactional_update-modu.patch
# PATCH-FIX_OPENSUSE: https://github.com/openSUSE/salt/pull/587
Patch65: make-master_tops-compatible-with-salt-3000-and-older.patch
Patch65: make-master_tops-compatible-with-salt-3000-and-older.patch
# PATCH-FIX_OPENSUSE: https://github.com/openSUSE/salt/pull/583
Patch66: tornado-fix-an-open-redirect-in-staticfilehandler-cv.patch
Patch66: tornado-fix-an-open-redirect-in-staticfilehandler-cv.patch
# PATCH-FIX_OPENSUSE: https://github.com/openSUSE/salt/pull/580
Patch67: fix-some-issues-detected-in-salt-support-cli-module-.patch
Patch67: fix-some-issues-detected-in-salt-support-cli-module-.patch
# PATCH-FIX_UPSTREAM: https://github.com/saltstack/salt/pull/64510
Patch68: 3006.0-prevent-_pygit2.giterror-error-loading-known_.patch
Patch68: 3006.0-prevent-_pygit2.giterror-error-loading-known_.patch
# PATCH-FIX_UPSTREAM: https://github.com/saltstack/salt/pull/64300
Patch69: fix-utf8-handling-in-pass-renderer-and-make-it-more-.patch
Patch69: fix-utf8-handling-in-pass-renderer-and-make-it-more-.patch
# PATCH-FIX_UPSTREAM: https://github.com/saltstack/salt/pull/63403
Patch70: zypper-pkgrepo-alreadyconfigured-585.patch
Patch70: zypper-pkgrepo-alreadyconfigured-585.patch
# PATCH-FIX_UPSTREAM: https://github.com/saltstack/salt/pull/64530
Patch71: fix-the-regression-of-user.present-state-when-group-.patch
Patch71: fix-the-regression-of-user.present-state-when-group-.patch
# PATCH-FIX_UPSTREAM: https://github.com/saltstack/salt/pull/64179
Patch72: fix-regression-multiple-values-for-keyword-argument-.patch
Patch72: fix-regression-multiple-values-for-keyword-argument-.patch
# PATCH-FIX_UPSTREAM: https://github.com/saltstack/salt/pull/64554
Patch73: mark-salt-3006-as-released-586.patch
Patch73: mark-salt-3006-as-released-586.patch
# PATCH-FIX_UPSTREAM: https://github.com/saltstack/salt/pull/64599
Patch74: prevent-possible-exceptions-on-salt.utils.user.get_g.patch
Patch74: prevent-possible-exceptions-on-salt.utils.user.get_g.patch
# PATCH-FIX_OPENSUSE: https://github.com/openSUSE/salt/pull/592
Patch75: fix-tests-to-make-them-running-with-salt-testsuite.patch
Patch75: fix-tests-to-make-them-running-with-salt-testsuite.patch
# PATCH-FIX_UPSTREAM: https://github.com/saltstack/salt/commit/f82860b8ad3ee786762fa02fa1a6eaf6e24dc8d4
# PATCH-FIX_UPSTREAM: https://github.com/saltstack/salt/pull/65020
Patch76: do-not-fail-on-bad-message-pack-message-bsc-1213441-.patch
Patch76: do-not-fail-on-bad-message-pack-message-bsc-1213441-.patch
# PATCH-FIX_UPSTREAM: https://github.com/saltstack/salt/pull/64510
Patch77: make-sure-configured-user-is-properly-set-by-salt-bs.patch
Patch77: make-sure-configured-user-is-properly-set-by-salt-bs.patch
# PATCH-FIX_UPSTREAM: https://github.com/saltstack/salt/pull/64959
Patch78: fixed-gitfs-cachedir_basename-to-avoid-hash-collisio.patch
# PATCH-FIX_OPENSUSE: https://github.com/openSUSE/salt/pull/600
Patch79: revert-usage-of-long-running-req-channel-bsc-1213960.patch

### IMPORTANT: The line below is used as a snippet marker. Do not touch it.
### SALT PATCHES LIST END