Accepting request 862930 from systemsmanagement:saltstack

- Remove deprecated warning that breaks minion execution when the "server_id_use_crc" option is missing
- Added:
  * remove-deprecated-warning-that-breaks-miniion-execut.patch

- Revert wrong zypper patch to support vendorchanges flags on pkg.install
- Added:
  * revert-add-patch-support-for-allow-vendor-change-opt.patch

- Force zyppnotify to prefer Packages.db over Packages if it exists
- Allow vendor change option with zypper
- Add pkg.services_need_restart
- Fix for file.check_perms to work with numeric uid/gid
- Added:
  * force-zyppnotify-to-prefer-packages.db-than-packages.patch
  * fix-salt.utils.stringutils.to_str-calls-to-make-it-w.patch
  * add-patch-support-for-allow-vendor-change-option-wit.patch
  * add-pkg.services_need_restart-302.patch

- virt: more network support
  Add more network and PCI/USB host devices passthrough support
  to virt module and states
- Added:
  * open-suse-3002.2-virt-network-311.patch

- Bigvm backports
  virt consoles, CPU tuning and topology, and memory tuning.
- Added:
  * open-suse-3002.2-bigvm-310.patch

- Fix pkg states when DEB package has "all" arch

OBS-URL: https://build.opensuse.org/request/show/862930
OBS-URL: https://build.opensuse.org/package/show/openSUSE:Factory/salt?expand=0&rev=113
This commit is contained in:
Dominique Leuenberger 2021-01-15 18:45:08 +00:00 committed by Git OBS Bridge
commit e6e8c978db
168 changed files with 35403 additions and 47929 deletions

View File

@ -1 +1 @@
1a73678e768b896323b9d2d1f903a400e48e51e1
73673e4ab1d13c4393183b8ad6066dfab39c7e63

View File

@ -3,7 +3,7 @@
<param name="url">https://github.com/openSUSE/salt-packaging.git</param>
<param name="subdir">salt</param>
<param name="filename">package</param>
<param name="revision">3000.3</param>
<param name="revision">3002.2</param>
<param name="scm">git</param>
</service>
<service name="extract_file" mode="disabled">
@ -12,8 +12,8 @@
</service>
<service name="download_url" mode="disabled">
<param name="host">codeload.github.com</param>
<param name="path">openSUSE/salt/tar.gz/v3000.3-suse</param>
<param name="filename">v3000.3.tar.gz</param>
<param name="path">openSUSE/salt/tar.gz/v3002.2-suse</param>
<param name="filename">v3002.2.tar.gz</param>
</service>
<service name="update_changelog" mode="disabled"></service>
</services>

View File

@ -1,4 +1,4 @@
From 951d2a385a40c5322155f952e08430e8402bfbde Mon Sep 17 00:00:00 2001
From 828650500159fd7040d2fa76b2fc4d2b627f7065 Mon Sep 17 00:00:00 2001
From: Alberto Planas <aplanas@gmail.com>
Date: Tue, 22 Oct 2019 11:02:33 +0200
Subject: [PATCH] Accumulated changes from Yomi (#167)
@ -17,190 +17,207 @@ This patch ignore this kind of issue during the grains creation.
(cherry picked from commit b865491b74679140f7a71c5ba50d482db47b600f)
---
salt/grains/core.py | 4 +++
salt/modules/zypperpkg.py | 30 +++++++++++-----
tests/unit/grains/test_core.py | 68 ++++++++++++++++++++++++++++++++++++
tests/unit/modules/test_zypperpkg.py | 26 ++++++++++++++
4 files changed, 119 insertions(+), 9 deletions(-)
salt/grains/core.py | 6 +--
salt/modules/zypperpkg.py | 22 ----------
tests/unit/grains/test_core.py | 64 +++++++++++++++++++++++++++-
tests/unit/modules/test_zypperpkg.py | 38 +++++++++++++++++
4 files changed, 103 insertions(+), 27 deletions(-)
diff --git a/salt/grains/core.py b/salt/grains/core.py
index 77ae99590f..68c43482d3 100644
index 0dc1d97f97..a2983e388b 100644
--- a/salt/grains/core.py
+++ b/salt/grains/core.py
@@ -997,6 +997,10 @@ def _virtual(osdata):
grains['virtual'] = 'gce'
elif 'BHYVE' in output:
grains['virtual'] = 'bhyve'
+ except UnicodeDecodeError:
+ # Some firmwares provide non-valid 'product_name'
+ # files, ignore them
@@ -1046,7 +1046,7 @@ def _virtual(osdata):
if os.path.isfile("/sys/devices/virtual/dmi/id/product_name"):
try:
with salt.utils.files.fopen(
- "/sys/devices/virtual/dmi/id/product_name", "rb"
+ "/sys/devices/virtual/dmi/id/product_name", "r"
) as fhr:
output = salt.utils.stringutils.to_unicode(
fhr.read(), errors="replace"
@@ -1066,9 +1066,7 @@ def _virtual(osdata):
except UnicodeDecodeError:
# Some firmwares provide non-valid 'product_name'
# files, ignore them
- log.debug(
- "The content in /sys/devices/virtual/dmi/id/product_name is not valid"
- )
+ pass
except IOError:
except OSError:
pass
elif osdata['kernel'] == 'FreeBSD':
elif osdata["kernel"] == "FreeBSD":
diff --git a/salt/modules/zypperpkg.py b/salt/modules/zypperpkg.py
index f7158e0810..5f3b6d6855 100644
index 2daec0f380..b5621174a4 100644
--- a/salt/modules/zypperpkg.py
+++ b/salt/modules/zypperpkg.py
@@ -863,23 +863,35 @@ def list_pkgs(versions_as_list=False, root=None, includes=None, **kwargs):
_ret[pkgname] = sorted(ret[pkgname], key=lambda d: d['version'])
@@ -958,28 +958,6 @@ def list_pkgs(versions_as_list=False, root=None, includes=None, **kwargs):
}
]
for include in includes:
+ if include == 'product':
+ products = list_products(all=False, root=root)
+ for product in products:
+ extended_name = '{}:{}'.format(include, product['name'])
+ _ret[extended_name] = [{
+ 'epoch': product['epoch'],
+ 'version': product['version'],
+ 'release': product['release'],
+ 'arch': product['arch'],
+ 'install_date': None,
+ 'install_date_time_t': None,
+ }]
if include in ('pattern', 'patch'):
if include == 'pattern':
- for include in includes:
- if include in ("pattern", "patch"):
- if include == "pattern":
- pkgs = list_installed_patterns(root=root)
+ elements = list_installed_patterns(root=root)
elif include == 'patch':
- elif include == "patch":
- pkgs = list_installed_patches(root=root)
+ elements = list_installed_patches(root=root)
else:
- else:
- pkgs = []
- for pkg in pkgs:
- pkg_extended_name = '{}:{}'.format(include, pkg)
- info = info_available(pkg_extended_name,
+ elements = []
+ for element in elements:
+ extended_name = '{}:{}'.format(include, element)
+ info = info_available(extended_name,
refresh=False,
root=root)
- _ret[pkg_extended_name] = [{
+ _ret[extended_name] = [{
'epoch': None,
- 'version': info[pkg]['version'],
+ 'version': info[element]['version'],
'release': None,
- 'arch': info[pkg]['arch'],
+ 'arch': info[element]['arch'],
'install_date': None,
'install_date_time_t': None,
}]
- pkg_extended_name = "{}:{}".format(include, pkg)
- info = info_available(pkg_extended_name, refresh=False, root=root)
- _ret[pkg_extended_name] = [
- {
- "epoch": None,
- "version": info[pkg]["version"],
- "release": None,
- "arch": info[pkg]["arch"],
- "install_date": None,
- "install_date_time_t": None,
- }
- ]
-
__context__[contextkey] = _ret
return __salt__["pkg_resource.format_pkg_list"](
diff --git a/tests/unit/grains/test_core.py b/tests/unit/grains/test_core.py
index e722bfab5b..33d6a9507f 100644
index a5ceeb8317..0dc3423646 100644
--- a/tests/unit/grains/test_core.py
+++ b/tests/unit/grains/test_core.py
@@ -1559,3 +1559,71 @@ class CoreGrainsTestCase(TestCase, LoaderModuleMockMixin):
assert len(info) == 2
assert all([x is not None for x in info])
assert all([isinstance(x, int) for x in info])
+
+ @skipIf(not salt.utils.platform.is_linux(), 'System is not Linux')
@@ -2047,13 +2047,74 @@ class CoreGrainsTestCase(TestCase, LoaderModuleMockMixin):
result = core.path()
assert result == {"path": path, "systempath": comps}, result
+ @skipIf(not salt.utils.platform.is_linux(), "System is not Linux")
+ @patch("os.path.exists")
+ @patch("salt.utils.platform.is_proxy")
+ def test_kernelparams_return(self):
+ expectations = [
+ ('BOOT_IMAGE=/vmlinuz-3.10.0-693.2.2.el7.x86_64',
+ {'kernelparams': [('BOOT_IMAGE', '/vmlinuz-3.10.0-693.2.2.el7.x86_64')]}),
+ ('root=/dev/mapper/centos_daemon-root',
+ {'kernelparams': [('root', '/dev/mapper/centos_daemon-root')]}),
+ ('rhgb quiet ro',
+ {'kernelparams': [('rhgb', None), ('quiet', None), ('ro', None)]}),
+ ('param="value1"',
+ {'kernelparams': [('param', 'value1')]}),
+ ('param="value1 value2 value3"',
+ {'kernelparams': [('param', 'value1 value2 value3')]}),
+ ('param="value1 value2 value3" LANG="pl" ro',
+ {'kernelparams': [('param', 'value1 value2 value3'), ('LANG', 'pl'), ('ro', None)]}),
+ ('ipv6.disable=1',
+ {'kernelparams': [('ipv6.disable', '1')]}),
+ ('param="value1:value2:value3"',
+ {'kernelparams': [('param', 'value1:value2:value3')]}),
+ ('param="value1,value2,value3"',
+ {'kernelparams': [('param', 'value1,value2,value3')]}),
+ ('param="value1" param="value2" param="value3"',
+ {'kernelparams': [('param', 'value1'), ('param', 'value2'), ('param', 'value3')]}),
+ (
+ "BOOT_IMAGE=/vmlinuz-3.10.0-693.2.2.el7.x86_64",
+ {
+ "kernelparams": [
+ ("BOOT_IMAGE", "/vmlinuz-3.10.0-693.2.2.el7.x86_64")
+ ]
+ },
+ ),
+ (
+ "root=/dev/mapper/centos_daemon-root",
+ {"kernelparams": [("root", "/dev/mapper/centos_daemon-root")]},
+ ),
+ (
+ "rhgb quiet ro",
+ {"kernelparams": [("rhgb", None), ("quiet", None), ("ro", None)]},
+ ),
+ ('param="value1"', {"kernelparams": [("param", "value1")]}),
+ (
+ 'param="value1 value2 value3"',
+ {"kernelparams": [("param", "value1 value2 value3")]},
+ ),
+ (
+ 'param="value1 value2 value3" LANG="pl" ro',
+ {
+ "kernelparams": [
+ ("param", "value1 value2 value3"),
+ ("LANG", "pl"),
+ ("ro", None),
+ ]
+ },
+ ),
+ ("ipv6.disable=1", {"kernelparams": [("ipv6.disable", "1")]}),
+ (
+ 'param="value1:value2:value3"',
+ {"kernelparams": [("param", "value1:value2:value3")]},
+ ),
+ (
+ 'param="value1,value2,value3"',
+ {"kernelparams": [("param", "value1,value2,value3")]},
+ ),
+ (
+ 'param="value1" param="value2" param="value3"',
+ {
+ "kernelparams": [
+ ("param", "value1"),
+ ("param", "value2"),
+ ("param", "value3"),
+ ]
+ },
+ ),
+ ]
+
+ for cmdline, expectation in expectations:
+ with patch('salt.utils.files.fopen', mock_open(read_data=cmdline)):
+ with patch("salt.utils.files.fopen", mock_open(read_data=cmdline)):
+ self.assertEqual(core.kernelparams(), expectation)
+
+ @skipIf(not salt.utils.platform.is_linux(), 'System is not Linux')
+ @patch('os.path.exists')
+ @patch('salt.utils.platform.is_proxy')
+ def test__hw_data_linux_empty(self, is_proxy, exists):
+ is_proxy.return_value = False
+ exists.return_value = True
+ with patch('salt.utils.files.fopen', mock_open(read_data='')):
+ self.assertEqual(core._hw_data({'kernel': 'Linux'}), {
+ 'biosreleasedate': '',
+ 'biosversion': '',
+ 'manufacturer': '',
+ 'productname': '',
+ 'serialnumber': '',
+ 'uuid': ''
+ })
+
+ @skipIf(not salt.utils.platform.is_linux(), 'System is not Linux')
+ @skipIf(six.PY2, 'UnicodeDecodeError is throw in Python 3')
+ @patch('os.path.exists')
+ @patch('salt.utils.platform.is_proxy')
+ def test__hw_data_linux_unicode_error(self, is_proxy, exists):
+ def _fopen(*args):
+ class _File(object):
+ def __enter__(self):
+ return self
+
+ def __exit__(self, *args):
+ pass
+
+ def read(self):
+ raise UnicodeDecodeError('enconding', b'', 1, 2, 'reason')
+
+ return _File()
+
+ is_proxy.return_value = False
+ exists.return_value = True
+ with patch('salt.utils.files.fopen', _fopen):
+ self.assertEqual(core._hw_data({'kernel': 'Linux'}), {})
@skipIf(not salt.utils.platform.is_linux(), "System is not Linux")
@patch("os.path.exists")
@patch("salt.utils.platform.is_proxy")
def test__hw_data_linux_empty(self, is_proxy, exists):
is_proxy.return_value = False
exists.return_value = True
- with patch("salt.utils.files.fopen", mock_open(read_data=b"")):
+ with patch("salt.utils.files.fopen", mock_open(read_data="")):
self.assertEqual(
core._hw_data({"kernel": "Linux"}),
{
@@ -2067,6 +2128,7 @@ class CoreGrainsTestCase(TestCase, LoaderModuleMockMixin):
)
@skipIf(not salt.utils.platform.is_linux(), "System is not Linux")
+ @skipIf(six.PY2, "UnicodeDecodeError is throw in Python 3")
@patch("os.path.exists")
@patch("salt.utils.platform.is_proxy")
def test__hw_data_linux_unicode_error(self, is_proxy, exists):
diff --git a/tests/unit/modules/test_zypperpkg.py b/tests/unit/modules/test_zypperpkg.py
index 6102043384..76937cc358 100644
index 5d4e7766b6..1b62122e0e 100644
--- a/tests/unit/modules/test_zypperpkg.py
+++ b/tests/unit/modules/test_zypperpkg.py
@@ -944,6 +944,32 @@ Repository 'DUMMY' not found by its alias, number, or URI.
with self.assertRaisesRegex(CommandExecutionError, '^Advisory id "SUSE-PATCH-XXX" not found$'):
zypper.install(advisory_ids=['SUSE-PATCH-XXX'])
@@ -1424,6 +1424,44 @@ Repository 'DUMMY' not found by its alias, number, or URI.
ret, {"product:openSUSE": {"old": "15.2", "new": "15.3"}}
)
+ @patch('salt.modules.zypperpkg._systemd_scope',
+ MagicMock(return_value=False))
+ @patch('salt.modules.zypperpkg.list_products',
+ MagicMock(return_value={'openSUSE': {'installed': False, 'summary': 'test'}}))
+ @patch('salt.modules.zypperpkg.list_pkgs', MagicMock(side_effect=[{"product:openSUSE": "15.2"},
+ {"product:openSUSE": "15.3"}]))
+ @patch("salt.modules.zypperpkg._systemd_scope", MagicMock(return_value=False))
+ @patch(
+ "salt.modules.zypperpkg.list_products",
+ MagicMock(return_value={"openSUSE": {"installed": False, "summary": "test"}}),
+ )
+ @patch(
+ "salt.modules.zypperpkg.list_pkgs",
+ MagicMock(
+ side_effect=[{"product:openSUSE": "15.2"}, {"product:openSUSE": "15.3"}]
+ ),
+ )
+ def test_install_product_ok(self):
+ '''
+ """
+ Test successfully product installation.
+ '''
+ with patch.dict(zypper.__salt__,
+ {
+ 'pkg_resource.parse_targets': MagicMock(
+ return_value=(['product:openSUSE'], None))
+ }):
+ with patch('salt.modules.zypperpkg.__zypper__.noraise.call', MagicMock()) as zypper_mock:
+ ret = zypper.install('product:openSUSE', includes=['product'])
+ zypper_mock.assert_called_once_with(
+ '--no-refresh',
+ 'install',
+ '--auto-agree-with-licenses',
+ '--name',
+ 'product:openSUSE'
+ """
+ with patch.dict(
+ zypper.__salt__,
+ {
+ "pkg_resource.parse_targets": MagicMock(
+ return_value=(["product:openSUSE"], None)
+ )
+ },
+ ):
+ with patch(
+ "salt.modules.zypperpkg.__zypper__.noraise.call", MagicMock()
+ ) as zypper_mock:
+ ret = zypper.install("product:openSUSE", includes=["product"])
+ zypper_mock.assert_called_once_with(
+ "--no-refresh",
+ "install",
+ "--auto-agree-with-licenses",
+ "--name",
+ "product:openSUSE",
+ )
+ self.assertDictEqual(
+ ret, {"product:openSUSE": {"old": "15.2", "new": "15.3"}}
+ )
+ self.assertDictEqual(ret, {"product:openSUSE": {"old": "15.2", "new": "15.3"}})
+
def test_remove_purge(self):
'''
"""
Test package removal
--
2.16.4
2.29.2

View File

@ -1,4 +1,4 @@
From 9f29577b75cac1e79ec7c30a5dff0dff0ab9da3a Mon Sep 17 00:00:00 2001
From 7d35fdba84b6e1b62a3abc71e518366a35efb662 Mon Sep 17 00:00:00 2001
From: Alberto Planas <aplanas@gmail.com>
Date: Tue, 30 Jul 2019 11:23:12 +0200
Subject: [PATCH] Accumulated changes required for Yomi (#165)
@ -58,143 +58,60 @@ so the cached data will be separated too.
(cherry picked from commit 9c54bb3e8c93ba21fc583bdefbcadbe53cbcd7b5)
---
salt/modules/cmdmod.py | 12 +++++++++---
salt/modules/zypperpkg.py | 13 ++++++++++---
tests/unit/modules/test_cmdmod.py | 16 ++++++++++++++++
tests/unit/modules/test_zypperpkg.py | 21 +++++++++++++++++++++
4 files changed, 56 insertions(+), 6 deletions(-)
salt/modules/zypperpkg.py | 1 -
tests/unit/modules/test_zypperpkg.py | 22 +++++++++++++++++++++-
2 files changed, 21 insertions(+), 2 deletions(-)
diff --git a/salt/modules/cmdmod.py b/salt/modules/cmdmod.py
index eed7656a6d..0d2f720bbb 100644
--- a/salt/modules/cmdmod.py
+++ b/salt/modules/cmdmod.py
@@ -3094,13 +3094,19 @@ def run_chroot(root,
if isinstance(cmd, (list, tuple)):
cmd = ' '.join([six.text_type(i) for i in cmd])
- cmd = 'chroot {0} {1} -c {2}'.format(root, sh_, _cmd_quote(cmd))
+
+ # If runas and group are provided, we expect that the user lives
+ # inside the chroot, not outside.
+ if runas:
+ userspec = '--userspec {}:{}'.format(runas, group if group else '')
+ else:
+ userspec = ''
+
+ cmd = 'chroot {} {} {} -c {}'.format(userspec, root, sh_, _cmd_quote(cmd))
run_func = __context__.pop('cmd.run_chroot.func', run_all)
ret = run_func(cmd,
- runas=runas,
- group=group,
cwd=cwd,
stdin=stdin,
shell=shell,
diff --git a/salt/modules/zypperpkg.py b/salt/modules/zypperpkg.py
index 3760b525e7..8179cd8c1d 100644
index c996935bff..b099f3e5d7 100644
--- a/salt/modules/zypperpkg.py
+++ b/salt/modules/zypperpkg.py
@@ -449,8 +449,14 @@ def _clean_cache():
'''
Clean cached results
'''
+ keys = []
for cache_name in ['pkg.list_pkgs', 'pkg.list_provides']:
- __context__.pop(cache_name, None)
+ for contextkey in __context__:
+ if contextkey.startswith(cache_name):
+ keys.append(contextkey)
+
+ for key in keys:
+ __context__.pop(key, None)
def list_upgrades(refresh=True, root=None, **kwargs):
@@ -811,9 +817,10 @@ def list_pkgs(versions_as_list=False, root=None, includes=None, **kwargs):
includes = includes if includes else []
- contextkey = 'pkg.list_pkgs'
+ # Results can be different if a different root or a different
+ # inclusion types are passed
+ contextkey = 'pkg.list_pkgs_{}_{}'.format(root, includes)
@@ -879,7 +879,6 @@ def list_pkgs(versions_as_list=False, root=None, includes=None, **kwargs):
# inclusion types are passed
contextkey = "pkg.list_pkgs_{}_{}".format(root, includes)
- # TODO(aplanas): this cached value depends on the parameters
if contextkey not in __context__:
ret = {}
cmd = ['rpm']
diff --git a/tests/unit/modules/test_cmdmod.py b/tests/unit/modules/test_cmdmod.py
index f8fba59294..8d763435f8 100644
--- a/tests/unit/modules/test_cmdmod.py
+++ b/tests/unit/modules/test_cmdmod.py
@@ -371,6 +371,22 @@ class CMDMODTestCase(TestCase, LoaderModuleMockMixin):
else:
raise RuntimeError
+ @skipIf(salt.utils.platform.is_windows(), 'Do not run on Windows')
+ @skipIf(salt.utils.platform.is_darwin(), 'Do not run on MacOS')
+ def test_run_cwd_in_combination_with_runas(self):
+ '''
+ cmd.run executes command in the cwd directory
+ when the runas parameter is specified
+ '''
+ cmd = 'pwd'
+ cwd = '/tmp'
+ runas = os.getlogin()
+
+ with patch.dict(cmdmod.__grains__, {'os': 'Darwin',
+ 'os_family': 'Solaris'}):
+ stdout = cmdmod._run(cmd, cwd=cwd, runas=runas).get('stdout')
+ self.assertEqual(stdout, cwd)
+
def test_run_all_binary_replace(self):
'''
Test for failed decoding of binary data, for instance when doing
cmd = ["rpm"]
diff --git a/tests/unit/modules/test_zypperpkg.py b/tests/unit/modules/test_zypperpkg.py
index 12c22bfcb2..6102043384 100644
index 032785395e..5d4e7766b6 100644
--- a/tests/unit/modules/test_zypperpkg.py
+++ b/tests/unit/modules/test_zypperpkg.py
@@ -571,6 +571,7 @@ Repository 'DUMMY' not found by its alias, number, or URI.
patch.dict(zypper.__salt__, {'pkg_resource.stringify': MagicMock()}):
pkgs = zypper.list_pkgs(versions_as_list=True)
self.assertFalse(pkgs.get('gpg-pubkey', False))
+ self.assertTrue('pkg.list_pkgs_None_[]' in zypper.__context__)
for pkg_name, pkg_version in {
'jakarta-commons-discovery': ['0.4-129.686'],
'yast2-ftp-server': ['3.1.8-8.1'],
@@ -613,6 +614,7 @@ Repository 'DUMMY' not found by its alias, number, or URI.
patch.dict(pkg_resource.__salt__, {'pkg.parse_arch_from_name': zypper.parse_arch_from_name}):
pkgs = zypper.list_pkgs(attr=['epoch', 'release', 'arch', 'install_date_time_t'])
self.assertFalse(pkgs.get('gpg-pubkey', False))
+ self.assertTrue('pkg.list_pkgs_None_[]' in zypper.__context__)
for pkg_name, pkg_attr in {
'jakarta-commons-discovery': [{
'version': '0.4',
@@ -1456,3 +1458,22 @@ pattern() = package-c'''),
'summary': 'description b',
},
@@ -912,7 +912,8 @@ Repository 'DUMMY' not found by its alias, number, or URI.
), patch.dict(
zypper.__salt__, {"pkg_resource.stringify": MagicMock()}
), patch.dict(
- pkg_resource.__salt__, {"pkg.parse_arch": zypper.parse_arch}
+ pkg_resource.__salt__,
+ {"pkg.parse_arch_from_name": zypper.parse_arch_from_name},
):
pkgs = zypper.list_pkgs(
attr=["epoch", "release", "arch", "install_date_time_t"]
@@ -1950,3 +1951,22 @@ pattern() = package-c"""
"package-a": {"installed": True, "summary": "description a",},
"package-b": {"installed": False, "summary": "description b",},
}
+
+ def test__clean_cache_empty(self):
+ '''Test that an empty cached can be cleaned'''
+ """Test that an empty cached can be cleaned"""
+ context = {}
+ with patch.dict(zypper.__context__, context):
+ zypper._clean_cache()
+ assert context == {}
+
+ def test__clean_cache_filled(self):
+ '''Test that a filled cached can be cleaned'''
+ """Test that a filled cached can be cleaned"""
+ context = {
+ 'pkg.list_pkgs_/mnt_[]': None,
+ 'pkg.list_pkgs_/mnt_[patterns]': None,
+ 'pkg.list_provides': None,
+ 'pkg.other_data': None,
+ "pkg.list_pkgs_/mnt_[]": None,
+ "pkg.list_pkgs_/mnt_[patterns]": None,
+ "pkg.list_provides": None,
+ "pkg.other_data": None,
+ }
+ with patch.dict(zypper.__context__, context):
+ zypper._clean_cache()
+ self.assertEqual(zypper.__context__, {'pkg.other_data': None})
+ self.assertEqual(zypper.__context__, {"pkg.other_data": None})
--
2.16.4
2.29.2

View File

@ -1,26 +1,28 @@
From 6df4cef549665aad5b9e2af50eb06124a2bb0997 Mon Sep 17 00:00:00 2001
From c44b897eb1305c6b9c341fc16f729d2293ab24e4 Mon Sep 17 00:00:00 2001
From: Bo Maryniuk <bo@suse.de>
Date: Tue, 17 Oct 2017 16:52:33 +0200
Subject: [PATCH] Activate all beacons sources: config/pillar/grains
---
salt/minion.py | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
salt/minion.py | 4 +---
1 file changed, 1 insertion(+), 3 deletions(-)
diff --git a/salt/minion.py b/salt/minion.py
index 6a77d90185..457f485b0a 100644
index c255f37c26..4da665a130 100644
--- a/salt/minion.py
+++ b/salt/minion.py
@@ -483,7 +483,7 @@ class MinionBase(object):
@@ -508,9 +508,7 @@ class MinionBase:
the pillar or grains changed
'''
if 'config.merge' in functions:
- b_conf = functions['config.merge']('beacons', self.opts['beacons'], omit_opts=True)
+ b_conf = functions['config.merge']('beacons', self.opts['beacons'])
"""
if "config.merge" in functions:
- b_conf = functions["config.merge"](
- "beacons", self.opts["beacons"], omit_opts=True
- )
+ b_conf = functions["config.merge"]("beacons", self.opts["beacons"])
if b_conf:
return self.beacons.process(b_conf, self.opts['grains']) # pylint: disable=no-member
return []
return self.beacons.process(
b_conf, self.opts["grains"]
--
2.16.4
2.29.2

View File

@ -1,4 +1,4 @@
From cd66b1e6636013440577a38a5a68729fec2f3f99 Mon Sep 17 00:00:00 2001
From 2e300c770c227cf394929b7d5d025d5c52f1ae2c Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Pablo=20Su=C3=A1rez=20Hern=C3=A1ndez?=
<psuarezhernandez@suse.com>
Date: Mon, 14 May 2018 11:33:13 +0100
@ -19,21 +19,119 @@ Refactor: use dict.setdefault instead if-else statement
Allow removing only specific package versions with zypper and yum
---
salt/states/pkg.py | 21 +++++++++++++++++++++
1 file changed, 21 insertions(+)
salt/states/pkg.py | 285 +++++++++++++++++++++++----------------------
1 file changed, 146 insertions(+), 139 deletions(-)
diff --git a/salt/states/pkg.py b/salt/states/pkg.py
index a13d418400..c0fa2f6b69 100644
index 51b5a06e8f..a1b2a122bb 100644
--- a/salt/states/pkg.py
+++ b/salt/states/pkg.py
@@ -450,6 +450,16 @@ def _find_remove_targets(name=None,
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
"""
Installation of packages using OS package managers such as yum or apt-get
=========================================================================
@@ -71,21 +70,16 @@ state module
used. This will be addressed in a future release of Salt.
"""
if __grains__['os'] == 'FreeBSD' and origin:
cver = [k for k, v in six.iteritems(cur_pkgs) if v['origin'] == pkgname]
+ elif __grains__['os_family'] == 'Suse':
-# Import python libs
-from __future__ import absolute_import, print_function, unicode_literals
import fnmatch
import logging
import os
import re
-# Import Salt libs
import salt.utils.pkg
import salt.utils.platform
import salt.utils.versions
from salt.exceptions import CommandExecutionError, MinionError, SaltInvocationError
-
-# Import 3rd-party libs
from salt.ext import six
from salt.modules.pkg_resource import _repack_pkgs
from salt.output import nested
@@ -323,7 +317,7 @@ def _find_download_targets(
"name": name,
"changes": {},
"result": True,
- "comment": "Version {0} of package '{1}' is already "
+ "comment": "Version {} of package '{}' is already "
"downloaded".format(version, name),
}
@@ -334,7 +328,7 @@ def _find_download_targets(
"name": name,
"changes": {},
"result": True,
- "comment": "Package {0} is already " "downloaded".format(name),
+ "comment": "Package {} is already " "downloaded".format(name),
}
version_spec = False
@@ -349,13 +343,13 @@ def _find_download_targets(
comments.append(
"The following package(s) were not found, and no "
"possible matches were found in the package db: "
- "{0}".format(", ".join(sorted(problems["no_suggest"])))
+ "{}".format(", ".join(sorted(problems["no_suggest"])))
)
if problems.get("suggest"):
- for pkgname, suggestions in six.iteritems(problems["suggest"]):
+ for pkgname, suggestions in problems["suggest"].items():
comments.append(
- "Package '{0}' not found (possible matches: "
- "{1})".format(pkgname, ", ".join(suggestions))
+ "Package '{}' not found (possible matches: "
+ "{})".format(pkgname, ", ".join(suggestions))
)
if comments:
if len(comments) > 1:
@@ -371,7 +365,7 @@ def _find_download_targets(
# Check current downloaded versions against specified versions
targets = {}
problems = []
- for pkgname, pkgver in six.iteritems(to_download):
+ for pkgname, pkgver in to_download.items():
cver = cur_pkgs.get(pkgname, {})
# Package not yet downloaded, so add to targets
if not cver:
@@ -401,7 +395,7 @@ def _find_download_targets(
if not targets:
# All specified packages are already downloaded
- msg = "All specified packages{0} are already downloaded".format(
+ msg = "All specified packages{} are already downloaded".format(
" (matching specified versions)" if version_spec else ""
)
return {"name": name, "changes": {}, "result": True, "comment": msg}
@@ -425,7 +419,7 @@ def _find_advisory_targets(name=None, advisory_ids=None, **kwargs):
"name": name,
"changes": {},
"result": True,
- "comment": "Advisory patch {0} is already " "installed".format(name),
+ "comment": "Advisory patch {} is already " "installed".format(name),
}
# Find out which advisory patches will be targeted in the call to pkg.install
@@ -477,12 +471,22 @@ def _find_remove_targets(
# Check current versions against specified versions
targets = []
problems = []
- for pkgname, pkgver in six.iteritems(to_remove):
+ for pkgname, pkgver in to_remove.items():
# FreeBSD pkg supports `openjdk` and `java/openjdk7` package names
origin = bool(re.search("/", pkgname))
if __grains__["os"] == "FreeBSD" and origin:
- cver = [k for k, v in six.iteritems(cur_pkgs) if v["origin"] == pkgname]
+ cver = [k for k, v in cur_pkgs.items() if v["origin"] == pkgname]
+ elif __grains__["os_family"] == "Suse":
+ # On SUSE systems. Zypper returns packages without "arch" in name
+ try:
+ namepart, archpart = pkgname.rsplit('.', 1)
+ namepart, archpart = pkgname.rsplit(".", 1)
+ except ValueError:
+ cver = cur_pkgs.get(pkgname, [])
+ else:
@ -43,14 +141,162 @@ index a13d418400..c0fa2f6b69 100644
else:
cver = cur_pkgs.get(pkgname, [])
@@ -856,6 +866,17 @@ def _verify_install(desired, new_pkgs, ignore_epoch=False, new_caps=None):
cver = new_pkgs.get(pkgname.split('%')[0])
elif __grains__['os_family'] == 'Debian':
cver = new_pkgs.get(pkgname.split('=')[0])
+ elif __grains__['os_family'] == 'Suse':
@@ -518,7 +522,7 @@ def _find_remove_targets(
if not targets:
# All specified packages are already absent
- msg = "All specified packages{0} are already absent".format(
+ msg = "All specified packages{} are already absent".format(
" (matching specified versions)" if version_spec else ""
)
return {"name": name, "changes": {}, "result": True, "comment": msg}
@@ -619,7 +623,7 @@ def _find_install_targets(
"name": name,
"changes": {},
"result": False,
- "comment": "Invalidly formatted '{0}' parameter. See "
+ "comment": "Invalidly formatted '{}' parameter. See "
"minion log.".format("pkgs" if pkgs else "sources"),
}
@@ -634,7 +638,7 @@ def _find_install_targets(
"name": name,
"changes": {},
"result": False,
- "comment": "Package {0} not found in the "
+ "comment": "Package {} not found in the "
"repository.".format(name),
}
if version is None:
@@ -656,7 +660,7 @@ def _find_install_targets(
origin = bool(re.search("/", name))
if __grains__["os"] == "FreeBSD" and origin:
- cver = [k for k, v in six.iteritems(cur_pkgs) if v["origin"] == name]
+ cver = [k for k, v in cur_pkgs.items() if v["origin"] == name]
else:
cver = cur_pkgs.get(name, [])
@@ -667,7 +671,7 @@ def _find_install_targets(
"name": name,
"changes": {},
"result": True,
- "comment": "Version {0} of package '{1}' is already "
+ "comment": "Version {} of package '{}' is already "
"installed".format(version, name),
}
@@ -678,7 +682,7 @@ def _find_install_targets(
"name": name,
"changes": {},
"result": True,
- "comment": "Package {0} is already " "installed".format(name),
+ "comment": "Package {} is already " "installed".format(name),
}
version_spec = False
@@ -687,21 +691,19 @@ def _find_install_targets(
# enforced. Takes extra time. Disable for improved performance
if not skip_suggestions:
# Perform platform-specific pre-flight checks
- not_installed = dict(
- [
- (name, version)
- for name, version in desired.items()
- if not (
- name in cur_pkgs
- and (
- version is None
- or _fulfills_version_string(
- cur_pkgs[name], version, ignore_epoch=ignore_epoch
- )
+ not_installed = {
+ name: version
+ for name, version in desired.items()
+ if not (
+ name in cur_pkgs
+ and (
+ version is None
+ or _fulfills_version_string(
+ cur_pkgs[name], version, ignore_epoch=ignore_epoch
)
)
- ]
- )
+ )
+ }
if not_installed:
try:
problems = _preflight_check(not_installed, **kwargs)
@@ -713,13 +715,13 @@ def _find_install_targets(
comments.append(
"The following package(s) were not found, and no "
"possible matches were found in the package db: "
- "{0}".format(", ".join(sorted(problems["no_suggest"])))
+ "{}".format(", ".join(sorted(problems["no_suggest"])))
)
if problems.get("suggest"):
- for pkgname, suggestions in six.iteritems(problems["suggest"]):
+ for pkgname, suggestions in problems["suggest"].items():
comments.append(
- "Package '{0}' not found (possible matches: "
- "{1})".format(pkgname, ", ".join(suggestions))
+ "Package '{}' not found (possible matches: "
+ "{})".format(pkgname, ", ".join(suggestions))
)
if comments:
if len(comments) > 1:
@@ -733,9 +735,7 @@ def _find_install_targets(
# Resolve the latest package version for any packages with "latest" in the
# package version
- wants_latest = (
- [] if sources else [x for x, y in six.iteritems(desired) if y == "latest"]
- )
+ wants_latest = [] if sources else [x for x, y in desired.items() if y == "latest"]
if wants_latest:
resolved_latest = __salt__["pkg.latest_version"](
*wants_latest, refresh=refresh, **kwargs
@@ -766,7 +766,7 @@ def _find_install_targets(
problems = []
warnings = []
failed_verify = False
- for package_name, version_string in six.iteritems(desired):
+ for package_name, version_string in desired.items():
cver = cur_pkgs.get(package_name, [])
if resolve_capabilities and not cver and package_name in cur_prov:
cver = cur_pkgs.get(cur_prov.get(package_name)[0], [])
@@ -795,12 +795,12 @@ def _find_install_targets(
problems.append(err.format(version_string, "file not found"))
continue
elif not os.path.exists(cached_path):
- problems.append("{0} does not exist on minion".format(version_string))
+ problems.append("{} does not exist on minion".format(version_string))
continue
source_info = __salt__["lowpkg.bin_pkg_info"](cached_path)
if source_info is None:
warnings.append(
- "Failed to parse metadata for {0}".format(version_string)
+ "Failed to parse metadata for {}".format(version_string)
)
continue
else:
@@ -923,13 +923,24 @@ def _verify_install(desired, new_pkgs, ignore_epoch=None, new_caps=None):
has_origin = "/" in pkgname
if __grains__["os"] == "FreeBSD" and has_origin:
- cver = [k for k, v in six.iteritems(new_pkgs) if v["origin"] == pkgname]
+ cver = [k for k, v in new_pkgs.items() if v["origin"] == pkgname]
elif __grains__["os"] == "MacOS" and has_origin:
cver = new_pkgs.get(pkgname, new_pkgs.get(pkgname.split("/")[-1]))
elif __grains__["os"] == "OpenBSD":
cver = new_pkgs.get(pkgname.split("%")[0])
elif __grains__["os_family"] == "Debian":
cver = new_pkgs.get(pkgname.split("=")[0])
+ elif __grains__["os_family"] == "Suse":
+ # On SUSE systems. Zypper returns packages without "arch" in name
+ try:
+ namepart, archpart = pkgname.rsplit('.', 1)
+ namepart, archpart = pkgname.rsplit(".", 1)
+ except ValueError:
+ cver = new_pkgs.get(pkgname)
+ else:
@ -61,7 +307,653 @@ index a13d418400..c0fa2f6b69 100644
else:
cver = new_pkgs.get(pkgname)
if not cver and pkgname in new_caps:
@@ -964,7 +975,7 @@ def _get_desired_pkg(name, desired):
oper = ""
else:
oper = "="
- return "{0}{1}{2}".format(name, oper, "" if not desired[name] else desired[name])
+ return "{}{}{}".format(name, oper, "" if not desired[name] else desired[name])
def _preflight_check(desired, fromrepo, **kwargs):
@@ -1709,8 +1720,8 @@ def installed(
"comment": "pkg.verify not implemented",
}
- if not isinstance(version, six.string_types) and version is not None:
- version = six.text_type(version)
+ if not isinstance(version, str) and version is not None:
+ version = str(version)
kwargs["allow_updates"] = allow_updates
@@ -1754,7 +1765,7 @@ def installed(
"name": name,
"changes": {},
"result": False,
- "comment": six.text_type(exc),
+ "comment": str(exc),
}
if "result" in hold_ret and not hold_ret["result"]:
@@ -1763,7 +1774,7 @@ def installed(
"changes": {},
"result": False,
"comment": "An error was encountered while "
- "holding/unholding package(s): {0}".format(hold_ret["comment"]),
+ "holding/unholding package(s): {}".format(hold_ret["comment"]),
}
else:
modified_hold = [
@@ -1779,16 +1790,16 @@ def installed(
]
for i in modified_hold:
- result["comment"] += ".\n{0}".format(i["comment"])
+ result["comment"] += ".\n{}".format(i["comment"])
result["result"] = i["result"]
result["changes"][i["name"]] = i["changes"]
for i in not_modified_hold:
- result["comment"] += ".\n{0}".format(i["comment"])
+ result["comment"] += ".\n{}".format(i["comment"])
result["result"] = i["result"]
for i in failed_hold:
- result["comment"] += ".\n{0}".format(i["comment"])
+ result["comment"] += ".\n{}".format(i["comment"])
result["result"] = i["result"]
return result
@@ -1805,8 +1816,8 @@ def installed(
# Remove any targets not returned by _find_install_targets
if pkgs:
- pkgs = [dict([(x, y)]) for x, y in six.iteritems(targets)]
- pkgs.extend([dict([(x, y)]) for x, y in six.iteritems(to_reinstall)])
+ pkgs = [dict([(x, y)]) for x, y in targets.items()]
+ pkgs.extend([dict([(x, y)]) for x, y in to_reinstall.items()])
elif sources:
oldsources = sources
sources = [x for x in oldsources if next(iter(list(x.keys()))) in targets]
@@ -1823,12 +1834,12 @@ def installed(
summary = ", ".join([_get_desired_pkg(x, targets) for x in targets])
comment.append(
"The following packages would be "
- "installed/updated: {0}".format(summary)
+ "installed/updated: {}".format(summary)
)
if to_unpurge:
comment.append(
"The following packages would have their selection status "
- "changed from 'purge' to 'install': {0}".format(", ".join(to_unpurge))
+ "changed from 'purge' to 'install': {}".format(", ".join(to_unpurge))
)
if to_reinstall:
# Add a comment for each package in to_reinstall with its
@@ -1852,7 +1863,7 @@ def installed(
else:
pkgstr = _get_desired_pkg(reinstall_pkg, to_reinstall)
comment.append(
- "Package '{0}' would be reinstalled because the "
+ "Package '{}' would be reinstalled because the "
"following files have been altered:".format(pkgstr)
)
comment.append(_nested_output(altered_files[reinstall_pkg]))
@@ -1896,7 +1907,7 @@ def installed(
ret["changes"] = {}
ret["comment"] = (
"An error was encountered while installing "
- "package(s): {0}".format(exc)
+ "package(s): {}".format(exc)
)
if warnings:
ret.setdefault("warnings", []).extend(warnings)
@@ -1907,7 +1918,7 @@ def installed(
if isinstance(pkg_ret, dict):
changes["installed"].update(pkg_ret)
- elif isinstance(pkg_ret, six.string_types):
+ elif isinstance(pkg_ret, str):
comment.append(pkg_ret)
# Code below will be looking for a dictionary. If this is a string
# it means that there was an exception raised and that no packages
@@ -1921,7 +1932,7 @@ def installed(
action = "pkg.hold" if kwargs["hold"] else "pkg.unhold"
hold_ret = __salt__[action](name=name, pkgs=desired)
except (CommandExecutionError, SaltInvocationError) as exc:
- comment.append(six.text_type(exc))
+ comment.append(str(exc))
ret = {
"name": name,
"changes": changes,
@@ -1938,7 +1949,7 @@ def installed(
"changes": {},
"result": False,
"comment": "An error was encountered while "
- "holding/unholding package(s): {0}".format(hold_ret["comment"]),
+ "holding/unholding package(s): {}".format(hold_ret["comment"]),
}
if warnings:
ret.setdefault("warnings", []).extend(warnings)
@@ -1996,11 +2007,11 @@ def installed(
summary = ", ".join([_get_desired_pkg(x, desired) for x in modified])
if len(summary) < 20:
comment.append(
- "The following packages were installed/updated: " "{0}".format(summary)
+ "The following packages were installed/updated: " "{}".format(summary)
)
else:
comment.append(
- "{0} targeted package{1} {2} installed/updated.".format(
+ "{} targeted package{} {} installed/updated.".format(
len(modified),
"s" if len(modified) > 1 else "",
"were" if len(modified) > 1 else "was",
@@ -2014,14 +2025,14 @@ def installed(
comment.append(i["comment"])
if len(changes[change_name]["new"]) > 0:
changes[change_name]["new"] += "\n"
- changes[change_name]["new"] += "{0}".format(i["changes"]["new"])
+ changes[change_name]["new"] += "{}".format(i["changes"]["new"])
if len(changes[change_name]["old"]) > 0:
changes[change_name]["old"] += "\n"
- changes[change_name]["old"] += "{0}".format(i["changes"]["old"])
+ changes[change_name]["old"] += "{}".format(i["changes"]["old"])
else:
comment.append(i["comment"])
changes[change_name] = {}
- changes[change_name]["new"] = "{0}".format(i["changes"]["new"])
+ changes[change_name]["new"] = "{}".format(i["changes"]["new"])
# Any requested packages that were not targeted for install or reinstall
if not_modified:
@@ -2031,11 +2042,11 @@ def installed(
summary = ", ".join([_get_desired_pkg(x, desired) for x in not_modified])
if len(not_modified) <= 20:
comment.append(
- "The following packages were already installed: " "{0}".format(summary)
+ "The following packages were already installed: " "{}".format(summary)
)
else:
comment.append(
- "{0} targeted package{1} {2} already installed".format(
+ "{} targeted package{} {} already installed".format(
len(not_modified),
"s" if len(not_modified) > 1 else "",
"were" if len(not_modified) > 1 else "was",
@@ -2054,7 +2065,7 @@ def installed(
else:
summary = ", ".join([_get_desired_pkg(x, desired) for x in failed])
comment.insert(
- 0, "The following packages failed to " "install/update: {0}".format(summary)
+ 0, "The following packages failed to " "install/update: {}".format(summary)
)
result = False
@@ -2118,7 +2129,7 @@ def installed(
pkgstr = modified_pkg
else:
pkgstr = _get_desired_pkg(modified_pkg, desired)
- msg = "Package {0} was reinstalled.".format(pkgstr)
+ msg = "Package {} was reinstalled.".format(pkgstr)
if modified_pkg in altered_files:
msg += " The following files were remediated:"
comment.append(msg)
@@ -2133,7 +2144,7 @@ def installed(
pkgstr = failed_pkg
else:
pkgstr = _get_desired_pkg(failed_pkg, desired)
- msg = "Reinstall was not successful for package {0}.".format(pkgstr)
+ msg = "Reinstall was not successful for package {}.".format(pkgstr)
if failed_pkg in altered_files:
msg += " The following files could not be remediated:"
comment.append(msg)
@@ -2274,12 +2285,12 @@ def downloaded(
ret["result"] = False
ret[
"comment"
- ] = "An error was encountered while checking targets: " "{0}".format(targets)
+ ] = "An error was encountered while checking targets: " "{}".format(targets)
return ret
if __opts__["test"]:
summary = ", ".join(targets)
- ret["comment"] = "The following packages would be " "downloaded: {0}".format(
+ ret["comment"] = "The following packages would be " "downloaded: {}".format(
summary
)
return ret
@@ -2306,7 +2317,7 @@ def downloaded(
ret["changes"] = {}
ret["comment"] = (
"An error was encountered while downloading "
- "package(s): {0}".format(exc)
+ "package(s): {}".format(exc)
)
return ret
@@ -2316,13 +2327,13 @@ def downloaded(
if failed:
summary = ", ".join([_get_desired_pkg(x, targets) for x in failed])
ret["result"] = False
- ret["comment"] = "The following packages failed to " "download: {0}".format(
+ ret["comment"] = "The following packages failed to " "download: {}".format(
summary
)
if not ret["changes"] and not ret["comment"]:
ret["result"] = True
- ret["comment"] = "Packages downloaded: " "{0}".format(", ".join(targets))
+ ret["comment"] = "Packages downloaded: " "{}".format(", ".join(targets))
return ret
@@ -2382,14 +2393,14 @@ def patch_installed(name, advisory_ids=None, downloadonly=None, **kwargs):
ret["result"] = False
ret[
"comment"
- ] = "An error was encountered while checking targets: " "{0}".format(targets)
+ ] = "An error was encountered while checking targets: " "{}".format(targets)
return ret
if __opts__["test"]:
summary = ", ".join(targets)
ret[
"comment"
- ] = "The following advisory patches would be " "downloaded: {0}".format(summary)
+ ] = "The following advisory patches would be " "downloaded: {}".format(summary)
return ret
try:
@@ -2408,7 +2419,7 @@ def patch_installed(name, advisory_ids=None, downloadonly=None, **kwargs):
ret["changes"] = {}
ret["comment"] = (
"An error was encountered while downloading "
- "package(s): {0}".format(exc)
+ "package(s): {}".format(exc)
)
return ret
@@ -2417,7 +2428,7 @@ def patch_installed(name, advisory_ids=None, downloadonly=None, **kwargs):
ret["result"] = True
ret["comment"] = (
"Advisory patch is not needed or related packages "
- "are already {0}".format(status)
+ "are already {}".format(status)
)
return ret
@@ -2674,7 +2685,7 @@ def latest(
"changes": {},
"result": False,
"comment": "An error was encountered while checking the "
- "newest available version of package(s): {0}".format(exc),
+ "newest available version of package(s): {}".format(exc),
}
try:
@@ -2683,9 +2694,9 @@ def latest(
return {"name": name, "changes": {}, "result": False, "comment": exc.strerror}
# Repack the cur/avail data if only a single package is being checked
- if isinstance(cur, six.string_types):
+ if isinstance(cur, str):
cur = {desired_pkgs[0]: cur}
- if isinstance(avail, six.string_types):
+ if isinstance(avail, str):
avail = {desired_pkgs[0]: avail}
targets = {}
@@ -2695,7 +2706,7 @@ def latest(
# Package either a) is up-to-date, or b) does not exist
if not cur.get(pkg):
# Package does not exist
- msg = "No information found for '{0}'.".format(pkg)
+ msg = "No information found for '{}'.".format(pkg)
log.error(msg)
problems.append(msg)
elif (
@@ -2741,12 +2752,12 @@ def latest(
comments.append(
"The following packages are already up-to-date: "
+ ", ".join(
- ["{0} ({1})".format(x, cur[x]) for x in sorted(up_to_date)]
+ ["{} ({})".format(x, cur[x]) for x in sorted(up_to_date)]
)
)
else:
comments.append(
- "{0} packages are already up-to-date".format(up_to_date_count)
+ "{} packages are already up-to-date".format(up_to_date_count)
)
return {
@@ -2784,7 +2795,7 @@ def latest(
"changes": {},
"result": False,
"comment": "An error was encountered while installing "
- "package(s): {0}".format(exc),
+ "package(s): {}".format(exc),
}
if changes:
@@ -2800,7 +2811,7 @@ def latest(
comments = []
if failed:
- msg = "The following packages failed to update: " "{0}".format(
+ msg = "The following packages failed to update: " "{}".format(
", ".join(sorted(failed))
)
comments.append(msg)
@@ -2808,19 +2819,17 @@ def latest(
msg = (
"The following packages were successfully "
"installed/upgraded: "
- "{0}".format(", ".join(sorted(successful)))
+ "{}".format(", ".join(sorted(successful)))
)
comments.append(msg)
if up_to_date:
if len(up_to_date) <= 10:
msg = (
"The following packages were already up-to-date: "
- "{0}".format(", ".join(sorted(up_to_date)))
+ "{}".format(", ".join(sorted(up_to_date)))
)
else:
- msg = "{0} packages were already up-to-date ".format(
- len(up_to_date)
- )
+ msg = "{} packages were already up-to-date ".format(len(up_to_date))
comments.append(msg)
return {
@@ -2832,18 +2841,18 @@ def latest(
else:
if len(targets) > 10:
comment = (
- "{0} targeted packages failed to update. "
+ "{} targeted packages failed to update. "
"See debug log for details.".format(len(targets))
)
elif len(targets) > 1:
comment = (
"The following targeted packages failed to update. "
- "See debug log for details: ({0}).".format(
+ "See debug log for details: ({}).".format(
", ".join(sorted(targets))
)
)
else:
- comment = "Package {0} failed to " "update.".format(
+ comment = "Package {} failed to " "update.".format(
next(iter(list(targets.keys())))
)
if up_to_date:
@@ -2851,10 +2860,10 @@ def latest(
comment += (
" The following packages were already "
"up-to-date: "
- "{0}".format(", ".join(sorted(up_to_date)))
+ "{}".format(", ".join(sorted(up_to_date)))
)
else:
- comment += "{0} packages were already " "up-to-date".format(
+ comment += "{} packages were already " "up-to-date".format(
len(up_to_date)
)
@@ -2866,13 +2875,13 @@ def latest(
}
else:
if len(desired_pkgs) > 10:
- comment = "All {0} packages are up-to-date.".format(len(desired_pkgs))
+ comment = "All {} packages are up-to-date.".format(len(desired_pkgs))
elif len(desired_pkgs) > 1:
- comment = "All packages are up-to-date " "({0}).".format(
+ comment = "All packages are up-to-date " "({}).".format(
", ".join(sorted(desired_pkgs))
)
else:
- comment = "Package {0} is already " "up-to-date".format(desired_pkgs[0])
+ comment = "Package {} is already " "up-to-date".format(desired_pkgs[0])
return {"name": name, "changes": {}, "result": True, "comment": comment}
@@ -2894,8 +2903,7 @@ def _uninstall(
"name": name,
"changes": {},
"result": False,
- "comment": "Invalid action '{0}'. "
- "This is probably a bug.".format(action),
+ "comment": "Invalid action '{}'. " "This is probably a bug.".format(action),
}
try:
@@ -2908,7 +2916,7 @@ def _uninstall(
"changes": {},
"result": False,
"comment": "An error was encountered while parsing targets: "
- "{0}".format(exc),
+ "{}".format(exc),
}
targets = _find_remove_targets(
name, version, pkgs, normalize, ignore_epoch=ignore_epoch, **kwargs
@@ -2921,7 +2929,7 @@ def _uninstall(
"changes": {},
"result": False,
"comment": "An error was encountered while checking targets: "
- "{0}".format(targets),
+ "{}".format(targets),
}
if action == "purge":
old_removed = __salt__["pkg.list_pkgs"](
@@ -2936,7 +2944,7 @@ def _uninstall(
"changes": {},
"result": True,
"comment": "None of the targeted packages are installed"
- "{0}".format(" or partially installed" if action == "purge" else ""),
+ "{}".format(" or partially installed" if action == "purge" else ""),
}
if __opts__["test"]:
@@ -2944,11 +2952,11 @@ def _uninstall(
"name": name,
"changes": {},
"result": None,
- "comment": "The following packages will be {0}d: "
- "{1}.".format(action, ", ".join(targets)),
+ "comment": "The following packages will be {}d: "
+ "{}.".format(action, ", ".join(targets)),
}
- changes = __salt__["pkg.{0}".format(action)](
+ changes = __salt__["pkg.{}".format(action)](
name, pkgs=pkgs, version=version, **kwargs
)
new = __salt__["pkg.list_pkgs"](versions_as_list=True, **kwargs)
@@ -2975,8 +2983,8 @@ def _uninstall(
"name": name,
"changes": changes,
"result": False,
- "comment": "The following packages failed to {0}: "
- "{1}.".format(action, ", ".join(failed)),
+ "comment": "The following packages failed to {}: "
+ "{}.".format(action, ", ".join(failed)),
}
comments = []
@@ -2984,14 +2992,13 @@ def _uninstall(
if not_installed:
comments.append(
"The following packages were not installed: "
- "{0}".format(", ".join(not_installed))
+ "{}".format(", ".join(not_installed))
)
comments.append(
- "The following packages were {0}d: "
- "{1}.".format(action, ", ".join(targets))
+ "The following packages were {}d: " "{}.".format(action, ", ".join(targets))
)
else:
- comments.append("All targeted packages were {0}d.".format(action))
+ comments.append("All targeted packages were {}d.".format(action))
return {
"name": name,
@@ -3089,7 +3096,7 @@ def removed(name, version=None, pkgs=None, normalize=True, ignore_epoch=None, **
ret["changes"] = {}
ret[
"comment"
- ] = "An error was encountered while removing " "package(s): {0}".format(exc)
+ ] = "An error was encountered while removing " "package(s): {}".format(exc)
return ret
@@ -3181,7 +3188,7 @@ def purged(name, version=None, pkgs=None, normalize=True, ignore_epoch=None, **k
ret["changes"] = {}
ret[
"comment"
- ] = "An error was encountered while purging " "package(s): {0}".format(exc)
+ ] = "An error was encountered while purging " "package(s): {}".format(exc)
return ret
@@ -3247,17 +3254,17 @@ def uptodate(name, refresh=False, pkgs=None, **kwargs):
"new": pkgver,
"old": __salt__["pkg.version"](pkgname, **kwargs),
}
- for pkgname, pkgver in six.iteritems(packages)
+ for pkgname, pkgver in packages.items()
}
if isinstance(pkgs, list):
packages = [pkg for pkg in packages if pkg in pkgs]
expected = {
pkgname: pkgver
- for pkgname, pkgver in six.iteritems(expected)
+ for pkgname, pkgver in expected.items()
if pkgname in pkgs
}
except Exception as exc: # pylint: disable=broad-except
- ret["comment"] = six.text_type(exc)
+ ret["comment"] = str(exc)
return ret
else:
ret["comment"] = "refresh must be either True or False"
@@ -3284,16 +3291,16 @@ def uptodate(name, refresh=False, pkgs=None, **kwargs):
ret["changes"] = {}
ret[
"comment"
- ] = "An error was encountered while updating " "packages: {0}".format(exc)
+ ] = "An error was encountered while updating " "packages: {}".format(exc)
return ret
# If a package list was provided, ensure those packages were updated
missing = []
if isinstance(pkgs, list):
- missing = [pkg for pkg in six.iterkeys(expected) if pkg not in ret["changes"]]
+ missing = [pkg for pkg in expected.keys() if pkg not in ret["changes"]]
if missing:
- ret["comment"] = "The following package(s) failed to update: {0}".format(
+ ret["comment"] = "The following package(s) failed to update: {}".format(
", ".join(missing)
)
ret["result"] = False
@@ -3362,8 +3369,8 @@ def group_installed(name, skip=None, include=None, **kwargs):
ret["comment"] = "skip must be formatted as a list"
return ret
for idx, item in enumerate(skip):
- if not isinstance(item, six.string_types):
- skip[idx] = six.text_type(item)
+ if not isinstance(item, str):
+ skip[idx] = str(item)
if include is None:
include = []
@@ -3372,15 +3379,15 @@ def group_installed(name, skip=None, include=None, **kwargs):
ret["comment"] = "include must be formatted as a list"
return ret
for idx, item in enumerate(include):
- if not isinstance(item, six.string_types):
- include[idx] = six.text_type(item)
+ if not isinstance(item, str):
+ include[idx] = str(item)
try:
diff = __salt__["pkg.group_diff"](name)
except CommandExecutionError as err:
ret["comment"] = (
"An error was encountered while installing/updating "
- "group '{0}': {1}.".format(name, err)
+ "group '{}': {}.".format(name, err)
)
return ret
@@ -3390,7 +3397,7 @@ def group_installed(name, skip=None, include=None, **kwargs):
if invalid_skip:
ret[
"comment"
- ] = "The following mandatory packages cannot be skipped: {0}".format(
+ ] = "The following mandatory packages cannot be skipped: {}".format(
", ".join(invalid_skip)
)
return ret
@@ -3401,7 +3408,7 @@ def group_installed(name, skip=None, include=None, **kwargs):
if not targets:
ret["result"] = True
- ret["comment"] = "Group '{0}' is already installed".format(name)
+ ret["comment"] = "Group '{}' is already installed".format(name)
return ret
partially_installed = (
@@ -3415,9 +3422,9 @@ def group_installed(name, skip=None, include=None, **kwargs):
if partially_installed:
ret[
"comment"
- ] = "Group '{0}' is partially installed and will be updated".format(name)
+ ] = "Group '{}' is partially installed and will be updated".format(name)
else:
- ret["comment"] = "Group '{0}' will be installed".format(name)
+ ret["comment"] = "Group '{}' will be installed".format(name)
return ret
try:
@@ -3432,19 +3439,19 @@ def group_installed(name, skip=None, include=None, **kwargs):
ret["changes"] = {}
ret["comment"] = (
"An error was encountered while "
- "installing/updating group '{0}': {1}".format(name, exc)
+ "installing/updating group '{}': {}".format(name, exc)
)
return ret
failed = [x for x in targets if x not in __salt__["pkg.list_pkgs"](**kwargs)]
if failed:
- ret["comment"] = "Failed to install the following packages: {0}".format(
+ ret["comment"] = "Failed to install the following packages: {}".format(
", ".join(failed)
)
return ret
ret["result"] = True
- ret["comment"] = "Group '{0}' was {1}".format(
+ ret["comment"] = "Group '{}' was {}".format(
name, "updated" if partially_installed else "installed"
)
return ret
@@ -3561,6 +3568,6 @@ def mod_watch(name, **kwargs):
return {
"name": name,
"changes": {},
- "comment": "pkg.{0} does not work with the watch requisite".format(sfun),
+ "comment": "pkg.{} does not work with the watch requisite".format(sfun),
"result": False,
}
--
2.16.4
2.29.2

View File

@ -1,4 +1,4 @@
From acf0b24353d831dcc2c5b292f99480938f5ecd93 Mon Sep 17 00:00:00 2001
From d5569023c64a3fcec57a7aa6823ee94e8be91b3d Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Julio=20Gonz=C3=A1lez=20Gil?=
<juliogonzalez@users.noreply.github.com>
Date: Wed, 12 Feb 2020 10:05:45 +0100
@ -11,49 +11,49 @@ Subject: [PATCH] Add Astra Linux Common Edition to the OS Family list
2 files changed, 21 insertions(+)
diff --git a/salt/grains/core.py b/salt/grains/core.py
index 20950988d9..f410985198 100644
index 5dff6ecfd4..5634327623 100644
--- a/salt/grains/core.py
+++ b/salt/grains/core.py
@@ -1523,6 +1523,7 @@ _OS_FAMILY_MAP = {
'Funtoo': 'Gentoo',
'AIX': 'AIX',
'TurnKey': 'Debian',
+ 'AstraLinuxCE': 'Debian',
@@ -1618,6 +1618,7 @@ _OS_FAMILY_MAP = {
"Funtoo": "Gentoo",
"AIX": "AIX",
"TurnKey": "Debian",
+ "AstraLinuxCE": "Debian",
}
# Matches any possible format:
diff --git a/tests/unit/grains/test_core.py b/tests/unit/grains/test_core.py
index b4ed9379e5..c276dee9f3 100644
index 85d434dd9d..196dbcf83d 100644
--- a/tests/unit/grains/test_core.py
+++ b/tests/unit/grains/test_core.py
@@ -605,6 +605,26 @@ class CoreGrainsTestCase(TestCase, LoaderModuleMockMixin):
@@ -728,6 +728,26 @@ class CoreGrainsTestCase(TestCase, LoaderModuleMockMixin):
}
self._run_os_grains_tests("ubuntu-17.10", _os_release_map, expectation)
+ @skipIf(not salt.utils.platform.is_linux(), 'System is not Linux')
+ @skipIf(not salt.utils.platform.is_linux(), "System is not Linux")
+ def test_astralinuxce_2_os_grains(self):
+ '''
+ """
+ Test if OS grains are parsed correctly in Astra Linux CE 2.12.22 "orel"
+ '''
+ """
+ _os_release_map = {
+ 'linux_distribution': ('AstraLinuxCE', '2.12.22', 'orel'),
+ "linux_distribution": ("AstraLinuxCE", "2.12.22", "orel"),
+ }
+ expectation = {
+ 'os': 'AstraLinuxCE',
+ 'os_family': 'Debian',
+ 'oscodename': 'orel',
+ 'osfullname': 'AstraLinuxCE',
+ 'osrelease': '2.12.22',
+ 'osrelease_info': (2, 12, 22),
+ 'osmajorrelease': 2,
+ 'osfinger': 'AstraLinuxCE-2',
+ "os": "AstraLinuxCE",
+ "os_family": "Debian",
+ "oscodename": "orel",
+ "osfullname": "AstraLinuxCE",
+ "osrelease": "2.12.22",
+ "osrelease_info": (2, 12, 22),
+ "osmajorrelease": 2,
+ "osfinger": "AstraLinuxCE-2",
+ }
+ self._run_os_grains_tests("astralinuxce-2.12.22", _os_release_map, expectation)
+
@skipIf(not salt.utils.platform.is_windows(), 'System is not Windows')
@skipIf(not salt.utils.platform.is_windows(), "System is not Windows")
def test_windows_platform_data(self):
'''
"""
--
2.16.4
2.29.2

View File

@ -1,46 +1,55 @@
From 376a7d2eeb6b3b215fac9322f1baee4497bdb339 Mon Sep 17 00:00:00 2001
From 66f6c2540a151487b26c89a2bb66199d6c65c18d Mon Sep 17 00:00:00 2001
From: Marcelo Chiaradia <mchiaradia@suse.com>
Date: Thu, 4 Apr 2019 13:57:38 +0200
Subject: [PATCH] Add 'batch_presence_ping_timeout' and
'batch_presence_ping_gather_job_timeout' parameters for synchronous batching
---
salt/cli/batch.py | 7 +++++--
1 file changed, 5 insertions(+), 2 deletions(-)
salt/cli/batch.py | 16 ++++++++++++++--
1 file changed, 14 insertions(+), 2 deletions(-)
diff --git a/salt/cli/batch.py b/salt/cli/batch.py
index 36e66da1af..67f03c8a45 100644
index 527cffdeb7..2bc5444aef 100644
--- a/salt/cli/batch.py
+++ b/salt/cli/batch.py
@@ -83,6 +83,9 @@ def batch_get_opts(
@@ -77,6 +77,13 @@ def batch_get_opts(
if key not in opts:
opts[key] = val
+ opts['batch_presence_ping_timeout'] = kwargs.get('batch_presence_ping_timeout', opts['timeout'])
+ opts['batch_presence_ping_gather_job_timeout'] = kwargs.get('batch_presence_ping_gather_job_timeout', opts['gather_job_timeout'])
+ opts["batch_presence_ping_timeout"] = kwargs.get(
+ "batch_presence_ping_timeout", opts["timeout"]
+ )
+ opts["batch_presence_ping_gather_job_timeout"] = kwargs.get(
+ "batch_presence_ping_gather_job_timeout", opts["gather_job_timeout"]
+ )
+
return opts
@@ -119,7 +122,7 @@ class Batch(object):
args = [self.opts['tgt'],
'test.ping',
[],
- self.opts['timeout'],
+ self.opts.get('batch_presence_ping_timeout', self.opts['timeout']),
]
@@ -115,7 +122,7 @@ class Batch:
self.opts["tgt"],
"test.ping",
[],
- self.opts["timeout"],
+ self.opts.get("batch_presence_ping_timeout", self.opts["timeout"]),
]
selected_target_option = self.opts.get('selected_target_option', None)
@@ -130,7 +133,7 @@ class Batch(object):
selected_target_option = self.opts.get("selected_target_option", None)
@@ -126,7 +133,12 @@ class Batch:
self.pub_kwargs['yield_pub_data'] = True
ping_gen = self.local.cmd_iter(*args,
- gather_job_timeout=self.opts['gather_job_timeout'],
+ gather_job_timeout=self.opts.get('batch_presence_ping_gather_job_timeout', self.opts['gather_job_timeout']),
**self.pub_kwargs)
self.pub_kwargs["yield_pub_data"] = True
ping_gen = self.local.cmd_iter(
- *args, gather_job_timeout=self.opts["gather_job_timeout"], **self.pub_kwargs
+ *args,
+ gather_job_timeout=self.opts.get(
+ "batch_presence_ping_gather_job_timeout",
+ self.opts["gather_job_timeout"],
+ ),
+ **self.pub_kwargs
)
# Broadcast to targets
--
2.16.4
2.29.2

View File

@ -1,4 +1,4 @@
From a90f35bc03b477a63aae20c58f8957c075569465 Mon Sep 17 00:00:00 2001
From c845d56fdf1762586b1f210b1eb49193893d4312 Mon Sep 17 00:00:00 2001
From: Bo Maryniuk <bo@suse.de>
Date: Tue, 9 Oct 2018 14:08:50 +0200
Subject: [PATCH] Add CPE_NAME for osversion* grain parsing (U#49946)
@ -29,10 +29,10 @@ Fix proper part name in the string-bound CPE
1 file changed, 28 insertions(+)
diff --git a/salt/grains/core.py b/salt/grains/core.py
index 9c1b5d930e..7b7e328520 100644
index 5535584d1b..bc3cf129cd 100644
--- a/salt/grains/core.py
+++ b/salt/grains/core.py
@@ -1642,6 +1642,34 @@ def _parse_cpe_name(cpe):
@@ -1732,6 +1732,34 @@ def _parse_cpe_name(cpe):
return ret
@ -65,9 +65,9 @@ index 9c1b5d930e..7b7e328520 100644
+
+
def os_data():
'''
"""
Return grains pertaining to the operating system
--
2.16.4
2.29.2

View File

@ -1,4 +1,4 @@
From e57dd3c2ae655422f0f6939825154ce5827d43c4 Mon Sep 17 00:00:00 2001
From 713ccfdc5c6733495d3ce7f26a8cfeddb8e9e9c4 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Pablo=20Su=C3=A1rez=20Hern=C3=A1ndez?=
<psuarezhernandez@suse.com>
Date: Thu, 21 Jun 2018 11:57:57 +0100
@ -9,10 +9,10 @@ Subject: [PATCH] Add custom SUSE capabilities as Grains
1 file changed, 7 insertions(+)
diff --git a/salt/grains/extra.py b/salt/grains/extra.py
index 9ce644b766..1082b05dba 100644
index 2fdbe6526a..ddc22293ea 100644
--- a/salt/grains/extra.py
+++ b/salt/grains/extra.py
@@ -75,3 +75,10 @@ def config():
@@ -66,3 +66,10 @@ def config():
log.warning("Bad syntax in grains file! Skipping.")
return {}
return {}
@ -24,6 +24,6 @@ index 9ce644b766..1082b05dba 100644
+ '__suse_reserved_pkg_patches_support': True
+ }
--
2.16.4
2.29.2

View File

@ -1,4 +1,4 @@
From 9e6bd24b07cd2424c3805777b07b9ea84adff416 Mon Sep 17 00:00:00 2001
From 355e1e29e8f3286eeb13bc2d05089c096c9e01e3 Mon Sep 17 00:00:00 2001
From: Alexander Graul <agraul@suse.com>
Date: Mon, 18 May 2020 16:39:27 +0200
Subject: [PATCH] Add docker logout (#237)
@ -13,10 +13,10 @@ interpreted as a list of docker registries to log out of.
2 files changed, 139 insertions(+)
diff --git a/salt/modules/dockermod.py b/salt/modules/dockermod.py
index 28a2107cec..119e9eb170 100644
index 934038c927..176b4db926 100644
--- a/salt/modules/dockermod.py
+++ b/salt/modules/dockermod.py
@@ -1481,6 +1481,86 @@ def login(*registries):
@@ -1586,6 +1586,86 @@ def logout(*registries):
return ret
@ -102,44 +102,15 @@ index 28a2107cec..119e9eb170 100644
+
# Functions for information gathering
def depends(name):
'''
"""
diff --git a/tests/unit/modules/test_dockermod.py b/tests/unit/modules/test_dockermod.py
index 191bfc123f..8f4ead2867 100644
index 34e2e9c610..48526acb71 100644
--- a/tests/unit/modules/test_dockermod.py
+++ b/tests/unit/modules/test_dockermod.py
@@ -164,6 +164,65 @@ class DockerTestCase(TestCase, LoaderModuleMockMixin):
self.assertIn('retcode', ret)
self.assertNotEqual(ret['retcode'], 0)
@@ -199,6 +199,65 @@ class DockerTestCase(TestCase, LoaderModuleMockMixin):
output_loglevel="quiet",
)
+ def test_logout_calls_docker_cli_logout_single(self):
+ client = Mock()
+ get_client_mock = MagicMock(return_value=client)
+ ref_out = {"stdout": "", "stderr": "", "retcode": 0}
+ registry_auth_data = {
+ "portus.example.com:5000": {
+ "username": "admin",
+ "password": "linux12345",
+ "email": "tux@example.com",
+ }
+ }
+ docker_mock = MagicMock(return_value=ref_out)
+ with patch.object(docker_mod, "_get_client", get_client_mock):
+ dunder_salt = {
+ "config.get": MagicMock(return_value=registry_auth_data),
+ "cmd.run_all": docker_mock,
+ "config.option": MagicMock(return_value={}),
+ }
+ with patch.dict(docker_mod.__salt__, dunder_salt):
+ ret = docker_mod.logout("portus.example.com:5000")
+ assert "retcode" in ret
+ assert ret["retcode"] == 0
+ docker_mock.assert_called_with(
+ ["docker", "logout", "portus.example.com:5000"],
+ python_shell=False,
+ output_loglevel="quiet",
+ )
+
+
+ def test_logout_calls_docker_cli_logout_all(self):
+ client = Mock()
+ get_client_mock = MagicMock(return_value=client)
@ -170,10 +141,39 @@ index 191bfc123f..8f4ead2867 100644
+ assert ret["retcode"] == 0
+ assert docker_mock.call_count == 2
+
def test_ps_with_host_true(self):
'''
Check that docker.ps called with host is ``True``,
+ def test_logout_calls_docker_cli_logout_single(self):
+ client = Mock()
+ get_client_mock = MagicMock(return_value=client)
+ ref_out = {"stdout": "", "stderr": "", "retcode": 0}
+ registry_auth_data = {
+ "portus.example.com:5000": {
+ "username": "admin",
+ "password": "linux12345",
+ "email": "tux@example.com",
+ }
+ }
+ docker_mock = MagicMock(return_value=ref_out)
+ with patch.object(docker_mod, "_get_client", get_client_mock):
+ dunder_salt = {
+ "config.get": MagicMock(return_value=registry_auth_data),
+ "cmd.run_all": docker_mock,
+ "config.option": MagicMock(return_value={}),
+ }
+ with patch.dict(docker_mod.__salt__, dunder_salt):
+ ret = docker_mod.logout("portus.example.com:5000")
+ assert "retcode" in ret
+ assert ret["retcode"] == 0
+ docker_mock.assert_called_with(
+ ["docker", "logout", "portus.example.com:5000"],
+ python_shell=False,
+ output_loglevel="quiet",
+ )
+
+
def test_logout_calls_docker_cli_logout_all(self):
client = Mock()
get_client_mock = MagicMock(return_value=client)
--
2.26.2
2.29.2

View File

@ -1,78 +1,83 @@
From 874b1229babf5244debac141cd260f695ccc1e9d Mon Sep 17 00:00:00 2001
From 7b2b5fc53d30397b8f7a11e59f5c7a57bcb63058 Mon Sep 17 00:00:00 2001
From: Marcelo Chiaradia <mchiaradia@suse.com>
Date: Thu, 7 Jun 2018 10:29:41 +0200
Subject: [PATCH] Add environment variable to know if yum is invoked from
Salt(bsc#1057635)
Subject: [PATCH] Add environment variable to know if yum is invoked
from Salt(bsc#1057635)
---
salt/modules/yumpkg.py | 18 ++++++++++++------
1 file changed, 12 insertions(+), 6 deletions(-)
salt/modules/yumpkg.py | 23 +++++++++++++++++------
1 file changed, 17 insertions(+), 6 deletions(-)
diff --git a/salt/modules/yumpkg.py b/salt/modules/yumpkg.py
index f7e4ac9753..c89d321a1b 100644
index b547fe6be7..c58b3e4c70 100644
--- a/salt/modules/yumpkg.py
+++ b/salt/modules/yumpkg.py
@@ -913,7 +913,8 @@ def list_repo_pkgs(*args, **kwargs):
yum_version = None if _yum() != 'yum' else _LooseVersion(
__salt__['cmd.run'](
['yum', '--version'],
- python_shell=False
+ python_shell=False,
+ env={"SALT_RUNNING": '1'}
).splitlines()[0].strip()
)
# Really old version of yum; does not even have --showduplicates option
@@ -2324,7 +2325,8 @@ def list_holds(pattern=__HOLD_PATTERN, full=True):
@@ -917,7 +917,9 @@ def list_repo_pkgs(*args, **kwargs):
None
if _yum() != "yum"
else _LooseVersion(
- __salt__["cmd.run"](["yum", "--version"], python_shell=False)
+ __salt__["cmd.run"](
+ ["yum", "--version"], python_shell=False, env={"SALT_RUNNING": "1"}
+ )
.splitlines()[0]
.strip()
)
@@ -2347,7 +2349,9 @@ def list_holds(pattern=__HOLD_PATTERN, full=True):
"""
_check_versionlock()
out = __salt__['cmd.run']([_yum(), 'versionlock', 'list'],
- python_shell=False)
+ python_shell=False,
+ env={"SALT_RUNNING": '1'})
- out = __salt__["cmd.run"]([_yum(), "versionlock", "list"], python_shell=False)
+ out = __salt__["cmd.run"](
+ [_yum(), "versionlock", "list"], python_shell=False, env={"SALT_RUNNING": "1"}
+ )
ret = []
for line in salt.utils.itertools.split(out, '\n'):
for line in salt.utils.itertools.split(out, "\n"):
match = _get_hold(line, pattern=pattern, full=full)
@@ -2390,7 +2392,8 @@ def group_list():
out = __salt__['cmd.run_stdout'](
[_yum(), 'grouplist', 'hidden'],
output_loglevel='trace',
- python_shell=False
@@ -2415,7 +2419,10 @@ def group_list():
}
out = __salt__["cmd.run_stdout"](
- [_yum(), "grouplist", "hidden"], output_loglevel="trace", python_shell=False
+ [_yum(), "grouplist", "hidden"],
+ output_loglevel="trace",
+ python_shell=False,
+ env={"SALT_RUNNING": '1'}
+ env={"SALT_RUNNING": "1"},
)
key = None
for line in salt.utils.itertools.split(out, '\n'):
@@ -2457,7 +2460,8 @@ def group_info(name, expand=False):
out = __salt__['cmd.run_stdout'](
cmd,
output_loglevel='trace',
- python_shell=False
+ python_shell=False,
+ env={"SALT_RUNNING": '1'}
)
for line in salt.utils.itertools.split(out, "\n"):
@@ -2486,7 +2493,9 @@ def group_info(name, expand=False, ignore_groups=None):
ret[pkgtype] = set()
cmd = [_yum(), "--quiet", "groupinfo", name]
- out = __salt__["cmd.run_stdout"](cmd, output_loglevel="trace", python_shell=False)
+ out = __salt__["cmd.run_stdout"](
+ cmd, output_loglevel="trace", python_shell=False, env={"SALT_RUNNING": "1"}
+ )
g_info = {}
@@ -3134,7 +3138,8 @@ def download(*packages):
__salt__['cmd.run'](
cmd,
output_loglevel='trace',
- python_shell=False
+ python_shell=False,
+ env={"SALT_RUNNING": '1'}
)
for line in salt.utils.itertools.split(out, "\n"):
@@ -3203,7 +3212,9 @@ def download(*packages, **kwargs):
cmd = ["yumdownloader", "-q", "--destdir={}".format(CACHE_DIR)]
cmd.extend(packages)
- __salt__["cmd.run"](cmd, output_loglevel="trace", python_shell=False)
+ __salt__["cmd.run"](
+ cmd, output_loglevel="trace", python_shell=False, env={"SALT_RUNNING": "1"}
+ )
ret = {}
for dld_result in os.listdir(CACHE_DIR):
@@ -3209,7 +3214,8 @@ def _get_patches(installed_only=False):
cmd = [_yum(), '--quiet', 'updateinfo', 'list', 'all']
ret = __salt__['cmd.run_stdout'](
cmd,
- python_shell=False
+ python_shell=False,
+ env={"SALT_RUNNING": '1'}
)
if not dld_result.endswith(".rpm"):
@@ -3279,7 +3290,7 @@ def _get_patches(installed_only=False):
patches = {}
cmd = [_yum(), "--quiet", "updateinfo", "list", "all"]
- ret = __salt__["cmd.run_stdout"](cmd, python_shell=False)
+ ret = __salt__["cmd.run_stdout"](cmd, python_shell=False, env={"SALT_RUNNING": "1"})
for line in salt.utils.itertools.split(ret, os.linesep):
inst, advisory_id, sev, pkg = re.match(r'([i|\s]) ([^\s]+) +([^\s]+) +([^\s]+)',
inst, advisory_id, sev, pkg = re.match(
r"([i|\s]) ([^\s]+) +([^\s]+) +([^\s]+)", line
--
2.16.4
2.29.2

View File

@ -1,4 +1,4 @@
From 666f62917bbc48cbee2ed0aa319a61afd1b1fcb2 Mon Sep 17 00:00:00 2001
From 6176ef8aa39626dcb450a1665231a796e9544342 Mon Sep 17 00:00:00 2001
From: Bo Maryniuk <bo@suse.de>
Date: Thu, 6 Dec 2018 16:26:23 +0100
Subject: [PATCH] Add hold/unhold functions
@ -7,43 +7,324 @@ Add unhold function
Add warnings
---
salt/modules/zypperpkg.py | 88 ++++++++++++++++++++++++++++++++++++++++++++++-
1 file changed, 87 insertions(+), 1 deletion(-)
salt/modules/zypperpkg.py | 186 +++++++++++++++++++++++++++-----------
1 file changed, 131 insertions(+), 55 deletions(-)
diff --git a/salt/modules/zypperpkg.py b/salt/modules/zypperpkg.py
index 50279ccbd1..08a9c2ed4d 100644
index 44bcbbf2f2..6fa6e3e0a1 100644
--- a/salt/modules/zypperpkg.py
+++ b/salt/modules/zypperpkg.py
@@ -41,6 +41,7 @@ import salt.utils.pkg
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
"""
Package support for openSUSE via the zypper package manager
@@ -12,8 +11,6 @@ Package support for openSUSE via the zypper package manager
"""
-# Import python libs
-from __future__ import absolute_import, print_function, unicode_literals
import datetime
import fnmatch
@@ -24,7 +21,6 @@ import time
from xml.dom import minidom as dom
from xml.parsers.expat import ExpatError
-# Import salt libs
import salt.utils.data
import salt.utils.environment
import salt.utils.event
@@ -35,9 +31,9 @@ import salt.utils.pkg
import salt.utils.pkg.rpm
import salt.utils.stringutils
import salt.utils.systemd
+import salt.utils.versions
from salt.utils.versions import LooseVersion
import salt.utils.environment
from salt.exceptions import CommandExecutionError, MinionError, SaltInvocationError
@@ -1771,7 +1772,7 @@ def clean_locks():
-# Import 3rd-party libs
# pylint: disable=import-error,redefined-builtin,no-name-in-module
from salt.ext import six
from salt.ext.six.moves import configparser
@@ -51,8 +47,8 @@ log = logging.getLogger(__name__)
HAS_ZYPP = False
ZYPP_HOME = "/etc/zypp"
-LOCKS = "{0}/locks".format(ZYPP_HOME)
-REPOS = "{0}/repos.d".format(ZYPP_HOME)
+LOCKS = "{}/locks".format(ZYPP_HOME)
+REPOS = "{}/repos.d".format(ZYPP_HOME)
DEFAULT_PRIORITY = 99
PKG_ARCH_SEPARATOR = "."
@@ -75,7 +71,7 @@ def __virtual__():
return __virtualname__
-class _Zypper(object):
+class _Zypper:
"""
Zypper parallel caller.
Validates the result and either raises an exception or reports an error.
@@ -339,7 +335,7 @@ class _Zypper(object):
attrs=["pid", "name", "cmdline", "create_time"],
)
data["cmdline"] = " ".join(data["cmdline"])
- data["info"] = "Blocking process created at {0}.".format(
+ data["info"] = "Blocking process created at {}.".format(
datetime.datetime.utcfromtimestamp(
data["create_time"]
).isoformat()
@@ -347,7 +343,7 @@ class _Zypper(object):
data["success"] = True
except Exception as err: # pylint: disable=broad-except
data = {
- "info": "Unable to retrieve information about blocking process: {0}".format(
+ "info": "Unable to retrieve information about blocking process: {}".format(
err.message
),
"success": False,
@@ -382,7 +378,7 @@ class _Zypper(object):
)
if self.error_msg and not self.__no_raise and not self.__ignore_repo_failure:
raise CommandExecutionError(
- "Zypper command failure: {0}".format(self.error_msg)
+ "Zypper command failure: {}".format(self.error_msg)
)
return (
@@ -397,7 +393,7 @@ class _Zypper(object):
__zypper__ = _Zypper()
-class Wildcard(object):
+class Wildcard:
"""
.. versionadded:: 2017.7.0
@@ -439,7 +435,7 @@ class Wildcard(object):
for vrs in self._get_scope_versions(self._get_available_versions())
]
)
- return versions and "{0}{1}".format(self._op or "", versions[-1]) or None
+ return versions and "{}{}".format(self._op or "", versions[-1]) or None
def _get_available_versions(self):
"""
@@ -451,17 +447,15 @@ class Wildcard(object):
).getElementsByTagName("solvable")
if not solvables:
raise CommandExecutionError(
- "No packages found matching '{0}'".format(self.name)
+ "No packages found matching '{}'".format(self.name)
)
return sorted(
- set(
- [
- slv.getAttribute(self._attr_solvable_version)
- for slv in solvables
- if slv.getAttribute(self._attr_solvable_version)
- ]
- )
+ {
+ slv.getAttribute(self._attr_solvable_version)
+ for slv in solvables
+ if slv.getAttribute(self._attr_solvable_version)
+ }
)
def _get_scope_versions(self, pkg_versions):
@@ -489,7 +483,7 @@ class Wildcard(object):
self._op = version.replace(exact_version, "") or None
if self._op and self._op not in self.Z_OP:
raise CommandExecutionError(
- 'Zypper do not supports operator "{0}".'.format(self._op)
+ 'Zypper do not supports operator "{}".'.format(self._op)
)
self.version = exact_version
@@ -539,14 +533,11 @@ def list_upgrades(refresh=True, root=None, **kwargs):
cmd = ["list-updates"]
if "fromrepo" in kwargs:
repos = kwargs["fromrepo"]
- if isinstance(repos, six.string_types):
+ if isinstance(repos, str):
repos = [repos]
for repo in repos:
cmd.extend(
- [
- "--repo",
- repo if isinstance(repo, six.string_types) else six.text_type(repo),
- ]
+ ["--repo", repo if isinstance(repo, str) else str(repo),]
)
log.debug("Targeting repos: %s", repos)
for update_node in (
@@ -610,7 +601,7 @@ def info_installed(*names, **kwargs):
for _nfo in pkg_nfo:
t_nfo = dict()
# Translate dpkg-specific keys to a common structure
- for key, value in six.iteritems(_nfo):
+ for key, value in _nfo.items():
if key == "source_rpm":
t_nfo["source"] = value
else:
@@ -1033,9 +1024,7 @@ def list_repo_pkgs(*args, **kwargs):
fromrepo = kwargs.pop("fromrepo", "") or ""
ret = {}
- targets = [
- arg if isinstance(arg, six.string_types) else six.text_type(arg) for arg in args
- ]
+ targets = [arg if isinstance(arg, str) else str(arg) for arg in args]
def _is_match(pkgname):
"""
@@ -1124,7 +1113,7 @@ def _get_repo_info(alias, repos_cfg=None, root=None):
try:
meta = dict((repos_cfg or _get_configured_repos(root=root)).items(alias))
meta["alias"] = alias
- for key, val in six.iteritems(meta):
+ for key, val in meta.items():
if val in ["0", "1"]:
meta[key] = int(meta[key]) == 1
elif val == "NONE":
@@ -1197,7 +1186,7 @@ def del_repo(repo, root=None):
"message": msg[0].childNodes[0].nodeValue,
}
- raise CommandExecutionError("Repository '{0}' not found.".format(repo))
+ raise CommandExecutionError("Repository '{}' not found.".format(repo))
def mod_repo(repo, **kwargs):
@@ -1252,13 +1241,13 @@ def mod_repo(repo, **kwargs):
url = kwargs.get("url", kwargs.get("mirrorlist", kwargs.get("baseurl")))
if not url:
raise CommandExecutionError(
- "Repository '{0}' not found, and neither 'baseurl' nor "
+ "Repository '{}' not found, and neither 'baseurl' nor "
"'mirrorlist' was specified".format(repo)
)
if not _urlparse(url).scheme:
raise CommandExecutionError(
- "Repository '{0}' not found and URL for baseurl/mirrorlist "
+ "Repository '{}' not found and URL for baseurl/mirrorlist "
"is malformed".format(repo)
)
@@ -1281,7 +1270,7 @@ def mod_repo(repo, **kwargs):
if new_url == base_url:
raise CommandExecutionError(
- "Repository '{0}' already exists as '{1}'.".format(repo, alias)
+ "Repository '{}' already exists as '{}'.".format(repo, alias)
)
# Add new repo
@@ -1291,7 +1280,7 @@ def mod_repo(repo, **kwargs):
repos_cfg = _get_configured_repos(root=root)
if repo not in repos_cfg.sections():
raise CommandExecutionError(
- "Failed add new repository '{0}' for unspecified reason. "
+ "Failed add new repository '{}' for unspecified reason. "
"Please check zypper logs.".format(repo)
)
added = True
@@ -1327,12 +1316,10 @@ def mod_repo(repo, **kwargs):
cmd_opt.append(kwargs["gpgcheck"] and "--gpgcheck" or "--no-gpgcheck")
if "priority" in kwargs:
- cmd_opt.append(
- "--priority={0}".format(kwargs.get("priority", DEFAULT_PRIORITY))
- )
+ cmd_opt.append("--priority={}".format(kwargs.get("priority", DEFAULT_PRIORITY)))
if "humanname" in kwargs:
- cmd_opt.append("--name='{0}'".format(kwargs.get("humanname")))
+ cmd_opt.append("--name='{}'".format(kwargs.get("humanname")))
if kwargs.get("gpgautoimport") is True:
global_cmd_opt.append("--gpg-auto-import-keys")
@@ -1589,7 +1576,7 @@ def install(
if pkg_type == "repository":
targets = []
- for param, version_num in six.iteritems(pkg_params):
+ for param, version_num in pkg_params.items():
if version_num is None:
log.debug("targeting package: %s", param)
targets.append(param)
@@ -1597,7 +1584,7 @@ def install(
prefix, verstr = salt.utils.pkg.split_comparison(version_num)
if not prefix:
prefix = "="
- target = "{0}{1}{2}".format(param, prefix, verstr)
+ target = "{}{}{}".format(param, prefix, verstr)
log.debug("targeting package: %s", target)
targets.append(target)
elif pkg_type == "advisory":
@@ -1606,7 +1593,7 @@ def install(
for advisory_id in pkg_params:
if advisory_id not in cur_patches:
raise CommandExecutionError(
- 'Advisory id "{0}" not found'.format(advisory_id)
+ 'Advisory id "{}" not found'.format(advisory_id)
)
else:
# If we add here the `patch:` prefix, the
@@ -1703,7 +1690,7 @@ def install(
if errors:
raise CommandExecutionError(
- "Problem encountered {0} package(s)".format(
+ "Problem encountered {} package(s)".format(
"downloading" if downloadonly else "installing"
),
info={"errors": errors, "changes": ret},
@@ -1797,7 +1784,7 @@ def upgrade(
cmd_update.append("--dry-run")
if fromrepo:
- if isinstance(fromrepo, six.string_types):
+ if isinstance(fromrepo, str):
fromrepo = [fromrepo]
for repo in fromrepo:
cmd_update.extend(["--from" if dist_upgrade else "--repo", repo])
@@ -2052,7 +2039,7 @@ def list_locks(root=None):
)
if lock.get("solvable_name"):
locks[lock.pop("solvable_name")] = lock
- except IOError:
+ except OSError:
pass
except Exception: # pylint: disable=broad-except
log.warning("Detected a problem when accessing {}".format(_locks))
@@ -2089,7 +2076,7 @@ def clean_locks(root=None):
return out
-def remove_lock(packages, **kwargs): # pylint: disable=unused-argument
-def remove_lock(packages, root=None, **kwargs): # pylint: disable=unused-argument
+def unhold(name=None, pkgs=None, **kwargs):
'''
"""
Remove specified package lock.
@@ -1783,7 +1784,47 @@ def remove_lock(packages, **kwargs): # pylint: disable=unused-argument
@@ -2104,8 +2091,50 @@ def remove_lock(packages, root=None, **kwargs): # pylint: disable=unused-argume
salt '*' pkg.remove_lock <package1>,<package2>,<package3>
salt '*' pkg.remove_lock pkgs='["foo", "bar"]'
'''
"""
+ ret = {}
+ if (not name and not pkgs) or (name and pkgs):
+ raise CommandExecutionError('Name or packages must be specified.')
+ raise CommandExecutionError("Name or packages must be specified.")
+ elif name:
+ pkgs = [name]
+
+ locks = list_locks()
+ try:
+ pkgs = list(__salt__['pkg_resource.parse_targets'](pkgs)[0].keys())
+ pkgs = list(__salt__["pkg_resource.parse_targets"](pkgs)[0].keys())
+ except MinionError as exc:
+ raise CommandExecutionError(exc)
+
@ -52,39 +333,44 @@ index 50279ccbd1..08a9c2ed4d 100644
+ for pkg in pkgs:
+ if locks.get(pkg):
+ removed.append(pkg)
+ ret[pkg]['comment'] = 'Package {0} is no longer held.'.format(pkg)
+ ret[pkg]["comment"] = "Package {} is no longer held.".format(pkg)
+ else:
+ missing.append(pkg)
+ ret[pkg]['comment'] = 'Package {0} unable to be unheld.'.format(pkg)
+ ret[pkg]["comment"] = "Package {} unable to be unheld.".format(pkg)
+
+ if removed:
+ __zypper__.call('rl', *removed)
+ __zypper__.call("rl", *removed)
+
+ return ret
+
+
+def remove_lock(packages, **kwargs): # pylint: disable=unused-argument
+ '''
+ """
+ Remove specified package lock.
+
+ CLI Example:
+
+ .. code-block:: bash
- locks = list_locks(root)
+ salt '*' pkg.remove_lock <package name>
+ salt '*' pkg.remove_lock <package1>,<package2>,<package3>
+ salt '*' pkg.remove_lock pkgs='["foo", "bar"]'
+ '''
+ salt.utils.versions.warn_until('Sodium', 'This function is deprecated. Please use unhold() instead.')
locks = list_locks()
+ """
+ salt.utils.versions.warn_until(
+ "Sodium", "This function is deprecated. Please use unhold() instead."
+ )
+ locks = list_locks()
try:
packages = list(__salt__['pkg_resource.parse_targets'](packages)[0].keys())
@@ -1804,6 +1845,50 @@ def remove_lock(packages, **kwargs): # pylint: disable=unused-argument
return {'removed': len(removed), 'not_found': missing}
packages = list(__salt__["pkg_resource.parse_targets"](packages)[0].keys())
except MinionError as exc:
@@ -2125,7 +2154,51 @@ def remove_lock(packages, root=None, **kwargs): # pylint: disable=unused-argume
return {"removed": len(removed), "not_found": missing}
-def add_lock(packages, root=None, **kwargs): # pylint: disable=unused-argument
+def hold(name=None, pkgs=None, **kwargs):
+ '''
+ """
+ Add a package lock. Specify packages to lock by exact name.
+
+ CLI Example:
@ -99,46 +385,85 @@ index 50279ccbd1..08a9c2ed4d 100644
+ :param pkgs:
+ :param kwargs:
+ :return:
+ '''
+ """
+ ret = {}
+ if (not name and not pkgs) or (name and pkgs):
+ raise CommandExecutionError('Name or packages must be specified.')
+ raise CommandExecutionError("Name or packages must be specified.")
+ elif name:
+ pkgs = [name]
+
+ locks = list_locks()
+ added = []
+ try:
+ pkgs = list(__salt__['pkg_resource.parse_targets'](pkgs)[0].keys())
+ pkgs = list(__salt__["pkg_resource.parse_targets"](pkgs)[0].keys())
+ except MinionError as exc:
+ raise CommandExecutionError(exc)
+
+ for pkg in pkgs:
+ ret[pkg] = {'name': pkg, 'changes': {}, 'result': False, 'comment': ''}
+ ret[pkg] = {"name": pkg, "changes": {}, "result": False, "comment": ""}
+ if not locks.get(pkg):
+ added.append(pkg)
+ ret[pkg]['comment'] = 'Package {0} is now being held.'.format(pkg)
+ ret[pkg]["comment"] = "Package {} is now being held.".format(pkg)
+ else:
+ ret[pkg]['comment'] = 'Package {0} is already set to be held.'.format(pkg)
+ ret[pkg]["comment"] = "Package {} is already set to be held.".format(pkg)
+
+ if added:
+ __zypper__.call('al', *added)
+ __zypper__.call("al", *added)
+
+ return ret
+
+
def add_lock(packages, **kwargs): # pylint: disable=unused-argument
'''
+def add_lock(packages, **kwargs): # pylint: disable=unused-argument
"""
Add a package lock. Specify packages to lock by exact name.
@@ -1816,6 +1901,7 @@ def add_lock(packages, **kwargs): # pylint: disable=unused-argument
@@ -2140,7 +2213,10 @@ def add_lock(packages, root=None, **kwargs): # pylint: disable=unused-argument
salt '*' pkg.add_lock <package1>,<package2>,<package3>
salt '*' pkg.add_lock pkgs='["foo", "bar"]'
'''
+ salt.utils.versions.warn_until('Sodium', 'This function is deprecated. Please use hold() instead.')
locks = list_locks()
"""
- locks = list_locks(root)
+ salt.utils.versions.warn_until(
+ "Sodium", "This function is deprecated. Please use hold() instead."
+ )
+ locks = list_locks()
added = []
try:
packages = list(__salt__["pkg_resource.parse_targets"](packages)[0].keys())
@@ -2495,7 +2571,7 @@ def search(criteria, refresh=False, **kwargs):
.getElementsByTagName("solvable")
)
if not solvables:
- raise CommandExecutionError("No packages found matching '{0}'".format(criteria))
+ raise CommandExecutionError("No packages found matching '{}'".format(criteria))
out = {}
for solvable in solvables:
@@ -2649,13 +2725,13 @@ def download(*packages, **kwargs):
if failed:
pkg_ret[
"_error"
- ] = "The following package(s) failed to download: {0}".format(
+ ] = "The following package(s) failed to download: {}".format(
", ".join(failed)
)
return pkg_ret
raise CommandExecutionError(
- "Unable to download packages: {0}".format(", ".join(packages))
+ "Unable to download packages: {}".format(", ".join(packages))
)
@@ -2726,7 +2802,7 @@ def diff(*paths, **kwargs):
if pkg_to_paths:
local_pkgs = __salt__["pkg.download"](*pkg_to_paths.keys(), **kwargs)
- for pkg, files in six.iteritems(pkg_to_paths):
+ for pkg, files in pkg_to_paths.items():
for path in files:
ret[path] = (
__salt__["lowpkg.diff"](local_pkgs[pkg]["path"], path)
--
2.16.4
2.29.2

View File

@ -1,4 +1,4 @@
From 5254ec34316a0924edb4856f84e6092fafe479fa Mon Sep 17 00:00:00 2001
From 57cab2d4e282f8b1d17610e6b4a0e772494bfcb1 Mon Sep 17 00:00:00 2001
From: Alberto Planas <aplanas@suse.com>
Date: Tue, 20 Oct 2020 11:43:09 +0200
Subject: [PATCH] Add "migrated" state and GPG key management functions
@ -20,17 +20,16 @@ same virtual package, based on the counterpart from rpm_lowpkg API.
---
changelog/58782.added | 1 +
salt/modules/aptpkg.py | 7 +-
salt/modules/rpm_lowpkg.py | 151 ++++++++
salt/modules/rpm_lowpkg.py | 151 +++++++++
salt/modules/yumpkg.py | 88 +++++
salt/modules/zypperpkg.py | 90 ++++-
salt/states/pkgrepo.py | 208 ++++++++++
tests/unit/modules/test_rpm_lowpkg.py | 215 +++++++++++
tests/unit/modules/test_yumpkg.py | 43 ++-
tests/unit/modules/test_zypperpkg.py | 40 +-
tests/unit/states/test_pkgrepo.py | 527 ++++++++++++++++++++++++++
10 files changed, 1363 insertions(+), 7 deletions(-)
salt/modules/zypperpkg.py | 88 +++++
salt/states/pkgrepo.py | 207 ++++++++++++
tests/unit/modules/test_rpm_lowpkg.py | 236 ++++++++++++-
tests/unit/modules/test_yumpkg.py | 41 ++-
tests/unit/modules/test_zypperpkg.py | 40 ++-
tests/unit/states/test_pkgrepo.py | 468 +++++++++++++++++++++++++-
10 files changed, 1301 insertions(+), 26 deletions(-)
create mode 100644 changelog/58782.added
create mode 100644 tests/unit/states/test_pkgrepo.py
diff --git a/changelog/58782.added b/changelog/58782.added
new file mode 100644
@ -41,43 +40,43 @@ index 0000000000..f9e69f64f2
+Add GPG key functions in "lowpkg" and a "migrated" function in the "pkgrepo" state for repository and GPG key migration.
\ No newline at end of file
diff --git a/salt/modules/aptpkg.py b/salt/modules/aptpkg.py
index 765d69aff2..28b8597ef5 100644
index e4a9872aad..e001d2f11c 100644
--- a/salt/modules/aptpkg.py
+++ b/salt/modules/aptpkg.py
@@ -1878,7 +1878,7 @@ def _convert_if_int(value):
@@ -1908,7 +1908,7 @@ def _convert_if_int(value):
return value
-def get_repo_keys():
+def get_repo_keys(**kwargs):
'''
"""
.. versionadded:: 2017.7.0
@@ -1950,7 +1950,9 @@ def get_repo_keys():
@@ -1990,7 +1990,9 @@ def get_repo_keys():
return ret
-def add_repo_key(path=None, text=None, keyserver=None, keyid=None, saltenv='base'):
-def add_repo_key(path=None, text=None, keyserver=None, keyid=None, saltenv="base"):
+def add_repo_key(
+ path=None, text=None, keyserver=None, keyid=None, saltenv='base', **kwargs
+ path=None, text=None, keyserver=None, keyid=None, saltenv="base", **kwargs
+):
'''
"""
.. versionadded:: 2017.7.0
@@ -1976,7 +1978,6 @@ def add_repo_key(path=None, text=None, keyserver=None, keyid=None, saltenv='base
@@ -2016,7 +2018,6 @@ def add_repo_key(path=None, text=None, keyserver=None, keyid=None, saltenv="base
salt '*' pkg.add_repo_key keyserver='keyserver.example' keyid='0000AAAA'
'''
cmd = ['apt-key']
"""
cmd = ["apt-key"]
- kwargs = {}
current_repo_keys = get_repo_keys()
diff --git a/salt/modules/rpm_lowpkg.py b/salt/modules/rpm_lowpkg.py
index c8a87276b2..fee0221a7c 100644
index 393b0f453a..57f336bacf 100644
--- a/salt/modules/rpm_lowpkg.py
+++ b/salt/modules/rpm_lowpkg.py
@@ -823,3 +823,154 @@ def checksum(*paths, **kwargs):
python_shell=False))
@@ -835,3 +835,154 @@ def checksum(*paths, **kwargs):
)
return ret
+
@ -232,12 +231,12 @@ index c8a87276b2..fee0221a7c 100644
+ cmd.extend(["-e", key])
+ return __salt__["cmd.retcode"](cmd) == 0
diff --git a/salt/modules/yumpkg.py b/salt/modules/yumpkg.py
index 04ab240cd4..85a2dbd857 100644
index c58b3e4c70..dd843f985b 100644
--- a/salt/modules/yumpkg.py
+++ b/salt/modules/yumpkg.py
@@ -3271,3 +3271,91 @@ def list_installed_patches(**kwargs):
@@ -3346,3 +3346,91 @@ def list_installed_patches(**kwargs):
salt '*' pkg.list_installed_patches
'''
"""
return _get_patches(installed_only=True)
+
+
@ -328,19 +327,10 @@ index 04ab240cd4..85a2dbd857 100644
+ """
+ return __salt__["lowpkg.remove_gpg_key"](keyid, root)
diff --git a/salt/modules/zypperpkg.py b/salt/modules/zypperpkg.py
index d84a6af6e0..fab7736701 100644
index d06c265202..5e13c68708 100644
--- a/salt/modules/zypperpkg.py
+++ b/salt/modules/zypperpkg.py
@@ -1270,7 +1270,7 @@ def mod_repo(repo, **kwargs):
cmd_opt.append("--priority={0}".format(kwargs.get('priority', DEFAULT_PRIORITY)))
if 'humanname' in kwargs:
- cmd_opt.append("--name='{0}'".format(kwargs.get('humanname')))
+ cmd_opt.extend(["--name", kwargs.get("humanname")])
if kwargs.get('gpgautoimport') is True:
global_cmd_opt.append('--gpg-auto-import-keys')
@@ -2879,3 +2879,91 @@ def resolve_capabilities(pkgs, refresh=False, root=None, **kwargs):
@@ -3004,3 +3004,91 @@ def resolve_capabilities(pkgs, refresh=False, root=None, **kwargs):
else:
ret.append(name)
return ret
@ -433,27 +423,19 @@ index d84a6af6e0..fab7736701 100644
+ """
+ return __salt__["lowpkg.remove_gpg_key"](keyid, root)
diff --git a/salt/states/pkgrepo.py b/salt/states/pkgrepo.py
index c39e857580..6c42d17d32 100644
index 70cb7a1c7e..d734bb9de9 100644
--- a/salt/states/pkgrepo.py
+++ b/salt/states/pkgrepo.py
@@ -84,6 +84,7 @@ package managers are APT, DNF, YUM and Zypper. Here is some example SLS:
@@ -93,6 +93,7 @@ package managers are APT, DNF, YUM and Zypper. Here is some example SLS:
"""
# Import Python libs
from __future__ import absolute_import, print_function, unicode_literals
+import os
import sys
# Import salt libs
@@ -96,6 +97,7 @@ import salt.utils.pkg.rpm
# Import 3rd-party libs
from salt.ext import six
+import salt.utils.versions
def __virtual__():
@@ -643,3 +645,209 @@ def absent(name, **kwargs):
ret['comment'] = 'Failed to remove repo {0}'.format(name)
import salt.utils.data
@@ -679,3 +680,209 @@ def absent(name, **kwargs):
ret["comment"] = "Failed to remove repo {}".format(name)
return ret
+
@ -663,21 +645,49 @@ index c39e857580..6c42d17d32 100644
+
+ return ret
diff --git a/tests/unit/modules/test_rpm_lowpkg.py b/tests/unit/modules/test_rpm_lowpkg.py
index b6cbd9e5cb..ff3678fde5 100644
index ec9ecd40cb..84020263ea 100644
--- a/tests/unit/modules/test_rpm_lowpkg.py
+++ b/tests/unit/modules/test_rpm_lowpkg.py
@@ -5,6 +5,7 @@
@@ -2,6 +2,7 @@
:codeauthor: Jayesh Kariya <jayeshk@saltstack.com>
"""
# Import Python Libs
from __future__ import absolute_import
+import datetime
# Import Salt Testing Libs
import salt.modules.rpm_lowpkg as rpm
from tests.support.mixins import LoaderModuleMockMixin
@@ -205,3 +206,217 @@ class RpmTestCase(TestCase, LoaderModuleMockMixin):
with patch('salt.modules.rpm_lowpkg.rpm.labelCompare', MagicMock(return_value=0)), \
patch('salt.modules.rpm_lowpkg.HAS_RPM', False):
self.assertEqual(-1, rpm.version_cmp('1', '2')) # mock returns -1, a python implementation was called
@@ -15,8 +16,8 @@ def _called_with_root(mock):
def _called_with_root(mock):
- cmd = ' '.join(mock.call_args[0][0])
- return cmd.startswith('rpm --root /')
+ cmd = " ".join(mock.call_args[0][0])
+ return cmd.startswith("rpm --root /")
class RpmTestCase(TestCase, LoaderModuleMockMixin):
@@ -263,14 +264,223 @@ class RpmTestCase(TestCase, LoaderModuleMockMixin):
:return:
"""
- self.assertEqual(-1, rpm.version_cmp("1", "2"))
- self.assertEqual(mock_version_cmp.called, True)
- self.assertEqual(mock_log.warning.called, True)
- self.assertEqual(
- mock_log.warning.mock_calls[0][1][0],
- "Please install a package that provides rpm.labelCompare for more accurate version comparisons.",
- )
- self.assertEqual(
- mock_log.warning.mock_calls[1][1][0],
- "Falling back on salt.utils.versions.version_cmp() for version comparisons",
- )
+ with patch(
+ "salt.modules.rpm_lowpkg.rpm.labelCompare", MagicMock(return_value=0)
+ ), patch("salt.modules.rpm_lowpkg.HAS_RPM", False):
+ self.assertEqual(
+ -1, rpm.version_cmp("1", "2")
+ ) # mock returns -1, a python implementation was called
+
+ def test_list_gpg_keys_no_info(self):
+ """
@ -893,39 +903,31 @@ index b6cbd9e5cb..ff3678fde5 100644
+ self.assertTrue(rpm.remove_gpg_key("gpg-pubkey-1"))
+ self.assertFalse(_called_with_root(mock))
diff --git a/tests/unit/modules/test_yumpkg.py b/tests/unit/modules/test_yumpkg.py
index 9fbe3d051e..dfe00a7181 100644
index 4784160d25..e65a1f8b8b 100644
--- a/tests/unit/modules/test_yumpkg.py
+++ b/tests/unit/modules/test_yumpkg.py
@@ -10,15 +10,17 @@ from tests.support.unit import TestCase, skipIf
from tests.support.mock import (
Mock,
MagicMock,
+ mock_open,
patch,
)
# Import Salt libs
@@ -5,9 +5,9 @@ import salt.modules.pkg_resource as pkg_resource
import salt.modules.rpm_lowpkg as rpm
import salt.modules.yumpkg as yumpkg
import salt.utils.platform
-from salt.exceptions import CommandExecutionError
+from salt.exceptions import CommandExecutionError, SaltInvocationError
import salt.modules.rpm_lowpkg as rpm
from salt.ext import six
import salt.modules.yumpkg as yumpkg
import salt.modules.pkg_resource as pkg_resource
+import salt.utils.platform
from tests.support.mixins import LoaderModuleMockMixin
-from tests.support.mock import MagicMock, Mock, patch
+from tests.support.mock import MagicMock, Mock, mock_open, patch
from tests.support.unit import TestCase, skipIf
try:
import pytest
@@ -799,8 +801,45 @@ class YumTestCase(TestCase, LoaderModuleMockMixin):
with pytest.raises(CommandExecutionError):
yumpkg._get_yum_config()
@@ -1630,6 +1630,43 @@ class YumTestCase(TestCase, LoaderModuleMockMixin):
ret = yumpkg.get_repo(repo, **kwargs)
assert ret == expected, ret
+ def test_get_repo_keys(self):
+ salt_mock = {"lowpkg.list_gpg_keys": MagicMock(return_value=True)}
+ with patch.dict(yumpkg.__salt__, salt_mock):
+ self.assertTrue(yumpkg.get_repo_keys(info=True, root="/mnt"))
+ salt_mock["lowpkg.list_gpg_keys"].assert_called_once_with(True, "/mnt")
-@skipIf(pytest is None, 'PyTest is missing')
+
+ def test_add_repo_key_fail(self):
+ with self.assertRaises(SaltInvocationError):
+ yumpkg.add_repo_key()
@ -957,25 +959,23 @@ index 9fbe3d051e..dfe00a7181 100644
+ self.assertTrue(yumpkg.del_repo_key(keyid="keyid", root="/mnt"))
+ salt_mock["lowpkg.remove_gpg_key"].assert_called_once_with("keyid", "/mnt")
+
+
+@skipIf(pytest is None, "PyTest is missing")
@skipIf(pytest is None, "PyTest is missing")
class YumUtilsTestCase(TestCase, LoaderModuleMockMixin):
'''
Yum/Dnf utils tests.
diff --git a/tests/unit/modules/test_zypperpkg.py b/tests/unit/modules/test_zypperpkg.py
index 8cc84485b5..1f2a7dc4b2 100644
index eaa4d9a76a..018c1ffbca 100644
--- a/tests/unit/modules/test_zypperpkg.py
+++ b/tests/unit/modules/test_zypperpkg.py
@@ -22,7 +22,7 @@ from tests.support.mock import (
import salt.utils.files
@@ -10,7 +10,7 @@ import salt.modules.pkg_resource as pkg_resource
import salt.modules.zypperpkg as zypper
import salt.modules.pkg_resource as pkg_resource
import salt.utils.files
import salt.utils.pkg
-from salt.exceptions import CommandExecutionError
+from salt.exceptions import CommandExecutionError, SaltInvocationError
# Import 3rd-party libs
from salt.ext import six
from salt.ext.six.moves import configparser
@@ -1728,3 +1728,41 @@ pattern() = package-c"""
from tests.support.mixins import LoaderModuleMockMixin
@@ -2175,3 +2175,41 @@ pattern() = package-c"""
python_shell=False,
env={"ZYPP_READONLY_HACK": "1"},
)
@ -1018,79 +1018,33 @@ index 8cc84485b5..1f2a7dc4b2 100644
+ self.assertTrue(zypper.del_repo_key(keyid="keyid", root="/mnt"))
+ salt_mock["lowpkg.remove_gpg_key"].assert_called_once_with("keyid", "/mnt")
diff --git a/tests/unit/states/test_pkgrepo.py b/tests/unit/states/test_pkgrepo.py
new file mode 100644
index 0000000000..9d8d88abd9
--- /dev/null
index b2be5b4da1..135e545220 100644
--- a/tests/unit/states/test_pkgrepo.py
+++ b/tests/unit/states/test_pkgrepo.py
@@ -0,0 +1,527 @@
+"""
+ :codeauthor: Tyler Johnson <tjohnson@saltstack.com>
+"""
+import salt.states.pkgrepo as pkgrepo
@@ -1,17 +1,12 @@
-# -*- coding: utf-8 -*-
"""
:codeauthor: Tyler Johnson <tjohnson@saltstack.com>
"""
-# Import Python libs
-from __future__ import absolute_import
-# Import Salt Libs
import salt.states.pkgrepo as pkgrepo
-
-# Import Salt Testing Libs
+import salt.utils.platform
+from tests.support.mixins import LoaderModuleMockMixin
+from tests.support.mock import MagicMock, patch
from tests.support.mixins import LoaderModuleMockMixin
from tests.support.mock import MagicMock, patch
-from tests.support.unit import TestCase
+from tests.support.unit import TestCase, skipIf
+
+
+class PkgrepoTestCase(TestCase, LoaderModuleMockMixin):
+ """
+ Test cases for salt.states.pkgrepo
+ """
+
+ def setup_loader_modules(self):
+ return {
+ pkgrepo: {
+ "__opts__": {"test": True},
+ "__grains__": {"os": "", "os_family": ""},
+ }
+ }
+
+ def test_new_key_url(self):
+ """
+ Test when only the key_url is changed that a change is triggered
+ """
+ kwargs = {
+ "name": "deb http://mock/ sid main",
+ "disabled": False,
+ }
+ key_url = "http://mock/changed_gpg.key"
+
+ with patch.dict(
+ pkgrepo.__salt__, {"pkg.get_repo": MagicMock(return_value=kwargs)}
+ ):
+ ret = pkgrepo.managed(key_url=key_url, **kwargs)
+ self.assertDictEqual(
+ {"key_url": {"old": None, "new": key_url}}, ret["changes"]
+ )
+
+ def test_update_key_url(self):
+ """
+ Test when only the key_url is changed that a change is triggered
+ """
+ kwargs = {
+ "name": "deb http://mock/ sid main",
+ "gpgcheck": 1,
+ "disabled": False,
+ "key_url": "http://mock/gpg.key",
+ }
+ changed_kwargs = kwargs.copy()
+ changed_kwargs["key_url"] = "http://mock/gpg2.key"
+
+ with patch.dict(
+ pkgrepo.__salt__, {"pkg.get_repo": MagicMock(return_value=kwargs)}
+ ):
+ ret = pkgrepo.managed(**changed_kwargs)
+ self.assertIn("key_url", ret["changes"], "Expected a change to key_url")
+ self.assertDictEqual(
+ {
+ "key_url": {
+ "old": kwargs["key_url"],
+ "new": changed_kwargs["key_url"],
+ }
+ },
+ ret["changes"],
+ )
class PkgrepoTestCase(TestCase, LoaderModuleMockMixin):
@@ -72,3 +67,462 @@ class PkgrepoTestCase(TestCase, LoaderModuleMockMixin):
},
ret["changes"],
)
+
+ def test__normalize_repo_suse(self):
+ repo = {
@ -1551,6 +1505,6 @@ index 0000000000..9d8d88abd9
+ },
+ )
--
2.29.1
2.29.2

View File

@ -1,26 +0,0 @@
From 082fa07e5301414b5b834b731aaa96bd5d966de7 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Pablo=20Su=C3=A1rez=20Hern=C3=A1ndez?=
<psuarezhernandez@suse.com>
Date: Tue, 10 Mar 2020 13:16:05 +0000
Subject: [PATCH] Add missing _utils at loader grains_func
---
salt/loader.py | 1 +
1 file changed, 1 insertion(+)
diff --git a/salt/loader.py b/salt/loader.py
index c68562988d..742b2f8e22 100644
--- a/salt/loader.py
+++ b/salt/loader.py
@@ -683,6 +683,7 @@ def grain_funcs(opts, proxy=None):
__opts__ = salt.config.minion_config('/etc/salt/minion')
grainfuncs = salt.loader.grain_funcs(__opts__)
'''
+ _utils = utils(opts)
ret = LazyLoader(
_module_dirs(
opts,
--
2.23.0

View File

@ -1,36 +0,0 @@
From 5c25babafd4e4bbe55626713851ea5d6345c43d1 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Pablo=20Su=C3=A1rez=20Hern=C3=A1ndez?=
<psuarezhernandez@suse.com>
Date: Wed, 9 Oct 2019 13:03:33 +0100
Subject: [PATCH] Add missing 'fun' for returns from wfunc executions
---
salt/client/ssh/__init__.py | 4 ++++
1 file changed, 4 insertions(+)
diff --git a/salt/client/ssh/__init__.py b/salt/client/ssh/__init__.py
index 4881540837..1373274739 100644
--- a/salt/client/ssh/__init__.py
+++ b/salt/client/ssh/__init__.py
@@ -682,6 +682,8 @@ class SSH(object):
data = {'return': data}
if 'id' not in data:
data['id'] = id_
+ if 'fun' not in data:
+ data['fun'] = fun
data['jid'] = jid # make the jid in the payload the same as the jid in the tag
self.event.fire_event(
data,
@@ -797,6 +799,8 @@ class SSH(object):
data = {'return': data}
if 'id' not in data:
data['id'] = id_
+ if 'fun' not in data:
+ data['fun'] = fun
data['jid'] = jid # make the jid in the payload the same as the jid in the tag
self.event.fire_event(
data,
--
2.16.4

View File

@ -1,4 +1,4 @@
From 0a6b5e92a4a74dee94eb33a939600f8c2e429c01 Mon Sep 17 00:00:00 2001
From c5e5dc304e897f8c1664cce29fe9ee63d84f3ae6 Mon Sep 17 00:00:00 2001
From: Bo Maryniuk <bo@suse.de>
Date: Fri, 12 Oct 2018 16:20:40 +0200
Subject: [PATCH] Add multi-file support and globbing to the filetree
@ -12,37 +12,37 @@ Collect system logs and boot logs
Support globbing in filetree
---
salt/cli/support/intfunc.py | 49 +++++++++++++++++++++--------------
salt/cli/support/profiles/default.yml | 7 +++++
salt/cli/support/intfunc.py | 49 ++++++++++++++++-----------
salt/cli/support/profiles/default.yml | 7 ++++
2 files changed, 37 insertions(+), 19 deletions(-)
diff --git a/salt/cli/support/intfunc.py b/salt/cli/support/intfunc.py
index 2727cd6394..f15f4d4097 100644
index d3d8f83cb8..a9f76a6003 100644
--- a/salt/cli/support/intfunc.py
+++ b/salt/cli/support/intfunc.py
@@ -6,6 +6,7 @@ Internal functions.
@@ -3,6 +3,7 @@ Internal functions.
"""
# Maybe this needs to be a modules in a future?
from __future__ import absolute_import, print_function, unicode_literals
import os
+import glob
from salt.cli.support.console import MessagesOutput
import salt.utils.files
import os
@@ -13,7 +14,7 @@ import salt.utils.files
import salt.utils.files
@@ -11,7 +12,7 @@ from salt.cli.support.console import MessagesOutput
out = MessagesOutput()
-def filetree(collector, path):
+def filetree(collector, *paths):
'''
"""
Add all files in the tree. If the "path" is a file,
only that file will be added.
@@ -21,22 +22,32 @@ def filetree(collector, path):
@@ -19,22 +20,32 @@ def filetree(collector, path):
:param path: File or directory
:return:
'''
"""
- if not path:
- out.error('Path not defined', ident=2)
- out.error("Path not defined", ident=2)
- else:
- # The filehandler needs to be explicitly passed here, so PyLint needs to accept that.
- # pylint: disable=W8470
@ -50,7 +50,7 @@ index 2727cd6394..f15f4d4097 100644
- filename = os.path.basename(path)
- try:
- file_ref = salt.utils.files.fopen(path) # pylint: disable=W
- out.put('Add {}'.format(filename), indent=2)
- out.put("Add {}".format(filename), indent=2)
- collector.add(filename)
- collector.link(title=path, path=file_ref)
- except Exception as err:
@ -62,9 +62,9 @@ index 2727cd6394..f15f4d4097 100644
+ _paths += glob.glob(path)
+ for path in set(_paths):
+ if not path:
+ out.error('Path not defined', ident=2)
+ out.error("Path not defined", ident=2)
+ elif not os.path.exists(path):
+ out.warning('Path {} does not exists'.format(path))
+ out.warning("Path {} does not exists".format(path))
else:
- for fname in os.listdir(path):
- fname = os.path.join(path, fname)
@ -75,7 +75,7 @@ index 2727cd6394..f15f4d4097 100644
+ filename = os.path.basename(path)
+ try:
+ file_ref = salt.utils.files.fopen(path) # pylint: disable=W
+ out.put('Add {}'.format(filename), indent=2)
+ out.put("Add {}".format(filename), indent=2)
+ collector.add(filename)
+ collector.link(title=path, path=file_ref)
+ except Exception as err:
@ -111,6 +111,6 @@ index 01d9a26193..3defb5eef3 100644
+ - /var/log/messages
--
2.16.4
2.29.2

View File

@ -1,4 +1,4 @@
From ad1323b4f83fa8f2954c0a965f4acaf91575a59b Mon Sep 17 00:00:00 2001
From 70d13dcc62286d5195bbf28b53aae61616cc0f8f Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Pablo=20Su=C3=A1rez=20Hern=C3=A1ndez?=
<psuarezhernandez@suse.com>
Date: Thu, 26 Mar 2020 13:08:16 +0000
@ -10,10 +10,10 @@ Subject: [PATCH] Add new custom SUSE capability for saltutil state
1 file changed, 2 insertions(+), 1 deletion(-)
diff --git a/salt/grains/extra.py b/salt/grains/extra.py
index 1082b05dba7830ee53078cff86b5183b5eea2829..b30ab0091fee7cda8f74b861e9e9f95f8ad85b39 100644
index ddc22293ea..0eec27e628 100644
--- a/salt/grains/extra.py
+++ b/salt/grains/extra.py
@@ -80,5 +80,6 @@ def config():
@@ -71,5 +71,6 @@ def config():
def suse_backported_capabilities():
return {
'__suse_reserved_pkg_all_versions_support': True,
@ -22,6 +22,6 @@ index 1082b05dba7830ee53078cff86b5183b5eea2829..b30ab0091fee7cda8f74b861e9e9f95f
+ '__suse_reserved_saltutil_states_support': True
}
--
2.23.0
2.29.2

View File

@ -0,0 +1,107 @@
From cee4cc182b4740c912861c712dea7bc44eb70ffb Mon Sep 17 00:00:00 2001
From: Martin Seidl <mseidl@suse.de>
Date: Mon, 7 Dec 2020 01:10:51 +0100
Subject: [PATCH] add patch support for allow vendor change option with
zypper
---
salt/modules/zypperpkg.py | 46 +++++++++++++++++++++++++++------------
1 file changed, 32 insertions(+), 14 deletions(-)
diff --git a/salt/modules/zypperpkg.py b/salt/modules/zypperpkg.py
index 6f22994bf0..4a5cb85e7c 100644
--- a/salt/modules/zypperpkg.py
+++ b/salt/modules/zypperpkg.py
@@ -35,7 +35,6 @@ import salt.utils.versions
from salt.exceptions import CommandExecutionError, MinionError, SaltInvocationError
# pylint: disable=import-error,redefined-builtin,no-name-in-module
-from salt.ext import six
from salt.ext.six.moves import configparser
from salt.ext.six.moves.urllib.parse import urlparse as _urlparse
from salt.utils.versions import LooseVersion
@@ -1431,6 +1430,7 @@ def install(
no_recommends=False,
root=None,
inclusion_detection=False,
+ novendorchange=True,
**kwargs
):
"""
@@ -1478,6 +1478,10 @@ def install(
skip_verify
Skip the GPG verification check (e.g., ``--no-gpg-checks``)
+
+ novendorchange
+ Disallow vendor change
+
version
Can be either a version number, or the combination of a comparison
operator (<, >, <=, >=, =) and a version number (ex. '>1.2.3-4').
@@ -1638,6 +1642,22 @@ def install(
cmd_install.append(
kwargs.get("resolve_capabilities") and "--capability" or "--name"
)
+ if novendorchange:
+ if __grains__["osrelease_info"][0] > 11:
+ cmd_install.append("--no-allow-vendor-change")
+ log.info("Disabling vendor changes")
+ else:
+ log.warning(
+ "Enabling/Disabling vendor changes is not supported on this Zypper version"
+ )
+ else:
+ if __grains__["osrelease_info"][0] > 11:
+ cmd_install.append("--allow-vendor-change")
+ log.info("Enabling vendor changes")
+ else:
+ log.warning(
+ "Enabling/Disabling vendor changes is not supported on this Zypper version"
+ )
if not refresh:
cmd_install.insert(0, "--no-refresh")
@@ -1649,7 +1669,6 @@ def install(
cmd_install.extend(fromrepoopt)
if no_recommends:
cmd_install.append("--no-recommends")
-
errors = []
# Split the targets into batches of 500 packages each, so that
@@ -1793,19 +1812,18 @@ def upgrade(
cmd_update.extend(["--from" if dist_upgrade else "--repo", repo])
log.info("Targeting repos: %s", fromrepo)
- if dist_upgrade:
- # TODO: Grains validation should be moved to Zypper class
- if __grains__["osrelease_info"][0] > 11:
- if novendorchange:
- cmd_update.append("--no-allow-vendor-change")
- log.info("Disabling vendor changes")
- else:
- cmd_update.append("--allow-vendor-change")
- log.info("Enabling vendor changes")
+ # TODO: Grains validation should be moved to Zypper class
+ if __grains__["osrelease_info"][0] > 11:
+ if novendorchange:
+ cmd_update.append("--no-allow-vendor-change")
+ log.info("Disabling vendor changes")
else:
- log.warning(
- "Enabling/Disabling vendor changes is not supported on this Zypper version"
- )
+ cmd_update.append("--allow-vendor-change")
+ log.info("Enabling vendor changes")
+ else:
+ log.warning(
+ "Enabling/Disabling vendor changes is not supported on this Zypper version"
+ )
if no_recommends:
cmd_update.append("--no-recommends")
--
2.29.2

View File

@ -0,0 +1,404 @@
From c79f4a8619ff1275b2ec4400c1fb27d24c22a7eb Mon Sep 17 00:00:00 2001
From: Alexander Graul <mail@agraul.de>
Date: Tue, 8 Dec 2020 15:35:49 +0100
Subject: [PATCH] Add pkg.services_need_restart (#302)
* Add utils.systemd.pid_to_service function
This function translates a given PID to the systemd service name in case
the process belongs to a running service. It uses DBUS for the
translation if DBUS is available, falling back to parsing
``systemctl status -o json'' output.
* Add zypperpkg.services_need_restart
pkg.services_need_restart returns a list of system services that were
affected by package manager operations such as updates, downgrades or
reinstallations without having been restarted. This might cause issues,
e.g. in the case a shared object was loaded by a process and then
replaced by the package manager.
(cherry picked from commit b950fcdbd6cc8cb08e1413a0ed05e0ae21717cea)
* Add aptpkg.services_need_restart
pkg.services_need_restart returns a list of system services that were
affected by package manager operations such as updates, downgrades or
reinstallations without having been restarted. This might cause issues,
e.g. in the case a shared object was loaded by a process and then
replaced by the package manager.
Requires checkrestart, which is part of the debian-goodies package and
available from official Ubuntu and Debian repositories.
(cherry picked from commit b981f6ecb1a551b98c5cebab4975fc09c6a55a22)
* Add yumpkg.services_need_restart
pkg.services_need_restart returns a list of system services that were
affected by package manager operations such as updates, downgrades or
reinstallations without having been restarted. This might cause issues,
e.g. in the case a shared object was loaded by a process and then
replaced by the package manager.
Requires dnf with the needs-restarting plugin, which is part of
dnf-plugins-core and installed by default on RHEL/CentOS/Fedora.
Also requires systemd for the mapping between PIDs and systemd services.
(cherry picked from commit 5e2be1095729c9f73394e852b82749950957e6fb)
* Add changelog entry for issue #58261
(cherry picked from commit 148877ed8ff7a47132c1186274739e648f7acf1c)
* Simplify dnf needs-restarting output parsing
Co-authored-by: Wayne Werner <waynejwerner@gmail.com>
(cherry picked from commit beb5d60f3cc64b880ec25ca188f8a73f6ec493dd)
---
changelog/58261.added | 1 +
salt/modules/aptpkg.py | 42 ++++++++++++++++-
salt/modules/yumpkg.py | 36 +++++++++++++++
salt/modules/zypperpkg.py | 25 ++++++++++
salt/utils/systemd.py | 69 ++++++++++++++++++++++++++++
tests/unit/modules/test_aptpkg.py | 22 ++++++++-
tests/unit/modules/test_yumpkg.py | 32 ++++++++++++-
tests/unit/modules/test_zypperpkg.py | 14 ++++++
8 files changed, 238 insertions(+), 3 deletions(-)
create mode 100644 changelog/58261.added
diff --git a/changelog/58261.added b/changelog/58261.added
new file mode 100644
index 0000000000..537a43e80d
--- /dev/null
+++ b/changelog/58261.added
@@ -0,0 +1 @@
+Added ``pkg.services_need_restart`` which lists system services that should be restarted after package management operations.
diff --git a/salt/modules/aptpkg.py b/salt/modules/aptpkg.py
index 03e99af733..a0e0cc30c1 100644
--- a/salt/modules/aptpkg.py
+++ b/salt/modules/aptpkg.py
@@ -38,7 +38,12 @@ import salt.utils.stringutils
import salt.utils.systemd
import salt.utils.versions
import salt.utils.yaml
-from salt.exceptions import CommandExecutionError, MinionError, SaltInvocationError
+from salt.exceptions import (
+ CommandExecutionError,
+ CommandNotFoundError,
+ MinionError,
+ SaltInvocationError,
+)
from salt.modules.cmdmod import _parse_env
log = logging.getLogger(__name__)
@@ -3029,3 +3034,38 @@ def list_downloaded(root=None, **kwargs):
).isoformat(),
}
return ret
+
+
+def services_need_restart(**kwargs):
+ """
+ .. versionadded:: NEXT
+
+ List services that use files which have been changed by the
+ package manager. It might be needed to restart them.
+
+ Requires checkrestart from the debian-goodies package.
+
+ CLI Examples:
+
+ .. code-block:: bash
+
+ salt '*' pkg.services_need_restart
+ """
+ if not salt.utils.path.which_bin(["checkrestart"]):
+ raise CommandNotFoundError(
+ "'checkrestart' is needed. It is part of the 'debian-goodies' "
+ "package which can be installed from official repositories."
+ )
+
+ cmd = ["checkrestart", "--machine"]
+ services = set()
+
+ cr_output = __salt__["cmd.run_stdout"](cmd, python_shell=False)
+ for line in cr_output.split("\n"):
+ if not line.startswith("SERVICE:"):
+ continue
+ end_of_name = line.find(",")
+ service = line[8:end_of_name] # skip "SERVICE:"
+ services.add(service)
+
+ return list(services)
diff --git a/salt/modules/yumpkg.py b/salt/modules/yumpkg.py
index dd843f985b..df174e737d 100644
--- a/salt/modules/yumpkg.py
+++ b/salt/modules/yumpkg.py
@@ -3434,3 +3434,39 @@ def del_repo_key(keyid, root=None, **kwargs):
"""
return __salt__["lowpkg.remove_gpg_key"](keyid, root)
+
+
+def services_need_restart(**kwargs):
+ """
+ .. versionadded:: NEXT
+
+ List services that use files which have been changed by the
+ package manager. It might be needed to restart them.
+
+ Requires systemd.
+
+ CLI Examples:
+
+ .. code-block:: bash
+
+ salt '*' pkg.services_need_restart
+ """
+ if _yum() != "dnf":
+ raise CommandExecutionError("dnf is required to list outdated services.")
+ if not salt.utils.systemd.booted(__context__):
+ raise CommandExecutionError("systemd is required to list outdated services.")
+
+ cmd = ["dnf", "--quiet", "needs-restarting"]
+ dnf_output = __salt__["cmd.run_stdout"](cmd, python_shell=False)
+ if not dnf_output:
+ return []
+
+ services = set()
+ for line in dnf_output.split("\n"):
+ pid, has_delim, _ = line.partition(":")
+ if has_delim:
+ service = salt.utils.systemd.pid_to_service(pid.strip())
+ if service:
+ services.add(service)
+
+ return list(services)
diff --git a/salt/modules/zypperpkg.py b/salt/modules/zypperpkg.py
index 5e13c68708..6f22994bf0 100644
--- a/salt/modules/zypperpkg.py
+++ b/salt/modules/zypperpkg.py
@@ -3092,3 +3092,28 @@ def del_repo_key(keyid, root=None, **kwargs):
"""
return __salt__["lowpkg.remove_gpg_key"](keyid, root)
+
+
+def services_need_restart(root=None, **kwargs):
+ """
+ .. versionadded:: NEXT
+
+ List services that use files which have been changed by the
+ package manager. It might be needed to restart them.
+
+ root
+ operate on a different root directory.
+
+ CLI Examples:
+
+ .. code-block:: bash
+
+ salt '*' pkg.services_need_restart
+
+ """
+ cmd = ["ps", "-sss"]
+
+ zypper_output = __zypper__(root=root).nolock.call(*cmd)
+ services = zypper_output.split()
+
+ return services
diff --git a/salt/utils/systemd.py b/salt/utils/systemd.py
index 4d902bc920..f42d0421f8 100644
--- a/salt/utils/systemd.py
+++ b/salt/utils/systemd.py
@@ -11,6 +11,12 @@ import salt.utils.path
import salt.utils.stringutils
from salt.exceptions import SaltInvocationError
+try:
+ import dbus
+except ImportError:
+ dbus = None
+
+
log = logging.getLogger(__name__)
@@ -114,3 +120,66 @@ def has_scope(context=None):
if _sd_version is None:
return False
return _sd_version >= 205
+
+
+def pid_to_service(pid):
+ """
+ Check if a PID belongs to a systemd service and return its name.
+ Return None if the PID does not belong to a service.
+
+ Uses DBUS if available.
+ """
+ if dbus:
+ return _pid_to_service_dbus(pid)
+ else:
+ return _pid_to_service_systemctl(pid)
+
+
+def _pid_to_service_systemctl(pid):
+ systemd_cmd = ["systemctl", "--output", "json", "status", str(pid)]
+ try:
+ systemd_output = subprocess.run(
+ systemd_cmd, check=True, text=True, capture_output=True
+ )
+ status_json = salt.utils.json.find_json(systemd_output.stdout)
+ except (ValueError, subprocess.CalledProcessError):
+ return None
+
+ name = status_json.get("_SYSTEMD_UNIT")
+ if name and name.endswith(".service"):
+ return _strip_suffix(name)
+ else:
+ return None
+
+
+def _pid_to_service_dbus(pid):
+ """
+ Use DBUS to check if a PID belongs to a running systemd service and return the service name if it does.
+ """
+ bus = dbus.SystemBus()
+ systemd_object = bus.get_object(
+ "org.freedesktop.systemd1", "/org/freedesktop/systemd1"
+ )
+ systemd = dbus.Interface(systemd_object, "org.freedesktop.systemd1.Manager")
+ try:
+ service_path = systemd.GetUnitByPID(pid)
+ service_object = bus.get_object("org.freedesktop.systemd1", service_path)
+ service_props = dbus.Interface(
+ service_object, "org.freedesktop.DBus.Properties"
+ )
+ service_name = service_props.Get("org.freedesktop.systemd1.Unit", "Id")
+ name = str(service_name)
+
+ if name and name.endswith(".service"):
+ return _strip_suffix(name)
+ else:
+ return None
+ except dbus.DBusException:
+ return None
+
+
+def _strip_suffix(service_name):
+ """
+ Strip ".service" suffix from a given service name.
+ """
+ return service_name[:-8]
diff --git a/tests/unit/modules/test_aptpkg.py b/tests/unit/modules/test_aptpkg.py
index eb3f9e2da7..1d4d2f7fdc 100644
--- a/tests/unit/modules/test_aptpkg.py
+++ b/tests/unit/modules/test_aptpkg.py
@@ -13,7 +13,6 @@ import textwrap
import pytest
import salt.modules.aptpkg as aptpkg
from salt.exceptions import CommandExecutionError, SaltInvocationError
-from salt.ext import six
from tests.support.mixins import LoaderModuleMockMixin
from tests.support.mock import MagicMock, Mock, call, patch
from tests.support.unit import TestCase, skipIf
@@ -1001,3 +1000,24 @@ class AptUtilsTestCase(TestCase, LoaderModuleMockMixin):
# We should attempt to call the cmd 5 times
self.assertEqual(cmd_mock.call_count, 5)
cmd_mock.has_calls(expected_calls)
+
+ @patch("salt.utils.path.which_bin", Mock(return_value="/usr/sbin/checkrestart"))
+ def test_services_need_restart(self):
+ """
+ Test that checkrestart output is parsed correctly
+ """
+ cr_output = """
+PROCESSES: 24
+PROGRAMS: 17
+PACKAGES: 8
+SERVICE:rsyslog,385,/usr/sbin/rsyslogd
+SERVICE:cups-daemon,390,/usr/sbin/cupsd
+ """
+
+ with patch.dict(
+ aptpkg.__salt__, {"cmd.run_stdout": Mock(return_value=cr_output)}
+ ):
+ assert sorted(aptpkg.services_need_restart()) == [
+ "cups-daemon",
+ "rsyslog",
+ ]
diff --git a/tests/unit/modules/test_yumpkg.py b/tests/unit/modules/test_yumpkg.py
index e65a1f8b8b..b97e82d307 100644
--- a/tests/unit/modules/test_yumpkg.py
+++ b/tests/unit/modules/test_yumpkg.py
@@ -7,7 +7,7 @@ import salt.modules.yumpkg as yumpkg
import salt.utils.platform
from salt.exceptions import CommandExecutionError, SaltInvocationError
from tests.support.mixins import LoaderModuleMockMixin
-from tests.support.mock import MagicMock, Mock, mock_open, patch
+from tests.support.mock import MagicMock, Mock, call, mock_open, patch
from tests.support.unit import TestCase, skipIf
try:
@@ -1745,3 +1745,33 @@ class YumUtilsTestCase(TestCase, LoaderModuleMockMixin):
python_shell=True,
username="Darth Vader",
)
+
+ @skipIf(not salt.utils.systemd.booted(), "Requires systemd")
+ @patch("salt.modules.yumpkg._yum", Mock(return_value="dnf"))
+ def test_services_need_restart(self):
+ """
+ Test that dnf needs-restarting output is parsed and
+ salt.utils.systemd.pid_to_service is called as expected.
+ """
+ expected = ["firewalld", "salt-minion"]
+
+ dnf_mock = Mock(
+ return_value="123 : /usr/bin/firewalld\n456 : /usr/bin/salt-minion\n"
+ )
+ systemd_mock = Mock(side_effect=["firewalld", "salt-minion"])
+ with patch.dict(yumpkg.__salt__, {"cmd.run_stdout": dnf_mock}), patch(
+ "salt.utils.systemd.pid_to_service", systemd_mock
+ ):
+ assert sorted(yumpkg.services_need_restart()) == expected
+ systemd_mock.assert_has_calls([call("123"), call("456")])
+
+ @patch("salt.modules.yumpkg._yum", Mock(return_value="dnf"))
+ def test_services_need_restart_requires_systemd(self):
+ """Test that yumpkg.services_need_restart raises an error if systemd is unavailable."""
+ with patch("salt.utils.systemd.booted", Mock(return_value=False)):
+ pytest.raises(CommandExecutionError, yumpkg.services_need_restart)
+
+ @patch("salt.modules.yumpkg._yum", Mock(return_value="yum"))
+ def test_services_need_restart_requires_dnf(self):
+ """Test that yumpkg.services_need_restart raises an error if DNF is unavailable."""
+ pytest.raises(CommandExecutionError, yumpkg.services_need_restart)
diff --git a/tests/unit/modules/test_zypperpkg.py b/tests/unit/modules/test_zypperpkg.py
index 018c1ffbca..9c4a224c55 100644
--- a/tests/unit/modules/test_zypperpkg.py
+++ b/tests/unit/modules/test_zypperpkg.py
@@ -2213,3 +2213,17 @@ pattern() = package-c"""
with patch.dict(zypper.__salt__, salt_mock):
self.assertTrue(zypper.del_repo_key(keyid="keyid", root="/mnt"))
salt_mock["lowpkg.remove_gpg_key"].assert_called_once_with("keyid", "/mnt")
+
+ def test_services_need_restart(self):
+ """
+ Test that zypper ps is used correctly to list services that need to
+ be restarted.
+ """
+ expected = ["salt-minion", "firewalld"]
+ zypper_output = "salt-minion\nfirewalld"
+ zypper_mock = Mock()
+ zypper_mock(root=None).nolock.call = Mock(return_value=zypper_output)
+
+ with patch("salt.modules.zypperpkg.__zypper__", zypper_mock):
+ assert zypper.services_need_restart() == expected
+ zypper_mock(root=None).nolock.call.assert_called_with("ps", "-sss")
--
2.29.2

View File

@ -1,27 +1,26 @@
From da936daeebd701e147707ad814c07bfc259d4be4 Mon Sep 17 00:00:00 2001
From 2422d30358bcd0f96e399e623136f7984d136b38 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Pablo=20Su=C3=A1rez=20Hern=C3=A1ndez?=
<psuarezhernandez@suse.com>
Date: Thu, 28 May 2020 09:37:08 +0100
Subject: [PATCH] Add publish_batch to ClearFuncs exposed methods
---
salt/master.py | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
salt/master.py | 1 +
1 file changed, 1 insertion(+)
diff --git a/salt/master.py b/salt/master.py
index 485c16029b12fc38fc88b54aba95f03aa95d14ee..7d7a094a1a212180bfb294df3ad8b38477981450 100644
index ab85c7f5c6..59bb19ce75 100644
--- a/salt/master.py
+++ b/salt/master.py
@@ -1906,7 +1906,7 @@ class ClearFuncs(TransportMethods):
# These methods will be exposed to the transport layer by
# MWorker._handle_clear
@@ -2042,6 +2042,7 @@ class ClearFuncs(TransportMethods):
expose_methods = (
- 'ping', 'publish', 'get_token', 'mk_token', 'wheel', 'runner',
+ 'ping', 'publish', 'publish_batch', 'get_token', 'mk_token', 'wheel', 'runner',
)
# The ClearFuncs object encapsulates the functions that can be executed in
"ping",
"publish",
+ "publish_batch",
"get_token",
"mk_token",
"wheel",
--
2.23.0
2.29.2

View File

@ -1,4 +1,4 @@
From 369567107fa18187f8cbc5040728037d0774287b Mon Sep 17 00:00:00 2001
From 99aa26e7ab4840cf38f54e7692d7d1eede3adeb4 Mon Sep 17 00:00:00 2001
From: Bo Maryniuk <bo@suse.de>
Date: Mon, 12 Mar 2018 12:01:39 +0100
Subject: [PATCH] Add SaltSSH multi-version support across Python
@ -254,10 +254,9 @@ Lintfix
Set master_top_first to False by default
---
doc/topics/releases/fluorine.rst | 178 +++++++++++++++++++++++++++++++++++++++
salt/client/ssh/ssh_py_shim.py | 4 +
salt/utils/thin.py | 1 +
3 files changed, 183 insertions(+)
doc/topics/releases/fluorine.rst | 178 +++++++++++++++++++++++++++++++
salt/client/ssh/ssh_py_shim.py | 3 +
2 files changed, 181 insertions(+)
create mode 100644 doc/topics/releases/fluorine.rst
diff --git a/doc/topics/releases/fluorine.rst b/doc/topics/releases/fluorine.rst
@ -445,39 +444,20 @@ index 0000000000..40c69e25cc
+Salt version is also available on the Master machine, although does not need to be directly
+installed together with the older Python interpreter.
diff --git a/salt/client/ssh/ssh_py_shim.py b/salt/client/ssh/ssh_py_shim.py
index cd7549a178..95b3931a32 100644
index c0ce0fd7de..5ddd282ed0 100644
--- a/salt/client/ssh/ssh_py_shim.py
+++ b/salt/client/ssh/ssh_py_shim.py
@@ -165,6 +165,9 @@ def unpack_thin(thin_path):
@@ -171,6 +171,9 @@ def unpack_thin(thin_path):
old_umask = os.umask(0o077) # pylint: disable=blacklisted-function
tfile.extractall(path=OPTIONS.saltdir)
tfile.close()
+ checksum_path = os.path.normpath(os.path.join(OPTIONS.saltdir, "thin_checksum"))
+ with open(checksum_path, 'w') as chk:
+ chk.write(OPTIONS.checksum + '\n')
+ with open(checksum_path, "w") as chk:
+ chk.write(OPTIONS.checksum + "\n")
os.umask(old_umask) # pylint: disable=blacklisted-function
try:
os.unlink(thin_path)
@@ -358,5 +361,6 @@ def main(argv): # pylint: disable=W0613
return retcode
+
if __name__ == '__main__':
sys.exit(main(sys.argv))
diff --git a/salt/utils/thin.py b/salt/utils/thin.py
index 8496db9569..0ff31cef39 100644
--- a/salt/utils/thin.py
+++ b/salt/utils/thin.py
@@ -9,6 +9,7 @@ from __future__ import absolute_import, print_function, unicode_literals
import copy
import logging
import os
+import copy
import shutil
import subprocess
import sys
--
2.16.4
2.29.2

View File

@ -1,9 +1,9 @@
From 717c9bc6cb81994c5f23de87cfa91112fa7bf89c Mon Sep 17 00:00:00 2001
From 8ad65d6fa39edc7fc1967e2df1f3db0aa7df4d11 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Pablo=20Su=C3=A1rez=20Hern=C3=A1ndez?=
<psuarezhernandez@suse.com>
Date: Wed, 22 May 2019 13:00:46 +0100
Subject: [PATCH] Add standalone configuration file for enabling package
formulas
Subject: [PATCH] Add standalone configuration file for enabling
package formulas
---
conf/suse/standalone-formulas-configuration.conf | 4 ++++
@ -21,6 +21,6 @@ index 0000000000..94d05fb2ee
+ - /usr/share/salt-formulas/states
+ - /srv/salt
--
2.16.4
2.29.2

File diff suppressed because it is too large Load Diff

View File

@ -1,4 +1,4 @@
From 82ddc9d93f6c0d6bc1e8dc6ebd30d6809d9f4d8f Mon Sep 17 00:00:00 2001
From ca2ad86438293af6715a9890b168f159ff4d9b9b Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?C=C3=A9dric=20Bosdonnat?= <cbosdonnat@suse.com>
Date: Thu, 18 Oct 2018 13:32:59 +0200
Subject: [PATCH] Add virt.all_capabilities
@ -10,100 +10,37 @@ before calling virt.domain_capabilities for each of them.
This commit embeds all this logic to get them all in a single
virt.all_capabilities call.
---
salt/modules/virt.py | 107 +++++++++++++++++++++++++++++-----------
tests/unit/modules/test_virt.py | 56 +++++++++++++++++++++
2 files changed, 134 insertions(+), 29 deletions(-)
salt/modules/virt.py | 73 +++++++++++++++++++++++++++++++--
tests/unit/modules/test_virt.py | 2 +-
2 files changed, 71 insertions(+), 4 deletions(-)
diff --git a/salt/modules/virt.py b/salt/modules/virt.py
index a2412bb745..3889238ecd 100644
index 313181c49e..362c2a68b5 100644
--- a/salt/modules/virt.py
+++ b/salt/modules/virt.py
@@ -4254,37 +4254,10 @@ def _parse_caps_loader(node):
@@ -5568,11 +5568,76 @@ def _parse_domain_caps(caps):
return result
-def domain_capabilities(emulator=None, arch=None, machine=None, domain=None, **kwargs):
+def _parse_domain_caps(caps):
'''
- Return the domain capabilities given an emulator, architecture, machine or virtualization type.
-
- .. versionadded:: 2019.2.0
-
- :param emulator: return the capabilities for the given emulator binary
- :param arch: return the capabilities for the given CPU architecture
- :param machine: return the capabilities for the given emulated machine type
- :param domain: return the capabilities for the given virtualization type.
- :param connection: libvirt connection URI, overriding defaults
- :param username: username to connect with, overriding defaults
- :param password: password to connect with, overriding defaults
-
- The list of the possible emulator, arch, machine and domain can be found in
- the host capabilities output.
-
- If none of the parameters is provided the libvirt default domain capabilities
- will be returned.
-
- CLI Example:
-
- .. code-block:: bash
-
- salt '*' virt.domain_capabilities arch='x86_64' domain='kvm'
-
+ """
+ Parse the XML document of domain capabilities into a structure.
'''
- conn = __get_conn(**kwargs)
- caps = ElementTree.fromstring(conn.getDomainCapabilities(emulator, arch, machine, domain, 0))
- conn.close()
-
result = {
'emulator': caps.find('path').text if caps.find('path') is not None else None,
'domain': caps.find('domain').text if caps.find('domain') is not None else None,
@@ -4324,6 +4297,82 @@ def domain_capabilities(emulator=None, arch=None, machine=None, domain=None, **k
return result
+def domain_capabilities(emulator=None, arch=None, machine=None, domain=None, **kwargs):
+ '''
+ Return the domain capabilities given an emulator, architecture, machine or virtualization type.
+
+ .. versionadded:: Fluorine
+
+ :param emulator: return the capabilities for the given emulator binary
+ :param arch: return the capabilities for the given CPU architecture
+ :param machine: return the capabilities for the given emulated machine type
+ :param domain: return the capabilities for the given virtualization type.
+ :param connection: libvirt connection URI, overriding defaults
+ :param username: username to connect with, overriding defaults
+ :param password: password to connect with, overriding defaults
+
+ The list of the possible emulator, arch, machine and domain can be found in
+ the host capabilities output.
+
+ If none of the parameters is provided, the libvirt default one is returned.
+
+ CLI Example:
+
+ .. code-block:: bash
+
+ salt '*' virt.domain_capabilities arch='x86_64' domain='kvm'
+
+ '''
+ conn = __get_conn(**kwargs)
+ result = []
+ try:
+ caps = ElementTree.fromstring(conn.getDomainCapabilities(emulator, arch, machine, domain, 0))
+ result = _parse_domain_caps(caps)
+ finally:
+ conn.close()
+
+ return result
+ """
+ result = {
+ "emulator": caps.find("path").text if caps.find("path") is not None else None,
+ "domain": caps.find("domain").text if caps.find("domain") is not None else None,
+ "machine": caps.find("machine").text
+ if caps.find("machine") is not None
+ else None,
+ "arch": caps.find("arch").text if caps.find("arch") is not None else None,
+ }
+
+
+def all_capabilities(**kwargs):
+ '''
+ """
+ Return the host and domain capabilities in a single call.
+
+ .. versionadded:: Neon
+ .. versionadded:: 3001
+
+ :param connection: libvirt connection URI, overriding defaults
+ :param username: username to connect with, overriding defaults
@ -115,100 +52,94 @@ index a2412bb745..3889238ecd 100644
+
+ salt '*' virt.all_capabilities
+
+ '''
+ """
+ conn = __get_conn(**kwargs)
+ result = {}
+ try:
+ host_caps = ElementTree.fromstring(conn.getCapabilities())
+ domains = [[(guest.get('arch', {}).get('name', None), key)
+ for key in guest.get('arch', {}).get('domains', {}).keys()]
+ for guest in [_parse_caps_guest(guest) for guest in host_caps.findall('guest')]]
+ domains = [
+ [
+ (guest.get("arch", {}).get("name", None), key)
+ for key in guest.get("arch", {}).get("domains", {}).keys()
+ ]
+ for guest in [
+ _parse_caps_guest(guest) for guest in host_caps.findall("guest")
+ ]
+ ]
+ flattened = [pair for item in (x for x in domains) for pair in item]
+ result = {
+ 'host': {
+ 'host': _parse_caps_host(host_caps.find('host')),
+ 'guests': [_parse_caps_guest(guest) for guest in host_caps.findall('guest')]
+ },
+ 'domains': [_parse_domain_caps(ElementTree.fromstring(
+ conn.getDomainCapabilities(None, arch, None, domain)))
+ for (arch, domain) in flattened]}
+ "host": {
+ "host": _parse_caps_host(host_caps.find("host")),
+ "guests": [
+ _parse_caps_guest(guest) for guest in host_caps.findall("guest")
+ ],
+ },
+ "domains": [
+ _parse_domain_caps(
+ ElementTree.fromstring(
+ conn.getDomainCapabilities(None, arch, None, domain)
+ )
+ )
+ for (arch, domain) in flattened
+ ],
+ }
+ return result
+ finally:
+ conn.close()
+
+
def domain_capabilities(emulator=None, arch=None, machine=None, domain=None, **kwargs):
"""
Return the domain capabilities given an emulator, architecture, machine or virtualization type.
- .. versionadded:: 2019.2.0
+ .. versionadded:: Fluorine
:param emulator: return the capabilities for the given emulator binary
:param arch: return the capabilities for the given CPU architecture
@@ -5611,7 +5676,7 @@ def all_capabilities(**kwargs):
"""
Return the host and domain capabilities in a single call.
- .. versionadded:: 3001
+ .. versionadded:: Neon
:param connection: libvirt connection URI, overriding defaults
:param username: username to connect with, overriding defaults
@@ -5625,6 +5690,7 @@ def all_capabilities(**kwargs):
"""
conn = __get_conn(**kwargs)
+ result = {}
try:
host_caps = ElementTree.fromstring(conn.getCapabilities())
domains = [
@@ -5653,10 +5719,11 @@ def all_capabilities(**kwargs):
for (arch, domain) in flattened
],
}
- return result
finally:
conn.close()
+ return result
+
+
def cpu_baseline(full=False, migratable=False, out='libvirt', **kwargs):
'''
Return the optimal 'custom' CPU baseline config for VM's on this minion
def cpu_baseline(full=False, migratable=False, out="libvirt", **kwargs):
"""
diff --git a/tests/unit/modules/test_virt.py b/tests/unit/modules/test_virt.py
index 32f4302e5f..94372c6d72 100644
index cce107c9e4..e9e73d7b5d 100644
--- a/tests/unit/modules/test_virt.py
+++ b/tests/unit/modules/test_virt.py
@@ -2216,6 +2216,62 @@ class VirtTestCase(TestCase, LoaderModuleMockMixin):
@@ -4063,7 +4063,7 @@ class VirtTestCase(TestCase, LoaderModuleMockMixin):
"44454c4c-3400-105a-8033-b3c04f4b344a", caps["host"]["host"]["uuid"]
)
self.assertEqual(
- {"qemu", "kvm"}, {domainCaps["domain"] for domainCaps in caps["domains"]},
+ {"qemu", "kvm"}, {domainCaps["domain"] for domainCaps in caps["domains"]}
)
self.assertEqual(expected, caps)
+ def test_all_capabilities(self):
+ '''
+ Test the virt.domain_capabilities default output
+ '''
+ domainXml = '''
+<domainCapabilities>
+ <path>/usr/bin/qemu-system-x86_64</path>
+ <domain>kvm</domain>
+ <machine>virt-2.12</machine>
+ <arch>x86_64</arch>
+ <vcpu max='255'/>
+ <iothreads supported='yes'/>
+</domainCapabilities>
+ '''
+ hostXml = '''
+<capabilities>
+ <host>
+ <uuid>44454c4c-3400-105a-8033-b3c04f4b344a</uuid>
+ <cpu>
+ <arch>x86_64</arch>
+ <model>Nehalem</model>
+ <vendor>Intel</vendor>
+ <microcode version='25'/>
+ <topology sockets='1' cores='4' threads='2'/>
+ </cpu>
+ </host>
+ <guest>
+ <os_type>hvm</os_type>
+ <arch name='x86_64'>
+ <wordsize>64</wordsize>
+ <emulator>/usr/bin/qemu-system-x86_64</emulator>
+ <machine maxCpus='255'>pc-i440fx-2.6</machine>
+ <machine canonical='pc-i440fx-2.6' maxCpus='255'>pc</machine>
+ <machine maxCpus='255'>pc-0.12</machine>
+ <domain type='qemu'/>
+ <domain type='kvm'>
+ <emulator>/usr/bin/qemu-kvm</emulator>
+ <machine maxCpus='255'>pc-i440fx-2.6</machine>
+ <machine canonical='pc-i440fx-2.6' maxCpus='255'>pc</machine>
+ <machine maxCpus='255'>pc-0.12</machine>
+ </domain>
+ </arch>
+ </guest>
+</capabilities>
+ '''
+
+ # pylint: disable=no-member
+ self.mock_conn.getCapabilities.return_value = hostXml
+ self.mock_conn.getDomainCapabilities.side_effect = [
+ domainXml, domainXml.replace('<domain>kvm', '<domain>qemu')]
+ # pylint: enable=no-member
+
+ caps = virt.all_capabilities()
+ self.assertEqual('44454c4c-3400-105a-8033-b3c04f4b344a', caps['host']['host']['uuid'])
+ self.assertEqual(set(['qemu', 'kvm']), set([domainCaps['domain'] for domainCaps in caps['domains']]))
+
def test_network_tag(self):
'''
Test virt._get_net_xml() with VLAN tag
--
2.16.4
2.29.2

View File

@ -1,4 +1,4 @@
From 2182f2cbc835fee8a95101ce0c722d582b7456aa Mon Sep 17 00:00:00 2001
From 12d67e0cfa54399f3a0b6ae0d4faa09793fa2b0f Mon Sep 17 00:00:00 2001
From: Jochen Breuer <jbreuer@suse.de>
Date: Wed, 1 Apr 2020 16:13:23 +0200
Subject: [PATCH] Adds explicit type cast for port
@ -12,22 +12,22 @@ The type casting to int solves this issue.
1 file changed, 2 insertions(+), 2 deletions(-)
diff --git a/salt/utils/network.py b/salt/utils/network.py
index d6543ff160..def997f3dc 100644
index 25b2d06758..1705a5809d 100644
--- a/salt/utils/network.py
+++ b/salt/utils/network.py
@@ -1457,9 +1457,9 @@ def _netlink_tool_remote_on(port, which_end):
local_host, local_port = chunks[3].rsplit(':', 1)
remote_host, remote_port = chunks[4].rsplit(':', 1)
@@ -1626,9 +1626,9 @@ def _netlink_tool_remote_on(port, which_end):
local_host, local_port = chunks[3].rsplit(":", 1)
remote_host, remote_port = chunks[4].rsplit(":", 1)
- if which_end == 'remote_port' and int(remote_port) != port:
+ if which_end == 'remote_port' and int(remote_port) != int(port):
- if which_end == "remote_port" and int(remote_port) != port:
+ if which_end == "remote_port" and int(remote_port) != int(port):
continue
- if which_end == 'local_port' and int(local_port) != port:
+ if which_end == 'local_port' and int(local_port) != int(port):
- if which_end == "local_port" and int(local_port) != port:
+ if which_end == "local_port" and int(local_port) != int(port):
continue
remotes.add(remote_host.strip("[]"))
--
2.16.4
2.29.2

View File

@ -1,4 +1,4 @@
From 206a2f7c4c1104f2f35dfa2c0b775bef4adc5b91 Mon Sep 17 00:00:00 2001
From 125f973014b8d5ffa13ae7dd231043e39af75ea0 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Pablo=20Su=C3=A1rez=20Hern=C3=A1ndez?=
<psuarezhernandez@suse.com>
Date: Wed, 3 Jul 2019 09:34:50 +0100
@ -7,62 +7,23 @@ Subject: [PATCH] Allow passing kwargs to pkg.list_downloaded
Add unit test for pkg.list_downloaded with kwargs
---
salt/modules/zypperpkg.py | 2 +-
tests/unit/modules/test_zypperpkg.py | 27 +++++++++++++++++++++++++++
2 files changed, 28 insertions(+), 1 deletion(-)
salt/modules/zypperpkg.py | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/salt/modules/zypperpkg.py b/salt/modules/zypperpkg.py
index 582caffb59..3760b525e7 100644
index 75cb5ce4a8..c996935bff 100644
--- a/salt/modules/zypperpkg.py
+++ b/salt/modules/zypperpkg.py
@@ -2557,7 +2557,7 @@ def download(*packages, **kwargs):
@@ -2754,7 +2754,7 @@ def download(*packages, **kwargs):
)
-def list_downloaded(root=None):
+def list_downloaded(root=None, **kwargs):
'''
"""
.. versionadded:: 2017.7.0
diff --git a/tests/unit/modules/test_zypperpkg.py b/tests/unit/modules/test_zypperpkg.py
index 3a6466f061..12c22bfcb2 100644
--- a/tests/unit/modules/test_zypperpkg.py
+++ b/tests/unit/modules/test_zypperpkg.py
@@ -767,6 +767,33 @@ Repository 'DUMMY' not found by its alias, number, or URI.
self.assertEqual(len(list_patches), 3)
self.assertDictEqual(list_patches, PATCHES_RET)
+ @patch('salt.utils.path.os_walk', MagicMock(return_value=[('test', 'test', 'test')]))
+ @patch('os.path.getsize', MagicMock(return_value=123456))
+ @patch('os.path.getctime', MagicMock(return_value=1234567890.123456))
+ @patch('fnmatch.filter', MagicMock(return_value=['/var/cache/zypper/packages/foo/bar/test_package.rpm']))
+ def test_list_downloaded_with_kwargs(self):
+ '''
+ Test downloaded packages listing.
+
+ :return:
+ '''
+ DOWNLOADED_RET = {
+ 'test-package': {
+ '1.0': {
+ 'path': '/var/cache/zypper/packages/foo/bar/test_package.rpm',
+ 'size': 123456,
+ 'creation_date_time_t': 1234567890,
+ 'creation_date_time': '2009-02-13T23:31:30',
+ }
+ }
+ }
+
+ with patch.dict(zypper.__salt__, {'lowpkg.bin_pkg_info': MagicMock(return_value={'name': 'test-package',
+ 'version': '1.0'})}):
+ list_downloaded = zypper.list_downloaded(kw1=True, kw2=False)
+ self.assertEqual(len(list_downloaded), 1)
+ self.assertDictEqual(list_downloaded, DOWNLOADED_RET)
+
@patch('salt.utils.path.os_walk', MagicMock(return_value=[('test', 'test', 'test')]))
@patch('os.path.getsize', MagicMock(return_value=123456))
@patch('os.path.getctime', MagicMock(return_value=1234567890.123456))
--
2.16.4
2.29.2

File diff suppressed because it is too large Load Diff

View File

@ -1,4 +1,4 @@
From b8226467e665650a0587b8fd64242faefb805e13 Mon Sep 17 00:00:00 2001
From 85464ec6c34fceee3379d268745c3294d27e7fb4 Mon Sep 17 00:00:00 2001
From: Steve Kowalik <steven@wedontsleep.org>
Date: Mon, 17 Feb 2020 15:34:00 +1100
Subject: [PATCH] Apply patch from upstream to support Python 3.8
@ -7,15 +7,12 @@ Apply saltstack/salt#56031 to support Python 3.8, which removed a
deprecated module and changed some behaviour. Add a {Build,}Requires on
python-distro, since it is now required.
---
pkg/suse/salt.spec | 2 ++
salt/config/__init__.py | 4 +++-
salt/grains/core.py | 16 ++++++++--------
salt/renderers/stateconf.py | 8 ++++----
tests/unit/modules/test_virt.py | 2 +-
5 files changed, 18 insertions(+), 14 deletions(-)
pkg/suse/salt.spec | 2 ++
salt/renderers/stateconf.py | 49 ++++++++++++++++---------------------
2 files changed, 23 insertions(+), 28 deletions(-)
diff --git a/pkg/suse/salt.spec b/pkg/suse/salt.spec
index e3e678af3b..0f6a9bc012 100644
index a17d2381ce..0df9d6c283 100644
--- a/pkg/suse/salt.spec
+++ b/pkg/suse/salt.spec
@@ -62,6 +62,7 @@ BuildRequires: python-psutil
@ -34,95 +31,205 @@ index e3e678af3b..0f6a9bc012 100644
%if 0%{?suse_version}
# requirements/opt.txt (not all)
Recommends: python-MySQL-python
diff --git a/salt/config/__init__.py b/salt/config/__init__.py
index 0ebe1181dd..f484d94e7e 100644
--- a/salt/config/__init__.py
+++ b/salt/config/__init__.py
@@ -3196,7 +3196,9 @@ def apply_cloud_providers_config(overrides, defaults=None):
# Merge provided extends
keep_looping = False
for alias, entries in six.iteritems(providers.copy()):
- for driver, details in six.iteritems(entries):
+ for driver in list(six.iterkeys(entries)):
+ # Don't use iteritems, because the values of the dictionary will be changed
+ details = entries[driver]
if 'extends' not in details:
# Extends resolved or non existing, continue!
diff --git a/salt/grains/core.py b/salt/grains/core.py
index f410985198..358b66fdb0 100644
--- a/salt/grains/core.py
+++ b/salt/grains/core.py
@@ -40,20 +40,20 @@ except ImportError:
__proxyenabled__ = ['*']
__FQDN__ = None
-# Extend the default list of supported distros. This will be used for the
-# /etc/DISTRO-release checking that is part of linux_distribution()
-from platform import _supported_dists
-_supported_dists += ('arch', 'mageia', 'meego', 'vmware', 'bluewhite64',
- 'slamd64', 'ovs', 'system', 'mint', 'oracle', 'void')
-
# linux_distribution deprecated in py3.7
try:
from platform import linux_distribution as _deprecated_linux_distribution
+ # Extend the default list of supported distros. This will be used for the
+ # /etc/DISTRO-release checking that is part of linux_distribution()
+ from platform import _supported_dists
+ _supported_dists += ('arch', 'mageia', 'meego', 'vmware', 'bluewhite64',
+ 'slamd64', 'ovs', 'system', 'mint', 'oracle', 'void')
+
def linux_distribution(**kwargs):
with warnings.catch_warnings():
warnings.simplefilter("ignore")
- return _deprecated_linux_distribution(**kwargs)
+ return _deprecated_linux_distribution(supported_dists=_supported_dists, **kwargs)
except ImportError:
from distro import linux_distribution
@@ -1976,7 +1976,7 @@ def os_data():
)
(osname, osrelease, oscodename) = \
[x.strip('"').strip("'") for x in
- linux_distribution(supported_dists=_supported_dists)]
+ linux_distribution()]
# Try to assign these three names based on the lsb info, they tend to
# be more accurate than what python gets from /etc/DISTRO-release.
# It's worth noting that Ubuntu has patched their Python distribution
diff --git a/salt/renderers/stateconf.py b/salt/renderers/stateconf.py
index cfce9e6926..5c8a8322ed 100644
index 298ae28338..f0527d51d7 100644
--- a/salt/renderers/stateconf.py
+++ b/salt/renderers/stateconf.py
@@ -224,10 +224,10 @@ def render(input, saltenv='base', sls='', argline='', **kws):
tmplctx = STATE_CONF.copy()
if tmplctx:
prefix = sls + '::'
- for k in six.iterkeys(tmplctx): # iterate over a copy of keys
- if k.startswith(prefix):
- tmplctx[k[len(prefix):]] = tmplctx[k]
- del tmplctx[k]
+ tmplctx = {
+ k[len(prefix):] if k.startswith(prefix) else k: v
+ for k, v in six.iteritems(tmplctx)
+ }
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
"""
A flexible renderer that takes a templating engine and a data format
@@ -26,8 +25,6 @@ A flexible renderer that takes a templating engine and a data format
# - apache: >= 0.1.0
#
-# Import python libs
-from __future__ import absolute_import, print_function, unicode_literals
import copy
import getopt
@@ -36,12 +33,9 @@ import os
import re
from itertools import chain
-# Import salt libs
import salt.utils.files
import salt.utils.stringutils
from salt.exceptions import SaltRenderError
-
-# Import 3rd-party libs
from salt.ext import six
from salt.ext.six.moves import StringIO # pylint: disable=import-error
@@ -135,7 +129,7 @@ def render(input, saltenv="base", sls="", argline="", **kws):
sid = has_names_decls(data)
if sid:
raise SaltRenderError(
- "'names' declaration(found in state id: {0}) is "
+ "'names' declaration(found in state id: {}) is "
"not supported with implicitly ordered states! You "
"should generate the states in a template for-loop "
"instead.".format(sid)
@@ -203,11 +197,11 @@ def render(input, saltenv="base", sls="", argline="", **kws):
name, rt_argline = (args[1] + " ").split(" ", 1)
render_template = renderers[name] # e.g., the mako renderer
except KeyError as err:
- raise SaltRenderError("Renderer: {0} is not available!".format(err))
+ raise SaltRenderError("Renderer: {} is not available!".format(err))
except IndexError:
raise INVALID_USAGE_ERROR
- if isinstance(input, six.string_types):
+ if isinstance(input, str):
with salt.utils.files.fopen(input, "r") as ifile:
sls_templ = salt.utils.stringutils.to_unicode(ifile.read())
else: # assume file-like
@@ -227,7 +221,7 @@ def render(input, saltenv="base", sls="", argline="", **kws):
prefix = sls + "::"
tmplctx = {
k[len(prefix) :] if k.startswith(prefix) else k: v
- for k, v in six.iteritems(tmplctx)
+ for k, v in tmplctx.items()
}
else:
tmplctx = {}
@@ -262,8 +256,8 @@ def rewrite_single_shorthand_state_decl(data): # pylint: disable=C0103
state_id_decl:
state.func: []
"""
- for sid, states in six.iteritems(data):
- if isinstance(states, six.string_types):
+ for sid, states in data.items():
+ if isinstance(states, str):
data[sid] = {states: []}
diff --git a/tests/unit/modules/test_virt.py b/tests/unit/modules/test_virt.py
index 94372c6d72..d762dcc479 100644
--- a/tests/unit/modules/test_virt.py
+++ b/tests/unit/modules/test_virt.py
@@ -1256,7 +1256,7 @@ class VirtTestCase(TestCase, LoaderModuleMockMixin):
<alias name='net1'/>
<address type='pci' domain='0x0000' bus='0x00' slot='0x03' function='0x1'/>
</interface>
- <graphics type='spice' port='5900' autoport='yes' listen='127.0.0.1'>
+ <graphics type='spice' listen='127.0.0.1' autoport='yes'>
<listen type='address' address='127.0.0.1'/>
</graphics>
<video>
@@ -328,7 +322,7 @@ def nvlist(thelist, names=None):
for nvitem in thelist:
if isinstance(nvitem, dict):
# then nvitem is a name-value item(a dict) of the list.
- name, value = next(six.iteritems(nvitem))
+ name, value = next(iter(nvitem.items()))
if names is None or name in names:
yield nvitem, name, value
@@ -349,17 +343,16 @@ def nvlist2(thelist, names=None):
"""
for _, _, value in nvlist(thelist, names):
- for each in nvlist(value):
- yield each
+ yield from nvlist(value)
def statelist(states_dict, sid_excludes=frozenset(["include", "exclude"])):
- for sid, states in six.iteritems(states_dict):
+ for sid, states in states_dict.items():
if sid.startswith("__"):
continue
if sid in sid_excludes:
continue
- for sname, args in six.iteritems(states):
+ for sname, args in states.items():
if sname.startswith("__"):
continue
yield sid, states, sname, args
@@ -401,11 +394,11 @@ def rename_state_ids(data, sls, is_extend=False):
newsid = _local_to_abs_sid(sid, sls)
if newsid in data:
raise SaltRenderError(
- "Can't rename state id({0}) into {1} because the later "
+ "Can't rename state id({}) into {} because the later "
"already exists!".format(sid, newsid)
)
# add a '- name: sid' to those states without '- name'.
- for sname, args in six.iteritems(data[sid]):
+ for sname, args in data[sid].items():
if state_name(sname) == STATE_NAME:
continue
for arg in args:
@@ -430,7 +423,7 @@ EXTENDED_REQUIRE_IN = {}
# explicit require_in/watch_in/listen_in/onchanges_in/onfail_in can only contain states after it
def add_implicit_requires(data):
def T(sid, state): # pylint: disable=C0103
- return "{0}:{1}".format(sid, state_name(state))
+ return "{}:{}".format(sid, state_name(state))
states_before = set()
states_after = set()
@@ -462,7 +455,7 @@ def add_implicit_requires(data):
for _, rstate, rsid in reqs:
if T(rsid, rstate) in states_after:
raise SaltRenderError(
- "State({0}) can't require/watch/listen/onchanges/onfail a state({1}) defined "
+ "State({}) can't require/watch/listen/onchanges/onfail a state({}) defined "
"after it!".format(tag, T(rsid, rstate))
)
@@ -472,7 +465,7 @@ def add_implicit_requires(data):
for _, rstate, rsid in reqs:
if T(rsid, rstate) in states_before:
raise SaltRenderError(
- "State({0}) can't require_in/watch_in/listen_in/onchanges_in/onfail_in a state({1}) "
+ "State({}) can't require_in/watch_in/listen_in/onchanges_in/onfail_in a state({}) "
"defined before it!".format(tag, T(rsid, rstate))
)
@@ -492,7 +485,7 @@ def add_start_state(data, sls):
start_sid = __opts__["stateconf_start_state"]
if start_sid in data:
raise SaltRenderError(
- "Can't generate start state({0})! The same state id already "
+ "Can't generate start state({})! The same state id already "
"exists!".format(start_sid)
)
if not data:
@@ -502,14 +495,14 @@ def add_start_state(data, sls):
# no __sls__, or it's the first state whose id declaration has a
# __sls__ == sls.
non_sids = ("include", "exclude", "extend")
- for sid, states in six.iteritems(data):
+ for sid, states in data.items():
if sid in non_sids or sid.startswith("__"):
continue
if "__sls__" not in states or states["__sls__"] == sls:
break
else:
raise SaltRenderError("Can't determine the first state in the sls file!")
- reqin = {state_name(next(six.iterkeys(data[sid]))): sid}
+ reqin = {state_name(next(iter(data[sid].keys()))): sid}
data[start_sid] = {STATE_FUNC: [{"require_in": [reqin]}]}
@@ -517,7 +510,7 @@ def add_goal_state(data):
goal_sid = __opts__["stateconf_goal_state"]
if goal_sid in data:
raise SaltRenderError(
- "Can't generate goal state({0})! The same state id already "
+ "Can't generate goal state({})! The same state id already "
"exists!".format(goal_sid)
)
else:
@@ -561,7 +554,7 @@ STATE_CONF_EXT = {} # stateconf.set under extend: ...
def extract_state_confs(data, is_extend=False):
- for state_id, state_dict in six.iteritems(data):
+ for state_id, state_dict in data.items():
if state_id == "extend" and not is_extend:
extract_state_confs(state_dict, True)
continue
@@ -578,7 +571,7 @@ def extract_state_confs(data, is_extend=False):
for sdk in state_dict[key]:
if not isinstance(sdk, dict):
continue
- key, val = next(six.iteritems(sdk))
+ key, val = next(iter(sdk.items()))
conf[key] = val
if not is_extend and state_id in STATE_CONF_EXT:
--
2.16.4
2.29.2

File diff suppressed because it is too large Load Diff

View File

@ -1,4 +1,4 @@
From 638ad2baa04e96f744f97c97f3840151937e8aac Mon Sep 17 00:00:00 2001
From 3c83bab3da101223c99af1f9ee2f3bf5e97be3f8 Mon Sep 17 00:00:00 2001
From: Hubert Mantel <mantel@suse.de>
Date: Mon, 27 Nov 2017 13:55:13 +0100
Subject: [PATCH] avoid excessive syslogging by watchdog cronjob (#58)
@ -21,6 +21,6 @@ index 2e418094ed..73a91ebd62 100755
/usr/bin/salt-daemon-watcher --with-init & disown
fi
--
2.16.4
2.29.2

View File

@ -1,34 +0,0 @@
From f942aeb3eb64b99cd9432bebf021835ade46df74 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Pablo=20Su=C3=A1rez=20Hern=C3=A1ndez?=
<psuarezhernandez@suse.com>
Date: Thu, 28 May 2020 16:38:04 +0100
Subject: [PATCH] Avoid HAS_DOCKER true if import messes with
salt.utils.docker (bsc#1172075)
---
salt/modules/swarm.py | 6 +++++-
1 file changed, 5 insertions(+), 1 deletion(-)
diff --git a/salt/modules/swarm.py b/salt/modules/swarm.py
index ea327ce640040bdbd7e7077bc6bbb59a9f0ade4a..6f16f41ece01738f3a04d11211fa5e96cd8155b4 100644
--- a/salt/modules/swarm.py
+++ b/salt/modules/swarm.py
@@ -30,9 +30,13 @@ from __future__ import absolute_import, unicode_literals, print_function
# Import Salt libs
import salt.utils.json
+HAS_DOCKER = False
+
try:
import docker
- HAS_DOCKER = True
+
+ if hasattr(docker, "from_env"):
+ HAS_DOCKER = True
except ImportError:
HAS_DOCKER = False
--
2.23.0

View File

@ -1,4 +1,4 @@
From e45658e074fbf8c038816dc56b86c3daf33d6ebc Mon Sep 17 00:00:00 2001
From 2fecfe18cf17389714ab5bed0ff59bec2d1e1c36 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Pablo=20Su=C3=A1rez=20Hern=C3=A1ndez?=
<psuarezhernandez@suse.com>
Date: Mon, 29 Jul 2019 11:17:53 +0100
@ -7,29 +7,33 @@ Subject: [PATCH] Avoid traceback when http.query request cannot be
Improve error logging when http.query cannot be performed
---
salt/utils/http.py | 4 +++-
1 file changed, 3 insertions(+), 1 deletion(-)
salt/utils/http.py | 7 ++++++-
1 file changed, 6 insertions(+), 1 deletion(-)
diff --git a/salt/utils/http.py b/salt/utils/http.py
index dee0563679..c2fdffb266 100644
index 5ab4503f61..9522bd6ee4 100644
--- a/salt/utils/http.py
+++ b/salt/utils/http.py
@@ -580,11 +580,13 @@ def query(url,
@@ -628,12 +628,17 @@ def query(
except salt.ext.tornado.httpclient.HTTPError as exc:
ret['status'] = exc.code
ret['error'] = six.text_type(exc)
+ log.error("Cannot perform 'http.query': {0} - {1}".format(url_full, ret['error']))
ret["status"] = exc.code
ret["error"] = str(exc)
+ log.error(
+ "Cannot perform 'http.query': {} - {}".format(url_full, ret["error"])
+ )
return ret
- except socket.gaierror as exc:
+ except (socket.herror, socket.error, socket.timeout, socket.gaierror) as exc:
except (socket.herror, OSError, socket.timeout, socket.gaierror) as exc:
if status is True:
ret['status'] = 0
ret['error'] = six.text_type(exc)
+ log.error("Cannot perform 'http.query': {0} - {1}".format(url_full, ret['error']))
ret["status"] = 0
ret["error"] = str(exc)
- log.debug("Cannot perform 'http.query': %s - %s", url_full, ret["error"])
+ log.error(
+ "Cannot perform 'http.query': {} - {}".format(url_full, ret["error"])
+ )
return ret
if stream is True or handle is True:
--
2.16.4
2.29.2

View File

@ -1,4 +1,4 @@
From f45df684fe68a93a7003aca2189479b0d0240305 Mon Sep 17 00:00:00 2001
From acee2074e9fe4da2731e61a554639e773c04e43a Mon Sep 17 00:00:00 2001
From: Cedric Bosdonnat <cbosdonnat@suse.com>
Date: Mon, 5 Oct 2020 16:49:59 +0200
Subject: [PATCH] Backport a few virt PRs (#272)
@ -39,13 +39,13 @@ Co-authored-by: gqlo <escita@pm.me>
---
changelog/57639.added | 1 +
changelog/58589.added | 1 +
salt/modules/virt.py | 303 +++++++++++++++++++--
salt/states/virt.py | 73 +++++-
salt/modules/virt.py | 284 ++++++++++++++++++--
salt/states/virt.py | 71 ++++-
salt/templates/virt/libvirt_domain.jinja | 30 ++-
salt/utils/xmlutil.py | 4 +-
tests/unit/modules/test_virt.py | 320 ++++++++++++++++++++++-
tests/unit/states/test_virt.py | 19 +-
8 files changed, 704 insertions(+), 47 deletions(-)
salt/utils/xmlutil.py | 2 +-
tests/unit/modules/test_virt.py | 318 ++++++++++++++++++++++-
tests/unit/states/test_virt.py | 14 +-
8 files changed, 687 insertions(+), 34 deletions(-)
create mode 100644 changelog/57639.added
create mode 100644 changelog/58589.added
@ -64,10 +64,10 @@ index 0000000000..5960555ec6
@@ -0,0 +1 @@
+Allow handling special first boot definition on virtual machine
diff --git a/salt/modules/virt.py b/salt/modules/virt.py
index 34643787f9..87ab7ca12d 100644
index e306bc0679..8e2180608a 100644
--- a/salt/modules/virt.py
+++ b/salt/modules/virt.py
@@ -71,13 +71,57 @@ The calls not using the libvirt connection setup are:
@@ -71,6 +71,50 @@ The calls not using the libvirt connection setup are:
- `libvirt URI format <http://libvirt.org/uri.html#URI_config>`_
- `libvirt authentication configuration <http://libvirt.org/auth.html#Auth_client_config>`_
@ -118,26 +118,7 @@ index 34643787f9..87ab7ca12d 100644
"""
# Special Thanks to Michael Dehann, many of the concepts, and a few structures
# of his in the virt func module have been used
-# Import python libs
import base64
+import collections
import copy
import datetime
import logging
@@ -89,10 +133,8 @@ import subprocess
import sys
import time
-# Import third party libs
import jinja2.exceptions
-# Import salt libs
import salt.utils.data
import salt.utils.files
import salt.utils.json
@@ -725,6 +767,39 @@ def _disk_from_pool(conn, pool, pool_xml, volume_name):
@@ -719,6 +763,39 @@ def _disk_from_pool(conn, pool, pool_xml, volume_name):
return disk_context
@ -145,7 +126,7 @@ index 34643787f9..87ab7ca12d 100644
+ """
+ Handle the unit conversion, return the value in bytes
+ """
+ m = re.match(r"(?P<value>[0-9.]*)\s*(?P<unit>.*)$", six.text_type(s).strip())
+ m = re.match(r"(?P<value>[0-9.]*)\s*(?P<unit>.*)$", str(s).strip())
+ value = m.group("value")
+ # default unit
+ unit = m.group("unit").lower() or def_unit
@ -177,7 +158,7 @@ index 34643787f9..87ab7ca12d 100644
def _gen_xml(
conn,
name,
@@ -738,18 +813,32 @@ def _gen_xml(
@@ -732,18 +809,32 @@ def _gen_xml(
graphics=None,
boot=None,
boot_dev=None,
@ -191,28 +172,28 @@ index 34643787f9..87ab7ca12d 100644
context = {
"hypervisor": hypervisor,
"name": name,
"cpu": six.text_type(cpu),
- "mem": six.text_type(mem),
"cpu": str(cpu),
- "mem": str(mem),
+ "on_reboot": "destroy" if stop_on_reboot else "restart",
}
+
+ context["mem"] = nesthash()
+ if isinstance(mem, int):
+ mem = int(mem) * 1024 # MB
+ context["mem"]["boot"] = six.text_type(mem)
+ context["mem"]["current"] = six.text_type(mem)
+ context["mem"]["boot"] = str(mem)
+ context["mem"]["current"] = str(mem)
+ elif isinstance(mem, dict):
+ for tag, val in six.iteritems(mem):
+ for tag, val in mem.items():
+ if val:
+ if tag == "slots":
+ context["mem"]["slots"] = "{}='{}'".format(tag, val)
+ else:
+ context["mem"][tag] = six.text_type(int(_handle_unit(val) / 1024))
+ context["mem"][tag] = str(int(_handle_unit(val) / 1024))
+
if hypervisor in ["qemu", "kvm"]:
context["controller_model"] = False
elif hypervisor == "vmware":
@@ -869,7 +958,6 @@ def _gen_xml(
@@ -863,7 +954,6 @@ def _gen_xml(
except jinja2.exceptions.TemplateNotFound:
log.error("Could not load template %s", fn_)
return ""
@ -220,7 +201,7 @@ index 34643787f9..87ab7ca12d 100644
return template.render(**context)
@@ -1668,6 +1756,7 @@ def init(
@@ -1662,6 +1752,7 @@ def init(
arch=None,
boot=None,
boot_dev=None,
@ -228,7 +209,7 @@ index 34643787f9..87ab7ca12d 100644
**kwargs
):
"""
@@ -1675,7 +1764,28 @@ def init(
@@ -1669,7 +1760,28 @@ def init(
:param name: name of the virtual machine to create
:param cpu: Number of virtual CPUs to assign to the virtual machine
@ -258,7 +239,7 @@ index 34643787f9..87ab7ca12d 100644
:param nic: NIC profile to use (Default: ``'default'``).
The profile interfaces can be customized / extended with the interfaces parameter.
If set to ``None``, no profile will be used.
@@ -1732,6 +1842,15 @@ def init(
@@ -1726,6 +1838,15 @@ def init(
:param password: password to connect with, overriding defaults
.. versionadded:: 2019.2.0
@ -274,7 +255,7 @@ index 34643787f9..87ab7ca12d 100644
:param boot:
Specifies kernel, initial ramdisk and kernel command line parameters for the virtual machine.
This is an optional parameter, all of the keys are optional within the dictionary. The structure of
@@ -1788,6 +1907,36 @@ def init(
@@ -1782,6 +1903,36 @@ def init(
.. versionadded:: sodium
@ -311,7 +292,7 @@ index 34643787f9..87ab7ca12d 100644
.. _init-nic-def:
.. rubric:: Network Interfaces Definitions
@@ -2082,6 +2231,7 @@ def init(
@@ -2076,6 +2227,7 @@ def init(
graphics,
boot,
boot_dev,
@ -319,7 +300,7 @@ index 34643787f9..87ab7ca12d 100644
**kwargs
)
log.debug("New virtual machine definition: %s", vm_xml)
@@ -2311,6 +2461,7 @@ def update(
@@ -2305,6 +2457,7 @@ def update(
boot=None,
test=False,
boot_dev=None,
@ -327,33 +308,25 @@ index 34643787f9..87ab7ca12d 100644
**kwargs
):
"""
@@ -2318,7 +2469,24 @@ def update(
@@ -2312,7 +2465,7 @@ def update(
:param name: Name of the domain to update
:param cpu: Number of virtual CPUs to assign to the virtual machine
- :param mem: Amount of memory to allocate to the virtual machine in MiB.
- :param mem: Amount of memory to allocate to the virtual machine in MiB. Since 3002, a dictionary can be used to
+ :param mem: Amount of memory to allocate to the virtual machine in MiB. Since Magnesium, a dictionary can be used to
+ contain detailed configuration which support memory allocation or tuning. Supported parameters are ``boot``,
+ ``current``, ``max``, ``slots``, ``hard_limit``, ``soft_limit``, ``swap_hard_limit`` and ``min_guarantee``. The
+ structure of the dictionary is documented in :ref:`init-mem-def`. Both decimal and binary base are supported.
+ Detail unit specification is documented in :ref:`virt-units`. Please note that the value for ``slots`` must be
+ an integer.
+
+ To remove any parameters, pass a None object, for instance: 'soft_limit': ``None``. Please note that ``None``
+ is mapped to ``null`` in sls file, pass ``null`` in sls file instead.
+
+ .. code-block:: yaml
+
+ - mem:
+ hard_limit: null
+ soft_limit: null
+
contain detailed configuration which support memory allocation or tuning. Supported parameters are ``boot``,
``current``, ``max``, ``slots``, ``hard_limit``, ``soft_limit``, ``swap_hard_limit`` and ``min_guarantee``. The
structure of the dictionary is documented in :ref:`init-mem-def`. Both decimal and binary base are supported.
@@ -2328,7 +2481,7 @@ def update(
hard_limit: null
soft_limit: null
- .. versionchanged:: 3002
+ .. versionchanged:: Magnesium
+
:param disk_profile: disk profile to use
:param disks:
Disk definitions as documented in the :func:`init` function.
@@ -2375,6 +2543,14 @@ def update(
@@ -2386,6 +2539,14 @@ def update(
.. versionadded:: Magnesium
@ -368,7 +341,7 @@ index 34643787f9..87ab7ca12d 100644
:param test: run in dry-run mode if set to True
.. versionadded:: sodium
@@ -2438,6 +2614,8 @@ def update(
@@ -2449,6 +2610,8 @@ def update(
desc.find(".//os/type").get("arch"),
graphics,
boot,
@ -377,13 +350,13 @@ index 34643787f9..87ab7ca12d 100644
**kwargs
)
)
@@ -2458,12 +2636,26 @@ def update(
@@ -2469,12 +2632,26 @@ def update(
def _set_nvram(node, value):
node.set("template", value)
- def _set_with_mib_unit(node, value):
+ def _set_with_byte_unit(node, value):
node.text = six.text_type(value)
node.text = str(value)
- node.set("unit", "MiB")
+ node.set("unit", "bytes")
+
@ -406,7 +379,7 @@ index 34643787f9..87ab7ca12d 100644
{"path": "boot:kernel", "xpath": "os/kernel"},
{"path": "boot:initrd", "xpath": "os/initrd"},
{"path": "boot:cmdline", "xpath": "os/cmdline"},
@@ -2473,14 +2665,72 @@ def update(
@@ -2484,14 +2661,72 @@ def update(
{
"path": "mem",
"xpath": "memory",
@ -483,7 +456,7 @@ index 34643787f9..87ab7ca12d 100644
},
{
"path": "boot_dev:{dev}",
@@ -2566,13 +2816,24 @@ def update(
@@ -2577,13 +2812,24 @@ def update(
}
)
if mem:
@ -516,24 +489,10 @@ index 34643787f9..87ab7ca12d 100644
# Look for removable device source changes
new_disks = []
diff --git a/salt/states/virt.py b/salt/states/virt.py
index 1a0c889d58..740f6c5746 100644
index df7ebb63e6..20ea1c25f1 100644
--- a/salt/states/virt.py
+++ b/salt/states/virt.py
@@ -11,13 +11,11 @@ for the generation and signing of certificates for systems running libvirt:
virt.keys
"""
-# Import Python libs
import fnmatch
import logging
import os
-# Import Salt libs
import salt.utils.args
import salt.utils.files
import salt.utils.stringutils
@@ -290,6 +288,8 @@ def defined(
@@ -289,6 +289,8 @@ def defined(
boot=None,
update=True,
boot_dev=None,
@ -542,7 +501,7 @@ index 1a0c889d58..740f6c5746 100644
):
"""
Starts an existing guest, or defines and starts a new VM with specified arguments.
@@ -298,7 +298,28 @@ def defined(
@@ -297,7 +299,28 @@ def defined(
:param name: name of the virtual machine to run
:param cpu: number of CPUs for the virtual machine to create
@ -572,7 +531,7 @@ index 1a0c889d58..740f6c5746 100644
:param vm_type: force virtual machine type for the new VM. The default value is taken from
the host capabilities. This could be useful for example to use ``'qemu'`` type instead
of the ``'kvm'`` one.
@@ -358,6 +379,20 @@ def defined(
@@ -357,6 +380,20 @@ def defined(
.. versionadded:: Magnesium
@ -593,7 +552,7 @@ index 1a0c889d58..740f6c5746 100644
.. rubric:: Example States
Make sure a virtual machine called ``domain_name`` is defined:
@@ -415,13 +450,14 @@ def defined(
@@ -414,13 +451,14 @@ def defined(
nic_profile=nic_profile,
interfaces=interfaces,
graphics=graphics,
@ -609,7 +568,7 @@ index 1a0c889d58..740f6c5746 100644
)
ret["changes"][name] = status
if not status.get("definition"):
@@ -457,6 +493,7 @@ def defined(
@@ -456,6 +494,7 @@ def defined(
boot=boot,
start=False,
boot_dev=boot_dev,
@ -617,7 +576,7 @@ index 1a0c889d58..740f6c5746 100644
)
ret["changes"][name] = {"definition": True}
ret["comment"] = "Domain {} defined".format(name)
@@ -490,6 +527,7 @@ def running(
@@ -489,6 +528,7 @@ def running(
arch=None,
boot=None,
boot_dev=None,
@ -625,7 +584,7 @@ index 1a0c889d58..740f6c5746 100644
):
"""
Starts an existing guest, or defines and starts a new VM with specified arguments.
@@ -498,7 +536,23 @@ def running(
@@ -497,7 +537,23 @@ def running(
:param name: name of the virtual machine to run
:param cpu: number of CPUs for the virtual machine to create
@ -650,7 +609,7 @@ index 1a0c889d58..740f6c5746 100644
:param vm_type: force virtual machine type for the new VM. The default value is taken from
the host capabilities. This could be useful for example to use ``'qemu'`` type instead
of the ``'kvm'`` one.
@@ -609,6 +663,14 @@ def running(
@@ -608,6 +664,14 @@ def running(
.. versionadded:: Magnesium
@ -665,7 +624,7 @@ index 1a0c889d58..740f6c5746 100644
.. rubric:: Example States
Make sure an already-defined virtual machine called ``domain_name`` is running:
@@ -677,6 +739,7 @@ def running(
@@ -676,6 +740,7 @@ def running(
boot=boot,
update=update,
boot_dev=boot_dev,
@ -722,60 +681,31 @@ index 18728a75b5..fb4c9f40d0 100644
{% for disk in disks %}
<disk type='{{ disk.type }}' device='{{ disk.device }}'>
diff --git a/salt/utils/xmlutil.py b/salt/utils/xmlutil.py
index 68191bc528..6c32f22ad4 100644
index 111ca155d4..d25f5c8da5 100644
--- a/salt/utils/xmlutil.py
+++ b/salt/utils/xmlutil.py
@@ -2,12 +2,10 @@
Various XML utilities
"""
-# Import Python libs
import re
import string # pylint: disable=deprecated-module
from xml.etree import ElementTree
-# Import salt libs
import salt.utils.data
from salt.ext import six
@@ -301,7 +299,7 @@ def change_xml(doc, data, mapping):
@@ -299,7 +299,7 @@ def change_xml(doc, data, mapping):
if convert_fn:
new_value = convert_fn(new_value)
- if current_value != new_value:
+ if six.text_type(current_value) != six.text_type(new_value):
+ if str(current_value) != str(new_value):
set_fn(node, new_value)
need_update = True
else:
diff --git a/tests/unit/modules/test_virt.py b/tests/unit/modules/test_virt.py
index 6e61544a1f..ca5e80d2d2 100644
index e214e406e2..fba821ea53 100644
--- a/tests/unit/modules/test_virt.py
+++ b/tests/unit/modules/test_virt.py
@@ -4,7 +4,6 @@ virt execution module unit tests
@@ -21,7 +21,6 @@ from salt.ext import six
# pylint: disable=3rd-party-module-not-gated
-# Import python libs
import datetime
import os
@@ -16,7 +15,6 @@ import salt.modules.config as config
import salt.modules.virt as virt
import salt.syspaths
-# Import salt libs
import salt.utils.yaml
from salt._compat import ElementTree as ET
from salt.exceptions import CommandExecutionError, SaltInvocationError
@@ -24,7 +22,6 @@ from salt.exceptions import CommandExecutionError, SaltInvocationError
# pylint: disable=import-error
from salt.ext.six.moves import range # pylint: disable=redefined-builtin
-# Import Salt Testing libs
-from tests.support.helpers import dedent
from tests.support.mixins import LoaderModuleMockMixin
from tests.support.mock import MagicMock, patch
from tests.support.unit import TestCase
@@ -1859,6 +1856,25 @@ class VirtTestCase(TestCase, LoaderModuleMockMixin):
@@ -1856,6 +1855,25 @@ class VirtTestCase(TestCase, LoaderModuleMockMixin):
virt.update("my_vm"),
)
@ -801,7 +731,7 @@ index 6e61544a1f..ca5e80d2d2 100644
# Same parameters passed than in default virt.defined state case
self.assertEqual(
{
@@ -2004,6 +2020,50 @@ class VirtTestCase(TestCase, LoaderModuleMockMixin):
@@ -2001,6 +2019,50 @@ class VirtTestCase(TestCase, LoaderModuleMockMixin):
with self.assertRaises(SaltInvocationError):
virt.update("my_vm", boot={"efi": "Not a boolean value"})
@ -852,7 +782,7 @@ index 6e61544a1f..ca5e80d2d2 100644
# Update memory case
setmem_mock = MagicMock(return_value=0)
domain_mock.setMemoryFlags = setmem_mock
@@ -2018,10 +2078,43 @@ class VirtTestCase(TestCase, LoaderModuleMockMixin):
@@ -2015,10 +2077,43 @@ class VirtTestCase(TestCase, LoaderModuleMockMixin):
virt.update("my_vm", mem=2048),
)
setxml = ET.fromstring(define_mock.call_args[0][0])
@ -898,7 +828,7 @@ index 6e61544a1f..ca5e80d2d2 100644
# Update disks case
devattach_mock = MagicMock(return_value=0)
devdetach_mock = MagicMock(return_value=0)
@@ -2536,7 +2629,6 @@ class VirtTestCase(TestCase, LoaderModuleMockMixin):
@@ -2533,7 +2628,6 @@ class VirtTestCase(TestCase, LoaderModuleMockMixin):
"""
Test virt.update() with existing boot parameters.
"""
@ -906,7 +836,7 @@ index 6e61544a1f..ca5e80d2d2 100644
xml_boot = """
<domain type='kvm' id='8'>
<name>vm_with_boot_param</name>
@@ -2594,9 +2686,7 @@ class VirtTestCase(TestCase, LoaderModuleMockMixin):
@@ -2591,9 +2685,7 @@ class VirtTestCase(TestCase, LoaderModuleMockMixin):
</video>
</devices>
</domain>
@ -917,7 +847,7 @@ index 6e61544a1f..ca5e80d2d2 100644
domain_mock_boot = self.set_mock_vm("vm_with_boot_param", xml_boot)
domain_mock_boot.OSType = MagicMock(return_value="hvm")
define_mock_boot = MagicMock(return_value=True)
@@ -2697,6 +2787,218 @@ class VirtTestCase(TestCase, LoaderModuleMockMixin):
@@ -2694,6 +2786,218 @@ class VirtTestCase(TestCase, LoaderModuleMockMixin):
self.assertEqual(setxml.find("os").find("loader"), None)
self.assertEqual(setxml.find("os").find("nvram"), None)
@ -1137,32 +1067,18 @@ index 6e61544a1f..ca5e80d2d2 100644
"""
Test virt._nic_profile with mixed dictionaries and lists as input.
diff --git a/tests/unit/states/test_virt.py b/tests/unit/states/test_virt.py
index f03159334b..1923ae5c0f 100644
index 8fe892f607..1923ae5c0f 100644
--- a/tests/unit/states/test_virt.py
+++ b/tests/unit/states/test_virt.py
@@ -1,21 +1,15 @@
"""
:codeauthor: Jayesh Kariya <jayeshk@saltstack.com>
"""
-# Import Python libs
import shutil
import tempfile
-# Import Salt Libs
@@ -8,7 +8,6 @@ import tempfile
import salt.states.virt as virt
import salt.utils.files
from salt.exceptions import CommandExecutionError, SaltInvocationError
-
-# Import 3rd-party libs
-from salt.ext import six
from tests.support.mixins import LoaderModuleMockMixin
from tests.support.mock import MagicMock, mock_open, patch
-
-# Import Salt Testing Libs
from tests.support.runtests import RUNTIME_VARS
from tests.support.unit import TestCase
@@ -351,6 +345,7 @@ class LibvirtTestCase(TestCase, LoaderModuleMockMixin):
@@ -346,6 +345,7 @@ class LibvirtTestCase(TestCase, LoaderModuleMockMixin):
install=False,
pub_key="/path/to/key.pub",
priv_key="/path/to/key",
@ -1170,7 +1086,7 @@ index f03159334b..1923ae5c0f 100644
connection="someconnection",
username="libvirtuser",
password="supersecret",
@@ -376,6 +371,7 @@ class LibvirtTestCase(TestCase, LoaderModuleMockMixin):
@@ -371,6 +371,7 @@ class LibvirtTestCase(TestCase, LoaderModuleMockMixin):
start=False,
pub_key="/path/to/key.pub",
priv_key="/path/to/key",
@ -1178,7 +1094,7 @@ index f03159334b..1923ae5c0f 100644
connection="someconnection",
username="libvirtuser",
password="supersecret",
@@ -489,6 +485,7 @@ class LibvirtTestCase(TestCase, LoaderModuleMockMixin):
@@ -484,6 +485,7 @@ class LibvirtTestCase(TestCase, LoaderModuleMockMixin):
password=None,
boot=None,
test=False,
@ -1186,7 +1102,7 @@ index f03159334b..1923ae5c0f 100644
)
# Failed definition update case
@@ -559,6 +556,7 @@ class LibvirtTestCase(TestCase, LoaderModuleMockMixin):
@@ -554,6 +556,7 @@ class LibvirtTestCase(TestCase, LoaderModuleMockMixin):
install=False,
pub_key="/path/to/key.pub",
priv_key="/path/to/key",
@ -1194,7 +1110,7 @@ index f03159334b..1923ae5c0f 100644
connection="someconnection",
username="libvirtuser",
password="supersecret",
@@ -601,6 +599,7 @@ class LibvirtTestCase(TestCase, LoaderModuleMockMixin):
@@ -596,6 +599,7 @@ class LibvirtTestCase(TestCase, LoaderModuleMockMixin):
boot=None,
test=True,
boot_dev=None,
@ -1202,7 +1118,7 @@ index f03159334b..1923ae5c0f 100644
)
# No changes case
@@ -636,6 +635,7 @@ class LibvirtTestCase(TestCase, LoaderModuleMockMixin):
@@ -631,6 +635,7 @@ class LibvirtTestCase(TestCase, LoaderModuleMockMixin):
boot=None,
test=True,
boot_dev=None,
@ -1210,7 +1126,7 @@ index f03159334b..1923ae5c0f 100644
)
def test_running(self):
@@ -713,6 +713,7 @@ class LibvirtTestCase(TestCase, LoaderModuleMockMixin):
@@ -708,6 +713,7 @@ class LibvirtTestCase(TestCase, LoaderModuleMockMixin):
pub_key=None,
priv_key=None,
boot_dev=None,
@ -1218,7 +1134,7 @@ index f03159334b..1923ae5c0f 100644
connection=None,
username=None,
password=None,
@@ -775,6 +776,7 @@ class LibvirtTestCase(TestCase, LoaderModuleMockMixin):
@@ -770,6 +776,7 @@ class LibvirtTestCase(TestCase, LoaderModuleMockMixin):
pub_key="/path/to/key.pub",
priv_key="/path/to/key",
boot_dev="network hd",
@ -1226,7 +1142,7 @@ index f03159334b..1923ae5c0f 100644
connection="someconnection",
username="libvirtuser",
password="supersecret",
@@ -800,6 +802,7 @@ class LibvirtTestCase(TestCase, LoaderModuleMockMixin):
@@ -795,6 +802,7 @@ class LibvirtTestCase(TestCase, LoaderModuleMockMixin):
pub_key="/path/to/key.pub",
priv_key="/path/to/key",
boot_dev="network hd",
@ -1234,7 +1150,7 @@ index f03159334b..1923ae5c0f 100644
connection="someconnection",
username="libvirtuser",
password="supersecret",
@@ -945,6 +948,7 @@ class LibvirtTestCase(TestCase, LoaderModuleMockMixin):
@@ -940,6 +948,7 @@ class LibvirtTestCase(TestCase, LoaderModuleMockMixin):
boot=None,
test=False,
boot_dev=None,
@ -1242,7 +1158,7 @@ index f03159334b..1923ae5c0f 100644
)
# Failed definition update case
@@ -1018,6 +1022,7 @@ class LibvirtTestCase(TestCase, LoaderModuleMockMixin):
@@ -1013,6 +1022,7 @@ class LibvirtTestCase(TestCase, LoaderModuleMockMixin):
install=False,
pub_key="/path/to/key.pub",
priv_key="/path/to/key",
@ -1250,7 +1166,7 @@ index f03159334b..1923ae5c0f 100644
connection="someconnection",
username="libvirtuser",
password="supersecret",
@@ -1064,6 +1069,7 @@ class LibvirtTestCase(TestCase, LoaderModuleMockMixin):
@@ -1059,6 +1069,7 @@ class LibvirtTestCase(TestCase, LoaderModuleMockMixin):
boot=None,
test=True,
boot_dev=None,
@ -1258,7 +1174,7 @@ index f03159334b..1923ae5c0f 100644
)
start_mock.assert_not_called()
@@ -1101,6 +1107,7 @@ class LibvirtTestCase(TestCase, LoaderModuleMockMixin):
@@ -1096,6 +1107,7 @@ class LibvirtTestCase(TestCase, LoaderModuleMockMixin):
boot=None,
test=True,
boot_dev=None,
@ -1267,6 +1183,6 @@ index f03159334b..1923ae5c0f 100644
def test_stopped(self):
--
2.28.0
2.29.2

File diff suppressed because it is too large Load Diff

View File

@ -1,42 +1,34 @@
From c5edf396ffd66b6ac1479aa01367aae3eff7683d Mon Sep 17 00:00:00 2001
From 1606379714f4776e2b529fb1d45891266985c896 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Pablo=20Su=C3=A1rez=20Hern=C3=A1ndez?=
<psuarezhernandez@suse.com>
Date: Fri, 28 Feb 2020 15:11:53 +0000
Subject: [PATCH] Batch Async: Catch exceptions and safety unregister and
close instances
Subject: [PATCH] Batch Async: Catch exceptions and safety unregister
and close instances
---
salt/cli/batch_async.py | 156 +++++++++++++++++++++++-----------------
1 file changed, 89 insertions(+), 67 deletions(-)
salt/cli/batch_async.py | 160 ++++++++++++++++++++++++----------------
1 file changed, 96 insertions(+), 64 deletions(-)
diff --git a/salt/cli/batch_async.py b/salt/cli/batch_async.py
index da069b64bd..b8f272ed67 100644
index 1e2ac5b0d3..3dc04826d1 100644
--- a/salt/cli/batch_async.py
+++ b/salt/cli/batch_async.py
@@ -13,7 +13,6 @@ import salt.client
# pylint: enable=import-error,no-name-in-module,redefined-builtin
import logging
-import fnmatch
log = logging.getLogger(__name__)
@@ -104,22 +103,25 @@ class BatchAsync(object):
@@ -107,22 +107,25 @@ class BatchAsync:
def __event_handler(self, raw):
if not self.event:
return
- mtag, data = self.event.unpack(raw, self.event.serial)
- for (pattern, op) in self.patterns:
- if mtag.startswith(pattern[:-1]):
- minion = data['id']
- if op == 'ping_return':
- minion = data["id"]
- if op == "ping_return":
- self.minions.add(minion)
- if self.targeted_minions == self.minions:
- self.event.io_loop.spawn_callback(self.start_batch)
- elif op == 'find_job_return':
- elif op == "find_job_return":
- if data.get("return", None):
- self.find_job_returned.add(minion)
- elif op == 'batch_run':
- elif op == "batch_run":
- if minion in self.active:
- self.active.remove(minion)
- self.done_minions.add(minion)
@ -45,25 +37,25 @@ index da069b64bd..b8f272ed67 100644
+ mtag, data = self.event.unpack(raw, self.event.serial)
+ for (pattern, op) in self.patterns:
+ if mtag.startswith(pattern[:-1]):
+ minion = data['id']
+ if op == 'ping_return':
+ minion = data["id"]
+ if op == "ping_return":
+ self.minions.add(minion)
+ if self.targeted_minions == self.minions:
+ self.event.io_loop.spawn_callback(self.start_batch)
+ elif op == 'find_job_return':
+ elif op == "find_job_return":
+ if data.get("return", None):
+ self.find_job_returned.add(minion)
+ elif op == 'batch_run':
+ elif op == "batch_run":
+ if minion in self.active:
+ self.active.remove(minion)
+ self.done_minions.add(minion)
+ self.event.io_loop.spawn_callback(self.schedule_next)
+ except Exception as ex:
+ log.error("Exception occured while processing event: {}".format(ex))
+ log.error("Exception occured while processing event: {}".format(ex))
def _get_next(self):
to_run = self.minions.difference(
@@ -146,54 +148,59 @@ class BatchAsync(object):
to_run = (
@@ -154,53 +157,67 @@ class BatchAsync:
if timedout_minions:
self.schedule_next()
@ -74,112 +66,118 @@ index da069b64bd..b8f272ed67 100644
@tornado.gen.coroutine
def find_job(self, minions):
- not_done = minions.difference(self.done_minions).difference(self.timedout_minions)
- not_done = minions.difference(self.done_minions).difference(
- self.timedout_minions
- )
-
- if not_done:
- jid = self.jid_gen()
- find_job_return_pattern = 'salt/job/{0}/ret/*'.format(jid)
- find_job_return_pattern = "salt/job/{}/ret/*".format(jid)
- self.patterns.add((find_job_return_pattern, "find_job_return"))
- self.event.subscribe(find_job_return_pattern, match_type='glob')
- self.event.subscribe(find_job_return_pattern, match_type="glob")
-
- ret = yield self.local.run_job_async(
- not_done,
- 'saltutil.find_job',
- "saltutil.find_job",
- [self.batch_jid],
- 'list',
- gather_job_timeout=self.opts['gather_job_timeout'],
- "list",
- gather_job_timeout=self.opts["gather_job_timeout"],
- jid=jid,
- **self.eauth)
- yield tornado.gen.sleep(self.opts['gather_job_timeout'])
- self.event.io_loop.spawn_callback(
- self.check_find_job,
- not_done,
- jid)
- **self.eauth
+ if self.event:
+ not_done = minions.difference(self.done_minions).difference(self.timedout_minions)
+ not_done = minions.difference(self.done_minions).difference(
+ self.timedout_minions
)
- yield tornado.gen.sleep(self.opts["gather_job_timeout"])
- self.event.io_loop.spawn_callback(self.check_find_job, not_done, jid)
+ try:
+ if not_done:
+ jid = self.jid_gen()
+ find_job_return_pattern = 'salt/job/{0}/ret/*'.format(jid)
+ find_job_return_pattern = "salt/job/{}/ret/*".format(jid)
+ self.patterns.add((find_job_return_pattern, "find_job_return"))
+ self.event.subscribe(find_job_return_pattern, match_type='glob')
+ self.event.subscribe(find_job_return_pattern, match_type="glob")
+ ret = yield self.local.run_job_async(
+ not_done,
+ 'saltutil.find_job',
+ "saltutil.find_job",
+ [self.batch_jid],
+ 'list',
+ gather_job_timeout=self.opts['gather_job_timeout'],
+ "list",
+ gather_job_timeout=self.opts["gather_job_timeout"],
+ jid=jid,
+ **self.eauth)
+ yield tornado.gen.sleep(self.opts['gather_job_timeout'])
+ **self.eauth
+ )
+ yield tornado.gen.sleep(self.opts["gather_job_timeout"])
+ if self.event:
+ self.event.io_loop.spawn_callback(
+ self.check_find_job,
+ not_done,
+ jid)
+ self.check_find_job, not_done, jid
+ )
+ except Exception as ex:
+ log.error("Exception occured handling batch async: {}. Aborting execution.".format(ex))
+ log.error(
+ "Exception occured handling batch async: {}. Aborting execution.".format(
+ ex
+ )
+ )
+ self.close_safe()
@tornado.gen.coroutine
def start(self):
- self.__set_event_handler()
- ping_return = yield self.local.run_job_async(
- self.opts['tgt'],
- 'test.ping',
- self.opts["tgt"],
- "test.ping",
- [],
- self.opts.get(
- 'selected_target_option',
- self.opts.get('tgt_type', 'glob')
- ),
- gather_job_timeout=self.opts['gather_job_timeout'],
- self.opts.get("selected_target_option", self.opts.get("tgt_type", "glob")),
- gather_job_timeout=self.opts["gather_job_timeout"],
- jid=self.ping_jid,
- metadata=self.metadata,
- **self.eauth)
- self.targeted_minions = set(ping_return['minions'])
- #start batching even if not all minions respond to ping
- yield tornado.gen.sleep(self.batch_presence_ping_timeout or self.opts['gather_job_timeout'])
- **self.eauth
- )
- self.targeted_minions = set(ping_return["minions"])
- # start batching even if not all minions respond to ping
- yield tornado.gen.sleep(
- self.batch_presence_ping_timeout or self.opts["gather_job_timeout"]
- )
- self.event.io_loop.spawn_callback(self.start_batch)
-
+ if self.event:
+ self.__set_event_handler()
+ ping_return = yield self.local.run_job_async(
+ self.opts['tgt'],
+ 'test.ping',
+ self.opts["tgt"],
+ "test.ping",
+ [],
+ self.opts.get(
+ 'selected_target_option',
+ self.opts.get('tgt_type', 'glob')
+ "selected_target_option", self.opts.get("tgt_type", "glob")
+ ),
+ gather_job_timeout=self.opts['gather_job_timeout'],
+ gather_job_timeout=self.opts["gather_job_timeout"],
+ jid=self.ping_jid,
+ metadata=self.metadata,
+ **self.eauth)
+ self.targeted_minions = set(ping_return['minions'])
+ #start batching even if not all minions respond to ping
+ yield tornado.gen.sleep(self.batch_presence_ping_timeout or self.opts['gather_job_timeout'])
+ **self.eauth
+ )
+ self.targeted_minions = set(ping_return["minions"])
+ # start batching even if not all minions respond to ping
+ yield tornado.gen.sleep(
+ self.batch_presence_ping_timeout or self.opts["gather_job_timeout"]
+ )
+ if self.event:
+ self.event.io_loop.spawn_callback(self.start_batch)
@tornado.gen.coroutine
def start_batch(self):
@@ -206,7 +213,8 @@ class BatchAsync(object):
"metadata": self.metadata
}
ret = self.event.fire_event(data, "salt/batch/{0}/start".format(self.batch_jid))
@@ -215,7 +232,8 @@ class BatchAsync:
ret = self.event.fire_event(
data, "salt/batch/{}/start".format(self.batch_jid)
)
- self.event.io_loop.spawn_callback(self.run_next)
+ if self.event:
+ self.event.io_loop.spawn_callback(self.run_next)
@tornado.gen.coroutine
def end_batch(self):
@@ -221,11 +229,21 @@ class BatchAsync(object):
"metadata": self.metadata
@@ -232,11 +250,21 @@ class BatchAsync:
"metadata": self.metadata,
}
self.event.fire_event(data, "salt/batch/{0}/done".format(self.batch_jid))
self.event.fire_event(data, "salt/batch/{}/done".format(self.batch_jid))
- for (pattern, label) in self.patterns:
- if label in ["ping_return", "batch_run"]:
- self.event.unsubscribe(pattern, match_type='glob')
- self.event.unsubscribe(pattern, match_type="glob")
- del self
- gc.collect()
+
@ -189,18 +187,18 @@ index da069b64bd..b8f272ed67 100644
+ self.close_safe()
+
+ def close_safe(self):
+ for (pattern, label) in self.patterns:
+ self.event.unsubscribe(pattern, match_type='glob')
+ self.event.remove_event_handler(self.__event_handler)
+ self.event = None
+ self.local = None
+ self.ioloop = None
+ del self
+ gc.collect()
+ for (pattern, label) in self.patterns:
+ self.event.unsubscribe(pattern, match_type="glob")
+ self.event.remove_event_handler(self.__event_handler)
+ self.event = None
+ self.local = None
+ self.ioloop = None
+ del self
+ gc.collect()
@tornado.gen.coroutine
def schedule_next(self):
@@ -233,7 +251,8 @@ class BatchAsync(object):
@@ -244,7 +272,8 @@ class BatchAsync:
self.scheduled = True
# call later so that we maybe gather more returns
yield tornado.gen.sleep(self.batch_delay)
@ -210,10 +208,10 @@ index da069b64bd..b8f272ed67 100644
@tornado.gen.coroutine
def run_next(self):
@@ -254,17 +273,20 @@ class BatchAsync(object):
metadata=self.metadata)
@@ -266,17 +295,20 @@ class BatchAsync:
)
yield tornado.gen.sleep(self.opts['timeout'])
yield tornado.gen.sleep(self.opts["timeout"])
- self.event.io_loop.spawn_callback(self.find_job, set(next_batch))
+
+ # The batch can be done already at this point, which means no self.event
@ -235,6 +233,6 @@ index da069b64bd..b8f272ed67 100644
self.ioloop = None
gc.collect()
--
2.23.0
2.29.2

View File

@ -1,9 +1,9 @@
From bbd2e622f7e165a6e16fd5edf5f4596764748208 Mon Sep 17 00:00:00 2001
From 03f0aa44f6963e09a92dd3ea2090ef9ee463cb94 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Pablo=20Su=C3=A1rez=20Hern=C3=A1ndez?=
<psuarezhernandez@suse.com>
Date: Wed, 5 Jun 2019 15:15:04 +0100
Subject: [PATCH] batch.py: avoid exception when minion does not respond
(bsc#1135507)
Subject: [PATCH] batch.py: avoid exception when minion does not
respond (bsc#1135507)
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit
@ -18,26 +18,29 @@ bsc#1135507
Signed-off-by: José Guilherme Vanz <jguilhermevanz@suse.com>
---
salt/cli/batch.py | 5 +++++
1 file changed, 5 insertions(+)
salt/cli/batch.py | 8 ++++++++
1 file changed, 8 insertions(+)
diff --git a/salt/cli/batch.py b/salt/cli/batch.py
index 67f03c8a45..10fc81a5f4 100644
index 2bc5444aef..6285a45434 100644
--- a/salt/cli/batch.py
+++ b/salt/cli/batch.py
@@ -318,6 +318,11 @@ class Batch(object):
if self.opts.get('failhard') and data['retcode'] > 0:
@@ -348,6 +348,14 @@ class Batch:
if self.opts.get("failhard") and data["retcode"] > 0:
failhard = True
+ # avoid an exception if the minion does not respond.
+ if data.get("failed") is True:
+ log.debug('Minion %s failed to respond: data=%s', minion, data)
+ data = {'ret': 'Minion did not return. [Failed]', 'retcode': salt.defaults.exitcodes.EX_GENERIC}
+ log.debug("Minion %s failed to respond: data=%s", minion, data)
+ data = {
+ "ret": "Minion did not return. [Failed]",
+ "retcode": salt.defaults.exitcodes.EX_GENERIC,
+ }
+
if self.opts.get('raw'):
if self.opts.get("raw"):
ret[minion] = data
yield data
--
2.16.4
2.29.2

View File

@ -1,26 +1,27 @@
From bd20cd2655a1141fe9ea892e974e40988c3fb83c Mon Sep 17 00:00:00 2001
From 31fedcb3173f73fbffc3b053bc64c94a7b608118 Mon Sep 17 00:00:00 2001
From: Silvio Moioli <smoioli@suse.de>
Date: Mon, 2 Mar 2020 11:23:59 +0100
Subject: [PATCH] batch_async: avoid using fnmatch to match event (#217)
Subject: [PATCH] batch_async: avoid using fnmatch to match event
(#217)
---
salt/cli/batch_async.py | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/salt/cli/batch_async.py b/salt/cli/batch_async.py
index c4545e3ebc..da069b64bd 100644
index 8d2601e636..1e2ac5b0d3 100644
--- a/salt/cli/batch_async.py
+++ b/salt/cli/batch_async.py
@@ -106,7 +106,7 @@ class BatchAsync(object):
@@ -109,7 +109,7 @@ class BatchAsync:
return
mtag, data = self.event.unpack(raw, self.event.serial)
for (pattern, op) in self.patterns:
- if fnmatch.fnmatch(mtag, pattern):
+ if mtag.startswith(pattern[:-1]):
minion = data['id']
if op == 'ping_return':
minion = data["id"]
if op == "ping_return":
self.minions.add(minion)
--
2.23.0
2.29.2

View File

@ -1,4 +1,4 @@
From 8973063f6ad24fd5b3788292aa8cc341221d7fb5 Mon Sep 17 00:00:00 2001
From 60b8f6cdaab10a12973a074678608b86a34e23b7 Mon Sep 17 00:00:00 2001
From: Victor Zhestkov <35733135+vzhestkov@users.noreply.github.com>
Date: Tue, 6 Oct 2020 12:36:41 +0300
Subject: [PATCH] bsc#1176024: Fix file/directory user and group
@ -12,22 +12,22 @@ Subject: [PATCH] bsc#1176024: Fix file/directory user and group
Co-authored-by: Victor Zhestkov <vzhestkov@vz-thinkpad.vzhestkov.net>
---
salt/modules/file.py | 18 +++++++++---------
salt/states/file.py | 4 ++--
2 files changed, 11 insertions(+), 11 deletions(-)
salt/modules/file.py | 26 +++++++++++++++++---------
salt/states/file.py | 12 ++++++++++--
2 files changed, 27 insertions(+), 11 deletions(-)
diff --git a/salt/modules/file.py b/salt/modules/file.py
index b5b70e2d4c..0b516aff05 100644
index 989a7ad92d..b830b390d3 100644
--- a/salt/modules/file.py
+++ b/salt/modules/file.py
@@ -256,7 +256,7 @@ def group_to_gid(group):
@@ -252,7 +252,7 @@ def group_to_gid(group):
try:
if isinstance(group, int):
return group
- return grp.getgrnam(group).gr_gid
+ return grp.getgrnam(salt.utils.stringutils.to_str(group)).gr_gid
except KeyError:
return ''
return ""
@@ -344,7 +344,7 @@ def user_to_uid(user):
try:
@ -36,77 +36,91 @@ index b5b70e2d4c..0b516aff05 100644
- return pwd.getpwnam(user).pw_uid
+ return pwd.getpwnam(salt.utils.stringutils.to_str(user)).pw_uid
except KeyError:
return ''
return ""
@@ -4574,7 +4574,7 @@ def check_perms(name, ret, user, group, mode, attrs=None, follow_symlinks=False)
if (salt.utils.platform.is_windows() and
user_to_uid(user) != user_to_uid(perms['luser'])
) or (
- not salt.utils.platform.is_windows() and user != perms['luser']
+ not salt.utils.platform.is_windows() and salt.utils.stringutils.to_str(user) != perms['luser']
):
perms['cuser'] = user
@@ -4977,7 +4977,10 @@ def check_perms(
if (
salt.utils.platform.is_windows()
and user_to_uid(user) != user_to_uid(perms["luser"])
- ) or (not salt.utils.platform.is_windows() and user != perms["luser"]):
+ ) or (
+ not salt.utils.platform.is_windows()
+ and salt.utils.stringutils.to_str(user) != perms["luser"]
+ ):
perms["cuser"] = user
@@ -4584,7 +4584,7 @@ def check_perms(name, ret, user, group, mode, attrs=None, follow_symlinks=False)
if (salt.utils.platform.is_windows() and
group_to_gid(group) != group_to_gid(perms['lgroup'])
) or (
- not salt.utils.platform.is_windows() and group != perms['lgroup']
+ not salt.utils.platform.is_windows() and salt.utils.stringutils.to_str(group) != perms['lgroup']
):
perms['cgroup'] = group
if group:
@@ -4986,7 +4989,10 @@ def check_perms(
if (
salt.utils.platform.is_windows()
and group_to_gid(group) != group_to_gid(perms["lgroup"])
- ) or (not salt.utils.platform.is_windows() and group != perms["lgroup"]):
+ ) or (
+ not salt.utils.platform.is_windows()
+ and salt.utils.stringutils.to_str(group) != perms["lgroup"]
+ ):
perms["cgroup"] = group
@@ -4615,7 +4615,7 @@ def check_perms(name, ret, user, group, mode, attrs=None, follow_symlinks=False)
user != ''
) or (
not salt.utils.platform.is_windows() and
- user != get_user(name, follow_symlinks=follow_symlinks) and
+ salt.utils.stringutils.to_str(user) != get_user(name, follow_symlinks=follow_symlinks) and
user != ''
if "cuser" in perms or "cgroup" in perms:
@@ -5017,7 +5023,8 @@ def check_perms(
and user != ""
) or (
not salt.utils.platform.is_windows()
- and user != get_user(name, follow_symlinks=follow_symlinks)
+ and salt.utils.stringutils.to_str(user)
+ != get_user(name, follow_symlinks=follow_symlinks)
and user != ""
):
if __opts__['test'] is True:
@@ -4633,10 +4633,10 @@ def check_perms(name, ret, user, group, mode, attrs=None, follow_symlinks=False)
if (salt.utils.platform.is_windows() and
group_to_gid(group) != group_to_gid(
get_group(name, follow_symlinks=follow_symlinks)) and
- user != '') or (
+ group != '') or (
not salt.utils.platform.is_windows() and
- group != get_group(name, follow_symlinks=follow_symlinks) and
- user != ''
+ salt.utils.stringutils.to_str(group) != get_group(name, follow_symlinks=follow_symlinks) and
+ group != ''
if __opts__["test"] is True:
@@ -5035,18 +5042,19 @@ def check_perms(
salt.utils.platform.is_windows()
and group_to_gid(group)
!= group_to_gid(get_group(name, follow_symlinks=follow_symlinks))
- and user != ""
+ and group != ""
) or (
not salt.utils.platform.is_windows()
- and group != get_group(name, follow_symlinks=follow_symlinks)
- and user != ""
+ and salt.utils.stringutils.to_str(group)
+ != get_group(name, follow_symlinks=follow_symlinks)
+ and group != ""
):
if __opts__['test'] is True:
ret['changes']['group'] = group
@@ -4644,7 +4644,7 @@ def check_perms(name, ret, user, group, mode, attrs=None, follow_symlinks=False)
ret['result'] = False
ret['comment'].append('Failed to change group to {0}'
.format(group))
- elif 'cgroup' in perms and user != '':
+ elif 'cgroup' in perms and group != '':
ret['changes']['group'] = group
if __opts__["test"] is True:
ret["changes"]["group"] = group
else:
ret["result"] = False
ret["comment"].append("Failed to change group to {}".format(group))
- elif "cgroup" in perms and user != "":
+ elif "cgroup" in perms and group != "":
ret["changes"]["group"] = group
# Mode changes if needed
diff --git a/salt/states/file.py b/salt/states/file.py
index 0e925bb2ed..f21e0d12fc 100644
index 9e24e389d8..89c70eb454 100644
--- a/salt/states/file.py
+++ b/salt/states/file.py
@@ -960,11 +960,11 @@ def _check_dir_meta(name,
changes['directory'] = 'new'
@@ -989,9 +989,17 @@ def _check_dir_meta(name, user, group, mode, follow_symlinks=False):
if not stats:
changes["directory"] = "new"
return changes
if (user is not None
- and user != stats['user']
+ and salt.utils.stringutils.to_str(user) != stats['user']
and user != stats.get('uid')):
changes['user'] = user
if (group is not None
- and group != stats['group']
+ and salt.utils.stringutils.to_str(group) != stats['group']
and group != stats.get('gid')):
changes['group'] = group
- if user is not None and user != stats["user"] and user != stats.get("uid"):
+ if (
+ user is not None
+ and salt.utils.stringutils.to_str(user) != stats["user"]
+ and user != stats.get("uid")
+ ):
changes["user"] = user
- if group is not None and group != stats["group"] and group != stats.get("gid"):
+ if (
+ group is not None
+ and salt.utils.stringutils.to_str(group) != stats["group"]
+ and group != stats.get("gid")
+ ):
changes["group"] = group
# Normalize the dir mode
smode = salt.utils.files.normalize_mode(stats["mode"])
--
2.28.0
2.29.2

View File

@ -1,4 +1,4 @@
From 07f5a1d984b5a86c24620503f5e373ea0f11484a Mon Sep 17 00:00:00 2001
From d9362f10681a2dfdb057939eee1ffae3a35d4a8d Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Pablo=20Su=C3=A1rez=20Hern=C3=A1ndez?=
<psuarezhernandez@suse.com>
Date: Fri, 12 Apr 2019 16:47:03 +0100
@ -7,54 +7,81 @@ Subject: [PATCH] Calculate FQDNs in parallel to avoid blockings
Fix pylint issue
---
salt/grains/core.py | 31 ++++++++++++++++++++++++++-----
1 file changed, 26 insertions(+), 5 deletions(-)
salt/grains/core.py | 55 +++++++++++++++++++++++++++++++++------------
1 file changed, 41 insertions(+), 14 deletions(-)
diff --git a/salt/grains/core.py b/salt/grains/core.py
index 309e4c9c4a..4600f055dd 100644
index 006878f806..883e3ebe09 100644
--- a/salt/grains/core.py
+++ b/salt/grains/core.py
@@ -20,12 +20,15 @@ import platform
import logging
import locale
@@ -20,8 +20,10 @@ import socket
import sys
import time
import uuid
+import time
+import warnings
import zlib
from errno import EACCES, EPERM
import datetime
import warnings
import time
+from multiprocessing.dummy import Pool as ThreadPool
import distro
import salt.exceptions
@@ -44,6 +46,14 @@ import salt.utils.versions
from salt.ext.six.moves import range
from salt.utils.network import _get_interfaces
+# pylint: disable=import-error
+try:
+ import dateutil.tz
+
# pylint: disable=import-error
try:
import dateutil.tz
@@ -2275,13 +2278,10 @@ def fqdns():
+ _DATEUTIL_TZ = True
+except ImportError:
+ _DATEUTIL_TZ = False
+
# rewrite distro.linux_distribution to allow best=True kwarg in version(), needed to get the minor version numbers in CentOS
def _linux_distribution():
@@ -2402,22 +2412,12 @@ def fqdns():
grains = {}
fqdns = set()
- addresses = salt.utils.network.ip_addrs(include_loopback=False, interface_data=_get_interfaces())
- addresses.extend(salt.utils.network.ip_addrs6(include_loopback=False, interface_data=_get_interfaces()))
- err_message = 'Exception during resolving address: %s'
- addresses = salt.utils.network.ip_addrs(
- include_loopback=False, interface_data=_get_interfaces()
- )
- addresses.extend(
- salt.utils.network.ip_addrs6(
- include_loopback=False, interface_data=_get_interfaces()
- )
- )
- err_message = "Exception during resolving address: %s"
- for ip in addresses:
+ def _lookup_fqdn(ip):
try:
name, aliaslist, addresslist = socket.gethostbyaddr(ip)
- fqdns.update([socket.getfqdn(name)] + [als for als in aliaslist if salt.utils.network.is_fqdn(als)])
+ return [socket.getfqdn(name)] + [als for als in aliaslist if salt.utils.network.is_fqdn(als)]
- fqdns.update(
- [socket.getfqdn(name)]
- + [als for als in aliaslist if salt.utils.network.is_fqdn(als)]
- )
+ return [socket.getfqdn(name)] + [
+ als for als in aliaslist if salt.utils.network.is_fqdn(als)
+ ]
except socket.herror as err:
if err.errno in (0, HOST_NOT_FOUND, NO_DATA):
# No FQDN for this IP address, so we don't need to know this all the time.
@@ -2291,6 +2291,27 @@ def fqdns():
except (socket.error, socket.gaierror, socket.timeout) as err:
@@ -2427,6 +2427,33 @@ def fqdns():
except (OSError, socket.gaierror, socket.timeout) as err:
log.error(err_message, ip, err)
+ start = time.time()
+
+ addresses = salt.utils.network.ip_addrs(include_loopback=False, interface_data=_get_interfaces())
+ addresses.extend(salt.utils.network.ip_addrs6(include_loopback=False, interface_data=_get_interfaces()))
+ err_message = 'Exception during resolving address: %s'
+ addresses = salt.utils.network.ip_addrs(
+ include_loopback=False, interface_data=_get_interfaces()
+ )
+ addresses.extend(
+ salt.utils.network.ip_addrs6(
+ include_loopback=False, interface_data=_get_interfaces()
+ )
+ )
+ err_message = "Exception during resolving address: %s"
+
+ # Create a ThreadPool to process the underlying calls to 'socket.gethostbyaddr' in parallel.
+ # This avoid blocking the execution when the "fqdn" is not defined for certains IP addresses, which was causing
@ -69,12 +96,12 @@ index 309e4c9c4a..4600f055dd 100644
+ fqdns.update(item)
+
+ elapsed = time.time() - start
+ log.debug('Elapsed time getting FQDNs: {} seconds'.format(elapsed))
+ log.debug("Elapsed time getting FQDNs: {} seconds".format(elapsed))
+
return {"fqdns": sorted(list(fqdns))}
--
2.16.4
2.29.2

View File

@ -1,38 +1,36 @@
From 0cf1a655aa9353b22ae011e492a33aa52d780f83 Mon Sep 17 00:00:00 2001
From 5db9ccdb4f557cdbff670b18c45e55124e29c57c Mon Sep 17 00:00:00 2001
From: Jochen Breuer <jbreuer@suse.de>
Date: Tue, 10 Mar 2020 14:02:17 +0100
Subject: [PATCH] Changed imports to vendored Tornado
---
salt/cli/batch_async.py | 26 ++++++++++++------------
salt/cli/batch_async.py | 25 ++++++++++++-----------
salt/master.py | 2 +-
salt/transport/ipc.py | 4 ++--
tests/unit/cli/test_batch_async.py | 32 +++++++++++++++---------------
4 files changed, 32 insertions(+), 32 deletions(-)
3 files changed, 30 insertions(+), 29 deletions(-)
diff --git a/salt/cli/batch_async.py b/salt/cli/batch_async.py
index b8f272ed67..08eeb34f1c 100644
index 3dc04826d1..09aa85258b 100644
--- a/salt/cli/batch_async.py
+++ b/salt/cli/batch_async.py
@@ -6,7 +6,7 @@ Execute a job on the targeted minions by using a moving window of fixed size `ba
# Import python libs
from __future__ import absolute_import, print_function, unicode_literals
import gc
-import tornado
+import salt.ext.tornado
@@ -8,6 +8,7 @@ import gc
import logging
# Import salt libs
import salt.client
@@ -50,7 +50,7 @@ class BatchAsync(object):
}
'''
+import salt.ext.tornado
import tornado
from salt.cli.batch import batch_get_eauth, batch_get_opts, get_bnum
@@ -46,7 +47,7 @@ class BatchAsync:
"""
def __init__(self, parent_opts, jid_gen, clear_load):
- ioloop = tornado.ioloop.IOLoop.current()
+ ioloop = salt.ext.tornado.ioloop.IOLoop.current()
self.local = salt.client.get_local_client(parent_opts['conf_file'], io_loop=ioloop)
if 'gather_job_timeout' in clear_load['kwargs']:
clear_load['gather_job_timeout'] = clear_load['kwargs'].pop('gather_job_timeout')
@@ -152,7 +152,7 @@ class BatchAsync(object):
self.local = salt.client.get_local_client(
parent_opts["conf_file"], io_loop=ioloop
)
@@ -161,7 +162,7 @@ class BatchAsync:
self.find_job_returned = self.find_job_returned.difference(running)
self.event.io_loop.spawn_callback(self.find_job, running)
@ -40,18 +38,18 @@ index b8f272ed67..08eeb34f1c 100644
+ @salt.ext.tornado.gen.coroutine
def find_job(self, minions):
if self.event:
not_done = minions.difference(self.done_minions).difference(self.timedout_minions)
@@ -170,7 +170,7 @@ class BatchAsync(object):
gather_job_timeout=self.opts['gather_job_timeout'],
not_done = minions.difference(self.done_minions).difference(
@@ -182,7 +183,7 @@ class BatchAsync:
jid=jid,
**self.eauth)
- yield tornado.gen.sleep(self.opts['gather_job_timeout'])
+ yield salt.ext.tornado.gen.sleep(self.opts['gather_job_timeout'])
**self.eauth
)
- yield tornado.gen.sleep(self.opts["gather_job_timeout"])
+ yield salt.ext.tornado.gen.sleep(self.opts["gather_job_timeout"])
if self.event:
self.event.io_loop.spawn_callback(
self.check_find_job,
@@ -180,7 +180,7 @@ class BatchAsync(object):
log.error("Exception occured handling batch async: {}. Aborting execution.".format(ex))
self.check_find_job, not_done, jid
@@ -195,7 +196,7 @@ class BatchAsync:
)
self.close_safe()
- @tornado.gen.coroutine
@ -59,12 +57,14 @@ index b8f272ed67..08eeb34f1c 100644
def start(self):
if self.event:
self.__set_event_handler()
@@ -198,11 +198,11 @@ class BatchAsync(object):
**self.eauth)
self.targeted_minions = set(ping_return['minions'])
#start batching even if not all minions respond to ping
- yield tornado.gen.sleep(self.batch_presence_ping_timeout or self.opts['gather_job_timeout'])
+ yield salt.ext.tornado.gen.sleep(self.batch_presence_ping_timeout or self.opts['gather_job_timeout'])
@@ -213,13 +214,13 @@ class BatchAsync:
)
self.targeted_minions = set(ping_return["minions"])
# start batching even if not all minions respond to ping
- yield tornado.gen.sleep(
+ yield salt.ext.tornado.gen.sleep(
self.batch_presence_ping_timeout or self.opts["gather_job_timeout"]
)
if self.event:
self.event.io_loop.spawn_callback(self.start_batch)
@ -73,16 +73,16 @@ index b8f272ed67..08eeb34f1c 100644
def start_batch(self):
if not self.initialized:
self.batch_size = get_bnum(self.opts, self.minions, True)
@@ -216,7 +216,7 @@ class BatchAsync(object):
@@ -235,7 +236,7 @@ class BatchAsync:
if self.event:
self.event.io_loop.spawn_callback(self.run_next)
- @tornado.gen.coroutine
+ @salt.ext.tornado.gen.coroutine
def end_batch(self):
left = self.minions.symmetric_difference(self.done_minions.union(self.timedout_minions))
if not left and not self.ended:
@@ -232,7 +232,7 @@ class BatchAsync(object):
left = self.minions.symmetric_difference(
self.done_minions.union(self.timedout_minions)
@@ -253,7 +254,7 @@ class BatchAsync:
# release to the IOLoop to allow the event to be published
# before closing batch async execution
@ -91,9 +91,9 @@ index b8f272ed67..08eeb34f1c 100644
self.close_safe()
def close_safe(self):
@@ -245,16 +245,16 @@ class BatchAsync(object):
del self
gc.collect()
@@ -266,16 +267,16 @@ class BatchAsync:
del self
gc.collect()
- @tornado.gen.coroutine
+ @salt.ext.tornado.gen.coroutine
@ -111,66 +111,44 @@ index b8f272ed67..08eeb34f1c 100644
def run_next(self):
self.scheduled = False
next_batch = self._get_next()
@@ -272,7 +272,7 @@ class BatchAsync(object):
jid=self.batch_jid,
metadata=self.metadata)
@@ -294,7 +295,7 @@ class BatchAsync:
metadata=self.metadata,
)
- yield tornado.gen.sleep(self.opts['timeout'])
+ yield salt.ext.tornado.gen.sleep(self.opts['timeout'])
- yield tornado.gen.sleep(self.opts["timeout"])
+ yield salt.ext.tornado.gen.sleep(self.opts["timeout"])
# The batch can be done already at this point, which means no self.event
if self.event:
diff --git a/salt/master.py b/salt/master.py
index 3abf7ae60b..3a9d12999d 100644
index 7a99af357a..ab85c7f5c6 100644
--- a/salt/master.py
+++ b/salt/master.py
@@ -2049,7 +2049,7 @@ class ClearFuncs(object):
@@ -2237,7 +2237,7 @@ class ClearFuncs(TransportMethods):
functools.partial(self._prep_jid, clear_load, {}),
batch_load
batch_load,
)
- ioloop = tornado.ioloop.IOLoop.current()
+ ioloop = salt.ext.tornado.ioloop.IOLoop.current()
ioloop.add_callback(batch.start)
return {
diff --git a/salt/transport/ipc.py b/salt/transport/ipc.py
index d2b295a633..33ee3d4182 100644
--- a/salt/transport/ipc.py
+++ b/salt/transport/ipc.py
@@ -697,7 +697,7 @@ class IPCMessageSubscriber(IPCClient):
for callback in self.callbacks:
self.io_loop.spawn_callback(callback, raw)
- @tornado.gen.coroutine
+ @salt.ext.tornado.gen.coroutine
def read_async(self):
'''
Asynchronously read messages and invoke a callback when they are ready.
@@ -712,7 +712,7 @@ class IPCMessageSubscriber(IPCClient):
yield salt.ext.tornado.gen.sleep(1)
except Exception as exc: # pylint: disable=broad-except
log.error('Exception occurred while Subscriber connecting: %s', exc)
- yield tornado.gen.sleep(1)
+ yield salt.ext.tornado.gen.sleep(1)
yield self._read(None, self.__run_callbacks)
def close(self):
diff --git a/tests/unit/cli/test_batch_async.py b/tests/unit/cli/test_batch_async.py
index e1ce60859b..635dc689a8 100644
index dcee9a87bd..82a712b15b 100644
--- a/tests/unit/cli/test_batch_async.py
+++ b/tests/unit/cli/test_batch_async.py
@@ -5,8 +5,8 @@ from __future__ import absolute_import
# Import Salt Libs
from salt.cli.batch_async import BatchAsync
@@ -1,8 +1,8 @@
-import tornado
-from tornado.testing import AsyncTestCase
+import salt.ext.tornado
from salt.cli.batch_async import BatchAsync
+from salt.ext.tornado.testing import AsyncTestCase
from tests.support.unit import skipIf, TestCase
from tests.support.mock import MagicMock, patch, NO_MOCK, NO_MOCK_REASON
from tests.support.mock import NO_MOCK, NO_MOCK_REASON, MagicMock, patch
from tests.support.unit import TestCase, skipIf
-from tornado.testing import AsyncTestCase
@@ -59,10 +59,10 @@ class AsyncBatchTestCase(AsyncTestCase, TestCase):
@skipIf(NO_MOCK, NO_MOCK_REASON)
@@ -52,10 +52,10 @@ class AsyncBatchTestCase(AsyncTestCase, TestCase):
self.batch.start_batch()
self.assertEqual(self.batch.batch_size, 2)
@ -180,12 +158,12 @@ index e1ce60859b..635dc689a8 100644
self.batch.event = MagicMock()
- future = tornado.gen.Future()
+ future = salt.ext.tornado.gen.Future()
future.set_result({'minions': ['foo', 'bar']})
future.set_result({"minions": ["foo", "bar"]})
self.batch.local.run_job_async.return_value = future
ret = self.batch.start()
@@ -78,10 +78,10 @@ class AsyncBatchTestCase(AsyncTestCase, TestCase):
@@ -71,10 +71,10 @@ class AsyncBatchTestCase(AsyncTestCase, TestCase):
# assert targeted_minions == all minions matched by tgt
self.assertEqual(self.batch.targeted_minions, set(['foo', 'bar']))
self.assertEqual(self.batch.targeted_minions, {"foo", "bar"})
- @tornado.testing.gen_test
+ @salt.ext.tornado.testing.gen_test
@ -193,11 +171,11 @@ index e1ce60859b..635dc689a8 100644
self.batch.event = MagicMock()
- future = tornado.gen.Future()
+ future = salt.ext.tornado.gen.Future()
future.set_result({'minions': ['foo', 'bar']})
future.set_result({"minions": ["foo", "bar"]})
self.batch.local.run_job_async.return_value = future
self.batch.batch_presence_ping_timeout = None
@@ -109,7 +109,7 @@ class AsyncBatchTestCase(AsyncTestCase, TestCase):
)
@@ -103,7 +103,7 @@ class AsyncBatchTestCase(AsyncTestCase, TestCase):
),
)
- @tornado.testing.gen_test
@ -205,26 +183,26 @@ index e1ce60859b..635dc689a8 100644
def test_start_batch_calls_next(self):
self.batch.run_next = MagicMock(return_value=MagicMock())
self.batch.event = MagicMock()
@@ -165,14 +165,14 @@ class AsyncBatchTestCase(AsyncTestCase, TestCase):
self.assertEqual(
len(event.remove_event_handler.mock_calls), 1)
@@ -160,14 +160,14 @@ class AsyncBatchTestCase(AsyncTestCase, TestCase):
self.assertEqual(len(event.unsubscribe.mock_calls), 2)
self.assertEqual(len(event.remove_event_handler.mock_calls), 1)
- @tornado.testing.gen_test
+ @salt.ext.tornado.testing.gen_test
def test_batch_next(self):
self.batch.event = MagicMock()
self.batch.opts['fun'] = 'my.fun'
self.batch.opts['arg'] = []
self.batch._get_next = MagicMock(return_value={'foo', 'bar'})
self.batch.opts["fun"] = "my.fun"
self.batch.opts["arg"] = []
self.batch._get_next = MagicMock(return_value={"foo", "bar"})
self.batch.batch_size = 2
- future = tornado.gen.Future()
+ future = salt.ext.tornado.gen.Future()
future.set_result({'minions': ['foo', 'bar']})
future.set_result({"minions": ["foo", "bar"]})
self.batch.local.run_job_async.return_value = future
self.batch.run_next()
@@ -284,38 +284,38 @@ class AsyncBatchTestCase(AsyncTestCase, TestCase):
@@ -290,38 +290,38 @@ class AsyncBatchTestCase(AsyncTestCase, TestCase):
self.batch._BatchAsync__event_handler(MagicMock())
self.assertEqual(self.batch.find_job_returned, {'foo'})
self.assertEqual(self.batch.find_job_returned, {"foo"})
- @tornado.testing.gen_test
+ @salt.ext.tornado.testing.gen_test
@ -242,33 +220,33 @@ index e1ce60859b..635dc689a8 100644
+ future = salt.ext.tornado.gen.Future()
future.set_result({})
self.batch.local.run_job_async.return_value = future
self.batch.minions = set(['foo', 'bar'])
self.batch.minions = {"foo", "bar"}
self.batch.jid_gen = MagicMock(return_value="1234")
- tornado.gen.sleep = MagicMock(return_value=future)
+ salt.ext.tornado.gen.sleep = MagicMock(return_value=future)
self.batch.find_job({'foo', 'bar'})
self.batch.find_job({"foo", "bar"})
self.assertEqual(
self.batch.event.io_loop.spawn_callback.call_args[0],
(self.batch.check_find_job, {'foo', 'bar'}, "1234")
(self.batch.check_find_job, {"foo", "bar"}, "1234"),
)
- @tornado.testing.gen_test
+ @salt.ext.tornado.testing.gen_test
def test_batch_find_job_with_done_minions(self):
self.batch.done_minions = {'bar'}
self.batch.done_minions = {"bar"}
self.batch.event = MagicMock()
- future = tornado.gen.Future()
+ future = salt.ext.tornado.gen.Future()
future.set_result({})
self.batch.local.run_job_async.return_value = future
self.batch.minions = set(['foo', 'bar'])
self.batch.minions = {"foo", "bar"}
self.batch.jid_gen = MagicMock(return_value="1234")
- tornado.gen.sleep = MagicMock(return_value=future)
+ salt.ext.tornado.gen.sleep = MagicMock(return_value=future)
self.batch.find_job({'foo', 'bar'})
self.batch.find_job({"foo", "bar"})
self.assertEqual(
self.batch.event.io_loop.spawn_callback.call_args[0],
--
2.23.0
2.29.2

File diff suppressed because it is too large Load Diff

View File

@ -1,54 +0,0 @@
From 615a8f8dfa8ef12eeb4c387e48309cc466b8597d Mon Sep 17 00:00:00 2001
From: Bo Maryniuk <bo@suse.de>
Date: Tue, 4 Dec 2018 16:39:08 +0100
Subject: [PATCH] Decide if the source should be actually skipped
---
salt/modules/aptpkg.py | 23 ++++++++++++++++++++++-
1 file changed, 22 insertions(+), 1 deletion(-)
diff --git a/salt/modules/aptpkg.py b/salt/modules/aptpkg.py
index 4ec9158476..3b0d8423db 100644
--- a/salt/modules/aptpkg.py
+++ b/salt/modules/aptpkg.py
@@ -1620,6 +1620,27 @@ def list_repo_pkgs(*args, **kwargs): # pylint: disable=unused-import
return ret
+def _skip_source(source):
+ '''
+ Decide to skip source or not.
+
+ :param source:
+ :return:
+ '''
+ if source.invalid:
+ if source.uri and source.type and source.type in ("deb", "deb-src", "rpm", "rpm-src"):
+ pieces = source.mysplit(source.line)
+ if pieces[1].strip()[0] == "[":
+ options = pieces.pop(1).strip("[]").split()
+ if len(options) > 0:
+ log.debug("Source %s will be included although is marked invalid", source.uri)
+ return False
+ return True
+ else:
+ return True
+ return False
+
+
def list_repos():
'''
Lists all repos in the sources.list (and sources.lists.d) files
@@ -1635,7 +1656,7 @@ def list_repos():
repos = {}
sources = sourceslist.SourcesList()
for source in sources.list:
- if source.invalid:
+ if _skip_source(source):
continue
repo = {}
repo['file'] = source.file
--
2.16.4

View File

@ -1,4 +1,4 @@
From f81a5b92d691c1d511a814f9344104dd37466bc3 Mon Sep 17 00:00:00 2001
From e986ed8fc0d5da74374d9ded82e10c16fc984ca8 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Pablo=20Su=C3=A1rez=20Hern=C3=A1ndez?=
<psuarezhernandez@suse.com>
Date: Wed, 29 May 2019 11:03:16 +0100
@ -6,42 +6,45 @@ Subject: [PATCH] Do not break repo files with multiple line values on
yumpkg (bsc#1135360)
---
tests/integration/modules/test_pkg.py | 48 +++++++++++++++++++++++++++++++++++
1 file changed, 48 insertions(+)
tests/integration/modules/test_pkg.py | 51 +++++++++++++++++++++++++++
1 file changed, 51 insertions(+)
diff --git a/tests/integration/modules/test_pkg.py b/tests/integration/modules/test_pkg.py
index e8374db2c0..61748f9477 100644
index 7a720523da..e32013800d 100644
--- a/tests/integration/modules/test_pkg.py
+++ b/tests/integration/modules/test_pkg.py
@@ -182,6 +182,54 @@ class PkgModuleTest(ModuleCase, SaltReturnAssertsMixin):
@@ -194,6 +194,57 @@ class PkgModuleTest(ModuleCase, SaltReturnAssertsMixin):
if repo is not None:
self.run_function('pkg.del_repo', [repo])
self.run_function("pkg.del_repo", [repo])
+ def test_mod_del_repo_multiline_values(self):
+ '''
+ """
+ test modifying and deleting a software repository defined with multiline values
+ '''
+ os_grain = self.run_function('grains.item', ['os'])['os']
+ """
+ os_grain = self.run_function("grains.item", ["os"])["os"]
+ repo = None
+ try:
+ if os_grain in ['CentOS', 'RedHat', 'SUSE']:
+ my_baseurl = 'http://my.fake.repo/foo/bar/\n http://my.fake.repo.alt/foo/bar/'
+ expected_get_repo_baseurl = 'http://my.fake.repo/foo/bar/\nhttp://my.fake.repo.alt/foo/bar/'
+ major_release = int(
+ self.run_function(
+ 'grains.item',
+ ['osmajorrelease']
+ )['osmajorrelease']
+ if os_grain in ["CentOS", "RedHat", "SUSE"]:
+ my_baseurl = (
+ "http://my.fake.repo/foo/bar/\n http://my.fake.repo.alt/foo/bar/"
+ )
+ repo = 'fakerepo'
+ name = 'Fake repo for RHEL/CentOS/SUSE'
+ expected_get_repo_baseurl = (
+ "http://my.fake.repo/foo/bar/\nhttp://my.fake.repo.alt/foo/bar/"
+ )
+ major_release = int(
+ self.run_function("grains.item", ["osmajorrelease"])[
+ "osmajorrelease"
+ ]
+ )
+ repo = "fakerepo"
+ name = "Fake repo for RHEL/CentOS/SUSE"
+ baseurl = my_baseurl
+ gpgkey = 'https://my.fake.repo/foo/bar/MY-GPG-KEY.pub'
+ failovermethod = 'priority'
+ gpgkey = "https://my.fake.repo/foo/bar/MY-GPG-KEY.pub"
+ failovermethod = "priority"
+ gpgcheck = 1
+ enabled = 1
+ ret = self.run_function(
+ 'pkg.mod_repo',
+ "pkg.mod_repo",
+ [repo],
+ name=name,
+ baseurl=baseurl,
@ -55,20 +58,20 @@ index e8374db2c0..61748f9477 100644
+ self.assertNotEqual(ret, {})
+ repo_info = ret[next(iter(ret))]
+ self.assertIn(repo, repo_info)
+ self.assertEqual(repo_info[repo]['baseurl'], my_baseurl)
+ ret = self.run_function('pkg.get_repo', [repo])
+ self.assertEqual(ret['baseurl'], expected_get_repo_baseurl)
+ self.run_function('pkg.mod_repo', [repo])
+ ret = self.run_function('pkg.get_repo', [repo])
+ self.assertEqual(ret['baseurl'], expected_get_repo_baseurl)
+ self.assertEqual(repo_info[repo]["baseurl"], my_baseurl)
+ ret = self.run_function("pkg.get_repo", [repo])
+ self.assertEqual(ret["baseurl"], expected_get_repo_baseurl)
+ self.run_function("pkg.mod_repo", [repo])
+ ret = self.run_function("pkg.get_repo", [repo])
+ self.assertEqual(ret["baseurl"], expected_get_repo_baseurl)
+ finally:
+ if repo is not None:
+ self.run_function('pkg.del_repo', [repo])
+ self.run_function("pkg.del_repo", [repo])
+
@requires_salt_modules('pkg.owner')
@requires_salt_modules("pkg.owner")
def test_owner(self):
'''
"""
--
2.16.4
2.29.2

View File

@ -1,4 +1,4 @@
From bfee3a7c47786bb860663de97fca26725101f1d0 Mon Sep 17 00:00:00 2001
From 998136ffd4c8442e0c3a7030af3d8196abec6be1 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Pablo=20Su=C3=A1rez=20Hern=C3=A1ndez?=
<psuarezhernandez@suse.com>
Date: Tue, 7 May 2019 15:33:51 +0100
@ -11,24 +11,24 @@ Add unit test for '_netlink_tool_remote_on'
1 file changed, 5 insertions(+)
diff --git a/salt/utils/network.py b/salt/utils/network.py
index 2ae2e213b7..307cab885f 100644
index dd7fceb91a..d253ded3ab 100644
--- a/salt/utils/network.py
+++ b/salt/utils/network.py
@@ -1442,8 +1442,13 @@ def _netlink_tool_remote_on(port, which_end):
elif 'ESTAB' not in line:
@@ -1623,8 +1623,13 @@ def _netlink_tool_remote_on(port, which_end):
elif "ESTAB" not in line:
continue
chunks = line.split()
+ local_host, local_port = chunks[3].rsplit(':', 1)
remote_host, remote_port = chunks[4].rsplit(':', 1)
+ local_host, local_port = chunks[3].rsplit(":", 1)
remote_host, remote_port = chunks[4].rsplit(":", 1)
+ if which_end == 'remote_port' and int(remote_port) != port:
+ if which_end == "remote_port" and int(remote_port) != port:
+ continue
+ if which_end == 'local_port' and int(local_port) != port:
+ if which_end == "local_port" and int(local_port) != port:
+ continue
remotes.add(remote_host.strip("[]"))
if valid is False:
--
2.23.0
2.29.2

View File

@ -1,4 +1,4 @@
From 3d5d89428ca333caa2c2259f679f8fffd7110ba6 Mon Sep 17 00:00:00 2001
From 57f9da0bd7727c46eab866941fee46a3eaf8c8ea Mon Sep 17 00:00:00 2001
From: Bo Maryniuk <bo@suse.de>
Date: Fri, 21 Sep 2018 17:31:39 +0200
Subject: [PATCH] Do not load pip state if there is no 3rd party
@ -6,40 +6,355 @@ Subject: [PATCH] Do not load pip state if there is no 3rd party
Safe import 3rd party dependency
---
salt/modules/pip.py | 12 ++++++++++--
1 file changed, 10 insertions(+), 2 deletions(-)
salt/modules/pip.py | 93 ++++++++++++++++++++++++---------------------
1 file changed, 50 insertions(+), 43 deletions(-)
diff --git a/salt/modules/pip.py b/salt/modules/pip.py
index 0a0773a8f4..f19593ed1a 100644
index f7c101f6e4..742e0dd48a 100644
--- a/salt/modules/pip.py
+++ b/salt/modules/pip.py
@@ -82,7 +82,10 @@ from __future__ import absolute_import, print_function, unicode_literals
# Import python libs
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
r"""
Install Python packages with pip to either the system or a virtualenv
@@ -77,9 +76,7 @@ of the 2015.5 branch:
The issue is described here: https://github.com/saltstack/salt/issues/46163
"""
-from __future__ import absolute_import, print_function, unicode_literals
-# Import python libs
import logging
import os
-import pkg_resources
import re
@@ -89,7 +86,6 @@ import tempfile
import pkg_resources # pylint: disable=3rd-party-module-not-gated
-# Import Salt libs
import salt.utils.data
import salt.utils.files
import salt.utils.json
@@ -101,6 +97,12 @@ import salt.utils.versions
from salt.exceptions import CommandExecutionError, CommandNotFoundError
from salt.ext import six
+try:
+ import pkg_resources
+except ImportError:
+ pkg_resources = None
import re
import shutil
import sys
@@ -121,7 +124,12 @@ def __virtual__():
+
+
# This needs to be named logger so we don't shadow it in pip.install
logger = logging.getLogger(__name__) # pylint: disable=C0103
@@ -118,7 +120,12 @@ def __virtual__():
entire filesystem. If it's not installed in a conventional location, the
user is required to provide the location of pip each time it is used.
'''
- return 'pip'
"""
- return "pip"
+ if pkg_resources is None:
+ ret = False, 'Package dependency "pkg_resource" is missing'
+ else:
+ ret = 'pip'
+ ret = "pip"
+
+ return ret
def _pip_bin_env(cwd, bin_env):
@@ -140,7 +147,7 @@ def _clear_context(bin_env=None):
"""
contextkey = "pip.version"
if bin_env is not None:
- contextkey = "{0}.{1}".format(contextkey, bin_env)
+ contextkey = "{}.{}".format(contextkey, bin_env)
__context__.pop(contextkey, None)
@@ -196,7 +203,7 @@ def _get_pip_bin(bin_env):
bin_path,
)
raise CommandNotFoundError(
- "Could not find a pip binary in virtualenv {0}".format(bin_env)
+ "Could not find a pip binary in virtualenv {}".format(bin_env)
)
# bin_env is the python or pip binary
@@ -209,11 +216,11 @@ def _get_pip_bin(bin_env):
return [os.path.normpath(bin_env)]
raise CommandExecutionError(
- "Could not find a pip binary within {0}".format(bin_env)
+ "Could not find a pip binary within {}".format(bin_env)
)
else:
raise CommandNotFoundError(
- "Access denied to {0}, could not find a pip binary".format(bin_env)
+ "Access denied to {}, could not find a pip binary".format(bin_env)
)
@@ -283,7 +290,7 @@ def _resolve_requirements_chain(requirements):
chain = []
- if isinstance(requirements, six.string_types):
+ if isinstance(requirements, str):
requirements = [requirements]
for req_file in requirements:
@@ -300,7 +307,7 @@ def _process_requirements(requirements, cmd, cwd, saltenv, user):
cleanup_requirements = []
if requirements is not None:
- if isinstance(requirements, six.string_types):
+ if isinstance(requirements, str):
requirements = [r.strip() for r in requirements.split(",")]
elif not isinstance(requirements, list):
raise TypeError("requirements must be a string or list")
@@ -314,7 +321,7 @@ def _process_requirements(requirements, cmd, cwd, saltenv, user):
if not cached_requirements:
ret = {
"result": False,
- "comment": "pip requirements file '{0}' not found".format(
+ "comment": "pip requirements file '{}' not found".format(
requirement
),
}
@@ -412,15 +419,15 @@ def _format_env_vars(env_vars):
ret = {}
if env_vars:
if isinstance(env_vars, dict):
- for key, val in six.iteritems(env_vars):
- if not isinstance(key, six.string_types):
+ for key, val in env_vars.items():
+ if not isinstance(key, str):
key = str(key) # future lint: disable=blacklisted-function
- if not isinstance(val, six.string_types):
+ if not isinstance(val, str):
val = str(val) # future lint: disable=blacklisted-function
ret[key] = val
else:
raise CommandExecutionError(
- "env_vars {0} is not a dictionary".format(env_vars)
+ "env_vars {} is not a dictionary".format(env_vars)
)
return ret
@@ -762,9 +769,9 @@ def install(
if log:
if os.path.isdir(log):
- raise IOError("'{0}' is a directory. Use --log path_to_file".format(log))
+ raise OSError("'{}' is a directory. Use --log path_to_file".format(log))
elif not os.access(log, os.W_OK):
- raise IOError("'{0}' is not writeable".format(log))
+ raise OSError("'{}' is not writeable".format(log))
cmd.extend(["--log", log])
@@ -790,12 +797,12 @@ def install(
int(timeout)
except ValueError:
raise ValueError(
- "'{0}' is not a valid timeout, must be an integer".format(timeout)
+ "'{}' is not a valid timeout, must be an integer".format(timeout)
)
cmd.extend(["--timeout", timeout])
if find_links:
- if isinstance(find_links, six.string_types):
+ if isinstance(find_links, str):
find_links = [l.strip() for l in find_links.split(",")]
for link in find_links:
@@ -803,7 +810,7 @@ def install(
salt.utils.url.validate(link, VALID_PROTOS) or os.path.exists(link)
):
raise CommandExecutionError(
- "'{0}' is not a valid URL or path".format(link)
+ "'{}' is not a valid URL or path".format(link)
)
cmd.extend(["--find-links", link])
@@ -815,13 +822,13 @@ def install(
if index_url:
if not salt.utils.url.validate(index_url, VALID_PROTOS):
- raise CommandExecutionError("'{0}' is not a valid URL".format(index_url))
+ raise CommandExecutionError("'{}' is not a valid URL".format(index_url))
cmd.extend(["--index-url", index_url])
if extra_index_url:
if not salt.utils.url.validate(extra_index_url, VALID_PROTOS):
raise CommandExecutionError(
- "'{0}' is not a valid URL".format(extra_index_url)
+ "'{}' is not a valid URL".format(extra_index_url)
)
cmd.extend(["--extra-index-url", extra_index_url])
@@ -836,13 +843,13 @@ def install(
" use index_url and/or extra_index_url instead"
)
- if isinstance(mirrors, six.string_types):
+ if isinstance(mirrors, str):
mirrors = [m.strip() for m in mirrors.split(",")]
cmd.append("--use-mirrors")
for mirror in mirrors:
if not mirror.startswith("http://"):
- raise CommandExecutionError("'{0}' is not a valid URL".format(mirror))
+ raise CommandExecutionError("'{}' is not a valid URL".format(mirror))
cmd.extend(["--mirrors", mirror])
if disable_version_check:
@@ -883,7 +890,7 @@ def install(
if exists_action.lower() not in ("s", "i", "w", "b"):
raise CommandExecutionError(
"The exists_action pip option only supports the values "
- "s, i, w, and b. '{0}' is not valid.".format(exists_action)
+ "s, i, w, and b. '{}' is not valid.".format(exists_action)
)
cmd.extend(["--exists-action", exists_action])
@@ -911,14 +918,14 @@ def install(
cmd.extend(["--cert", cert])
if global_options:
- if isinstance(global_options, six.string_types):
+ if isinstance(global_options, str):
global_options = [go.strip() for go in global_options.split(",")]
for opt in global_options:
cmd.extend(["--global-option", opt])
if install_options:
- if isinstance(install_options, six.string_types):
+ if isinstance(install_options, str):
install_options = [io.strip() for io in install_options.split(",")]
for opt in install_options:
@@ -929,7 +936,7 @@ def install(
try:
pkgs = [p.strip() for p in pkgs.split(",")]
except AttributeError:
- pkgs = [p.strip() for p in six.text_type(pkgs).split(",")]
+ pkgs = [p.strip() for p in str(pkgs).split(",")]
pkgs = salt.utils.data.stringify(salt.utils.data.decode_list(pkgs))
# It's possible we replaced version-range commas with semicolons so
@@ -945,7 +952,7 @@ def install(
if editable:
egg_match = re.compile(r"(?:#|#.*?&)egg=([^&]*)")
- if isinstance(editable, six.string_types):
+ if isinstance(editable, str):
editable = [e.strip() for e in editable.split(",")]
for entry in editable:
@@ -964,14 +971,14 @@ def install(
cmd.append("--allow-all-external")
if allow_external:
- if isinstance(allow_external, six.string_types):
+ if isinstance(allow_external, str):
allow_external = [p.strip() for p in allow_external.split(",")]
for pkg in allow_external:
cmd.extend(["--allow-external", pkg])
if allow_unverified:
- if isinstance(allow_unverified, six.string_types):
+ if isinstance(allow_unverified, str):
allow_unverified = [p.strip() for p in allow_unverified.split(",")]
for pkg in allow_unverified:
@@ -1106,8 +1113,8 @@ def uninstall(
try:
# TODO make this check if writeable
os.path.exists(log)
- except IOError:
- raise IOError("'{0}' is not writeable".format(log))
+ except OSError:
+ raise OSError("'{}' is not writeable".format(log))
cmd.extend(["--log", log])
@@ -1133,12 +1140,12 @@ def uninstall(
int(timeout)
except ValueError:
raise ValueError(
- "'{0}' is not a valid timeout, must be an integer".format(timeout)
+ "'{}' is not a valid timeout, must be an integer".format(timeout)
)
cmd.extend(["--timeout", timeout])
if pkgs:
- if isinstance(pkgs, six.string_types):
+ if isinstance(pkgs, str):
pkgs = [p.strip() for p in pkgs.split(",")]
if requirements:
for requirement in requirements:
@@ -1323,7 +1330,7 @@ def version(bin_env=None, cwd=None, user=None):
cwd = _pip_bin_env(cwd, bin_env)
contextkey = "pip.version"
if bin_env is not None:
- contextkey = "{0}.{1}".format(contextkey, bin_env)
+ contextkey = "{}.{}".format(contextkey, bin_env)
if contextkey in __context__:
return __context__[contextkey]
@@ -1402,7 +1409,7 @@ def list_upgrades(bin_env=None, user=None, cwd=None):
if match:
name, version_ = match.groups()
else:
- logger.error("Can't parse line '{0}'".format(line))
+ logger.error("Can't parse line '{}'".format(line))
continue
packages[name] = version_
@@ -1414,7 +1421,7 @@ def list_upgrades(bin_env=None, user=None, cwd=None):
raise CommandExecutionError("Invalid JSON", info=result)
for pkg in pkgs:
- packages[pkg["name"]] = "{0} [{1}]".format(
+ packages[pkg["name"]] = "{} [{}]".format(
pkg["latest_version"], pkg["latest_filetype"]
)
@@ -1602,17 +1609,17 @@ def list_all_versions(
"""
cwd = _pip_bin_env(cwd, bin_env)
cmd = _get_pip_bin(bin_env)
- cmd.extend(["install", "{0}==versions".format(pkg)])
+ cmd.extend(["install", "{}==versions".format(pkg)])
if index_url:
if not salt.utils.url.validate(index_url, VALID_PROTOS):
- raise CommandExecutionError("'{0}' is not a valid URL".format(index_url))
+ raise CommandExecutionError("'{}' is not a valid URL".format(index_url))
cmd.extend(["--index-url", index_url])
if extra_index_url:
if not salt.utils.url.validate(extra_index_url, VALID_PROTOS):
raise CommandExecutionError(
- "'{0}' is not a valid URL".format(extra_index_url)
+ "'{}' is not a valid URL".format(extra_index_url)
)
cmd.extend(["--extra-index-url", extra_index_url])
@@ -1632,7 +1639,7 @@ def list_all_versions(
if not include_rc:
filtered.append("rc")
if filtered:
- excludes = re.compile(r"^((?!{0}).)*$".format("|".join(filtered)))
+ excludes = re.compile(r"^((?!{}).)*$".format("|".join(filtered)))
else:
excludes = re.compile(r"")
--
2.16.4
2.29.2

View File

@ -1,4 +1,4 @@
From 235cca81be2f64ed3feb48ed42bfa3f9196bff39 Mon Sep 17 00:00:00 2001
From 5d465a5b392efa1b4df7870161b32e0125efa4af Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Pablo=20Su=C3=A1rez=20Hern=C3=A1ndez?=
<psuarezhernandez@suse.com>
Date: Fri, 28 Jun 2019 15:17:56 +0100
@ -10,77 +10,315 @@ Move MockTimedProc implementation to tests.support.mock
Add unit test for ansible caller
---
salt/modules/ansiblegate.py | 14 +++++++++---
tests/support/mock.py | 31 +++++++++++++++++++++++++
tests/unit/modules/test_ansiblegate.py | 41 ++++++++++++++++++++++++++++++++++
tests/unit/modules/test_cmdmod.py | 35 ++---------------------------
4 files changed, 85 insertions(+), 36 deletions(-)
salt/modules/ansiblegate.py | 7 +-
tests/support/mock.py | 128 +++++++++-------
tests/unit/modules/test_ansiblegate.py | 201 +++++++++++++++++++++++++
tests/unit/modules/test_cmdmod.py | 1 +
4 files changed, 280 insertions(+), 57 deletions(-)
create mode 100644 tests/unit/modules/test_ansiblegate.py
diff --git a/salt/modules/ansiblegate.py b/salt/modules/ansiblegate.py
index 6b903c2b94..8e28fcafa3 100644
index 0279a26017..5d4b986ec2 100644
--- a/salt/modules/ansiblegate.py
+++ b/salt/modules/ansiblegate.py
@@ -147,6 +147,10 @@ class AnsibleModuleCaller(object):
@@ -160,6 +160,7 @@ class AnsibleModuleCaller:
:param kwargs: keywords to the module
:return:
'''
+ if six.PY3:
+ python_exec = 'python3'
+ else:
+ python_exec = 'python'
"""
+ python_exec = "python3"
module = self._resolver.load_module(module)
if not hasattr(module, 'main'):
@@ -162,9 +166,13 @@ class AnsibleModuleCaller(object):
["echo", "{0}".format(js_args)],
stdout=subprocess.PIPE, timeout=self.timeout)
if not hasattr(module, "main"):
@@ -182,9 +183,9 @@ class AnsibleModuleCaller:
timeout=self.timeout,
)
proc_out.run()
+ if six.PY3:
+ proc_out_stdout = proc_out.stdout.decode()
+ else:
+ proc_out_stdout = proc_out.stdout
- proc_out_stdout = salt.utils.stringutils.to_str(proc_out.stdout)
+ proc_out_stdout = proc_out.stdout.decode()
proc_exc = salt.utils.timed_subprocess.TimedProc(
- ['python', module.__file__],
- stdin=proc_out.stdout, stdout=subprocess.PIPE, timeout=self.timeout)
- [sys.executable, module.__file__],
+ [python_exec, module.__file__],
+ stdin=proc_out_stdout, stdout=subprocess.PIPE, timeout=self.timeout)
proc_exc.run()
try:
@@ -263,7 +271,7 @@ def help(module=None, *args):
description = doc.get('description') or ''
del doc['description']
ret['Description'] = description
- ret['Available sections on module "{}"'.format(module.__name__.replace('ansible.modules.', ''))] = doc.keys()
+ ret['Available sections on module "{}"'.format(module.__name__.replace('ansible.modules.', ''))] = [i for i in doc.keys()]
stdin=proc_out_stdout,
stdout=subprocess.PIPE,
timeout=self.timeout,
@@ -298,7 +299,7 @@ def help(module=None, *args):
'Available sections on module "{}"'.format(
module.__name__.replace("ansible.modules.", "")
)
- ] = list(doc)
+ ] = [i for i in doc.keys()]
else:
for arg in args:
info = doc.get(arg)
diff --git a/tests/support/mock.py b/tests/support/mock.py
index 805a60377c..67ecb4838a 100644
index 7ef02e0701..87d052c399 100644
--- a/tests/support/mock.py
+++ b/tests/support/mock.py
@@ -461,6 +461,37 @@ class MockOpen(object):
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
"""
:codeauthor: Pedro Algarvio (pedro@algarvio.me)
@@ -14,7 +13,6 @@
"""
# pylint: disable=unused-import,function-redefined,blacklisted-module,blacklisted-external-module
-from __future__ import absolute_import
import collections
import copy
@@ -42,8 +40,6 @@ from mock import (
patch,
sentinel,
)
-
-# Import salt libs
from salt.ext import six
# pylint: disable=no-name-in-module,no-member
@@ -57,7 +53,7 @@ if sys.version_info < (3, 6) and __mock_version < (2,):
raise ImportError("Please install mock>=2.0.0")
-class MockFH(object):
+class MockFH:
def __init__(self, filename, read_data, *args, **kwargs):
self.filename = filename
self.read_data = read_data
@@ -89,7 +85,7 @@ class MockFH(object):
"""
# Newline will always be a bytestring on PY2 because mock_open will have
# normalized it to one.
- newline = b"\n" if isinstance(read_data, six.binary_type) else "\n"
+ newline = b"\n" if isinstance(read_data, bytes) else "\n"
read_data = [line + newline for line in read_data.split(newline)]
@@ -103,8 +99,7 @@ class MockFH(object):
# newline that we added in the list comprehension.
read_data[-1] = read_data[-1][:-1]
- for line in read_data:
- yield line
+ yield from read_data
@property
def write_calls(self):
@@ -126,18 +121,18 @@ class MockFH(object):
def __check_read_data(self):
if not self.__read_data_ok:
if self.binary_mode:
- if not isinstance(self.read_data, six.binary_type):
+ if not isinstance(self.read_data, bytes):
raise TypeError(
- "{0} opened in binary mode, expected read_data to be "
- "bytes, not {1}".format(
+ "{} opened in binary mode, expected read_data to be "
+ "bytes, not {}".format(
self.filename, type(self.read_data).__name__
)
)
else:
if not isinstance(self.read_data, str):
raise TypeError(
- "{0} opened in non-binary mode, expected read_data to "
- "be str, not {1}".format(
+ "{} opened in non-binary mode, expected read_data to "
+ "be str, not {}".format(
self.filename, type(self.read_data).__name__
)
)
@@ -147,8 +142,8 @@ class MockFH(object):
def _read(self, size=0):
self.__check_read_data()
if not self.read_mode:
- raise IOError("File not open for reading")
- if not isinstance(size, six.integer_types) or size < 0:
+ raise OSError("File not open for reading")
+ if not isinstance(size, int) or size < 0:
raise TypeError("a positive integer is required")
joined = self.empty_string.join(self.read_data_iter)
@@ -169,7 +164,7 @@ class MockFH(object):
# TODO: Implement "size" argument
self.__check_read_data()
if not self.read_mode:
- raise IOError("File not open for reading")
+ raise OSError("File not open for reading")
ret = list(self.read_data_iter)
self.__loc += sum(len(x) for x in ret)
return ret
@@ -178,7 +173,7 @@ class MockFH(object):
# TODO: Implement "size" argument
self.__check_read_data()
if not self.read_mode:
- raise IOError("File not open for reading")
+ raise OSError("File not open for reading")
try:
ret = next(self.read_data_iter)
self.__loc += len(ret)
@@ -189,7 +184,7 @@ class MockFH(object):
def __iter__(self):
self.__check_read_data()
if not self.read_mode:
- raise IOError("File not open for reading")
+ raise OSError("File not open for reading")
while True:
try:
ret = next(self.read_data_iter)
@@ -200,30 +195,22 @@ class MockFH(object):
def _write(self, content):
if not self.write_mode:
- raise IOError("File not open for writing")
- if six.PY2:
- if isinstance(content, six.text_type):
- # encoding intentionally not specified to force a
- # UnicodeEncodeError when non-ascii unicode type is passed
- content.encode()
- else:
- content_type = type(content)
- if self.binary_mode and content_type is not bytes:
- raise TypeError(
- "a bytes-like object is required, not '{0}'".format(
- content_type.__name__
- )
- )
- elif not self.binary_mode and content_type is not str:
- raise TypeError(
- "write() argument must be str, not {0}".format(
- content_type.__name__
- )
+ raise OSError("File not open for writing")
+ content_type = type(content)
+ if self.binary_mode and content_type is not bytes:
+ raise TypeError(
+ "a bytes-like object is required, not '{}'".format(
+ content_type.__name__
)
+ )
+ elif not self.binary_mode and content_type is not str:
+ raise TypeError(
+ "write() argument must be str, not {}".format(content_type.__name__)
+ )
def _writelines(self, lines):
if not self.write_mode:
- raise IOError("File not open for writing")
+ raise OSError("File not open for writing")
for line in lines:
self._write(line)
@@ -234,26 +221,24 @@ class MockFH(object):
pass
-class MockCall(object):
+class MockCall:
def __init__(self, *args, **kwargs):
self.args = args
self.kwargs = kwargs
def __repr__(self):
# future lint: disable=blacklisted-function
- ret = str("MockCall(")
+ ret = "MockCall("
for arg in self.args:
- ret += repr(arg) + str(", ")
+ ret += repr(arg) + ", "
if not self.kwargs:
if self.args:
# Remove trailing ', '
ret = ret[:-2]
else:
- for key, val in six.iteritems(self.kwargs):
- ret += str("{0}={1}").format(
- salt.utils.stringutils.to_str(key), repr(val)
- )
- ret += str(")")
+ for key, val in self.kwargs.items():
+ ret += "{}={}".format(salt.utils.stringutils.to_str(key), repr(val))
+ ret += ")"
return ret
# future lint: enable=blacklisted-function
@@ -264,7 +249,7 @@ class MockCall(object):
return self.args == other.args and self.kwargs == other.kwargs
-class MockOpen(object):
+class MockOpen:
r'''
This class can be used to mock the use of ``open()``.
@@ -379,7 +364,7 @@ class MockOpen(object):
# .__class__() used here to preserve the dict class in the event that
# an OrderedDict was used.
new_read_data = read_data.__class__()
- for key, val in six.iteritems(read_data):
+ for key, val in read_data.items():
try:
val = salt.utils.data.decode(val, to_str=True)
except TypeError:
@@ -424,7 +409,7 @@ class MockOpen(object):
except IndexError:
# We've run out of file contents, abort!
raise RuntimeError(
- "File matching expression '{0}' opened more times than "
+ "File matching expression '{}' opened more times than "
"expected".format(matched_pattern)
)
@@ -443,7 +428,7 @@ class MockOpen(object):
except KeyError:
# No matching glob in read_data, treat this as a file that does
# not exist and raise the appropriate exception.
- raise IOError(errno.ENOENT, "No such file or directory", name)
+ raise OSError(errno.ENOENT, "No such file or directory", name)
def write_calls(self, path=None):
"""
@@ -451,7 +436,7 @@ class MockOpen(object):
the results to files matching a given pattern.
"""
ret = []
- for filename, handles in six.iteritems(self.filehandles):
+ for filename, handles in self.filehandles.items():
if path is None or fnmatch.fnmatch(filename, path):
for fh_ in handles:
ret.extend(fh_.write_calls)
@@ -463,19 +448,54 @@ class MockOpen(object):
narrow the results to files matching a given pattern.
"""
ret = []
- for filename, handles in six.iteritems(self.filehandles):
+ for filename, handles in self.filehandles.items():
if path is None or fnmatch.fnmatch(filename, path):
for fh_ in handles:
ret.extend(fh_.writelines_calls)
return ret
+class MockTimedProc(object):
+ '''
-class MockTimedProc(object):
+class MockTimedProc:
+ """
+ Class used as a stand-in for salt.utils.timed_subprocess.TimedProc
+ '''
+ class _Process(object):
+ '''
+ """
+
+ class _Process:
+ """
+ Used to provide a dummy "process" attribute
+ '''
+ """
+
+ def __init__(self, returncode=0, pid=12345):
+ self.returncode = returncode
+ self.pid = pid
+
+ def __init__(self, stdout=None, stderr=None, returncode=0, pid=12345):
+ if stdout is not None and not isinstance(stdout, bytes):
+ raise TypeError('Must pass stdout to MockTimedProc as bytes')
+ raise TypeError("Must pass stdout to MockTimedProc as bytes")
+ if stderr is not None and not isinstance(stderr, bytes):
+ raise TypeError('Must pass stderr to MockTimedProc as bytes')
+ raise TypeError("Must pass stderr to MockTimedProc as bytes")
+ self._stdout = stdout
+ self._stderr = stderr
+ self.process = self._Process(returncode=returncode, pid=pid)
@ -95,124 +333,238 @@ index 805a60377c..67ecb4838a 100644
+ @property
+ def stderr(self):
+ return self._stderr
+
+
+class MockTimedProc:
"""
Class used as a stand-in for salt.utils.timed_subprocess.TimedProc
"""
# reimplement mock_open to support multiple filehandles
mock_open = MockOpen
- class _Process(object):
+ class _Process:
"""
Used to provide a dummy "process" attribute
"""
diff --git a/tests/unit/modules/test_ansiblegate.py b/tests/unit/modules/test_ansiblegate.py
index 5613a0e79b..b7b43efda4 100644
--- a/tests/unit/modules/test_ansiblegate.py
new file mode 100644
index 0000000000..61aad44b5c
--- /dev/null
+++ b/tests/unit/modules/test_ansiblegate.py
@@ -29,11 +29,13 @@ from tests.support.unit import TestCase, skipIf
from tests.support.mock import (
patch,
MagicMock,
+ MockTimedProc,
)
import salt.modules.ansiblegate as ansible
import salt.utils.platform
from salt.exceptions import LoaderError
@@ -0,0 +1,201 @@
+#
+# Author: Bo Maryniuk <bo@suse.de>
+#
+# Copyright 2017 SUSE LLC
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import os
+
+import salt.modules.ansiblegate as ansible
+import salt.utils.platform
+from salt.exceptions import LoaderError
+from salt.ext import six
@skipIf(NO_PYTEST, False)
@@ -134,3 +136,42 @@ description:
'''
with patch('salt.modules.ansiblegate.ansible', None):
assert ansible.__virtual__() == 'ansible'
+from tests.support.mixins import LoaderModuleMockMixin
+from tests.support.mock import MagicMock, MockTimedProc, patch
+from tests.support.unit import TestCase, skipIf
+
+try:
+ import pytest
+except ImportError as import_error:
+ pytest = None
+NO_PYTEST = not bool(pytest)
+
+
+@skipIf(NO_PYTEST, False)
+@skipIf(salt.utils.platform.is_windows(), "Not supported on Windows")
+class AnsiblegateTestCase(TestCase, LoaderModuleMockMixin):
+ def setUp(self):
+ self.resolver = ansible.AnsibleModuleResolver({})
+ self.resolver._modules_map = {
+ "one.two.three": os.sep + os.path.join("one", "two", "three.py"),
+ "four.five.six": os.sep + os.path.join("four", "five", "six.py"),
+ "three.six.one": os.sep + os.path.join("three", "six", "one.py"),
+ }
+
+ def tearDown(self):
+ self.resolver = None
+
+ def setup_loader_modules(self):
+ return {ansible: {}}
+
+ def test_ansible_module_help(self):
+ """
+ Test help extraction from the module
+ :return:
+ """
+
+ class Module:
+ """
+ An ansible module mock.
+ """
+
+ __name__ = "foo"
+ DOCUMENTATION = """
+---
+one:
+ text here
+---
+two:
+ text here
+description:
+ describe the second part
+ """
+
+ with patch.object(ansible, "_resolver", self.resolver), patch.object(
+ ansible._resolver, "load_module", MagicMock(return_value=Module())
+ ):
+ ret = ansible.help("dummy")
+ assert sorted(
+ ret.get('Available sections on module "{}"'.format(Module().__name__))
+ ) == ["one", "two"]
+ assert ret.get("Description") == "describe the second part"
+
+ def test_module_resolver_modlist(self):
+ """
+ Test Ansible resolver modules list.
+ :return:
+ """
+ assert self.resolver.get_modules_list() == [
+ "four.five.six",
+ "one.two.three",
+ "three.six.one",
+ ]
+ for ptr in ["five", "fi", "ve"]:
+ assert self.resolver.get_modules_list(ptr) == ["four.five.six"]
+ for ptr in ["si", "ix", "six"]:
+ assert self.resolver.get_modules_list(ptr) == [
+ "four.five.six",
+ "three.six.one",
+ ]
+ assert self.resolver.get_modules_list("one") == [
+ "one.two.three",
+ "three.six.one",
+ ]
+ assert self.resolver.get_modules_list("one.two") == ["one.two.three"]
+ assert self.resolver.get_modules_list("four") == ["four.five.six"]
+
+ def test_resolver_module_loader_failure(self):
+ """
+ Test Ansible module loader.
+ :return:
+ """
+ mod = "four.five.six"
+ with pytest.raises(ImportError) as import_error:
+ self.resolver.load_module(mod)
+
+ mod = "i.even.do.not.exist.at.all"
+ with pytest.raises(LoaderError) as loader_error:
+ self.resolver.load_module(mod)
+
+ def test_resolver_module_loader(self):
+ """
+ Test Ansible module loader.
+ :return:
+ """
+ with patch("salt.modules.ansiblegate.importlib", MagicMock()), patch(
+ "salt.modules.ansiblegate.importlib.import_module", lambda x: x
+ ):
+ assert (
+ self.resolver.load_module("four.five.six")
+ == "ansible.modules.four.five.six"
+ )
+
+ def test_resolver_module_loader_import_failure(self):
+ """
+ Test Ansible module loader failure.
+ :return:
+ """
+ with patch("salt.modules.ansiblegate.importlib", MagicMock()), patch(
+ "salt.modules.ansiblegate.importlib.import_module", lambda x: x
+ ):
+ with pytest.raises(LoaderError) as loader_error:
+ self.resolver.load_module("something.strange")
+
+ def test_virtual_function(self):
+ """
+ Test Ansible module __virtual__ when ansible is not installed on the minion.
+ :return:
+ """
+ with patch("salt.modules.ansiblegate.ansible", None):
+ assert ansible.__virtual__() == "ansible"
+
+ def test_ansible_module_call(self):
+ '''
+ """
+ Test Ansible module call from ansible gate module
+
+ :return:
+ '''
+ """
+
+ class Module(object):
+ '''
+ class Module:
+ """
+ An ansible module mock.
+ '''
+ __name__ = 'one.two.three'
+ __file__ = 'foofile'
+ """
+
+ __name__ = "one.two.three"
+ __file__ = "foofile"
+
+ def main():
+ pass
+
+ ANSIBLE_MODULE_ARGS = '{"ANSIBLE_MODULE_ARGS": ["arg_1", {"kwarg1": "foobar"}]}'
+
+ proc = MagicMock(side_effect=[
+ MockTimedProc(
+ stdout=ANSIBLE_MODULE_ARGS.encode(),
+ stderr=None),
+ MockTimedProc(stdout='{"completed": true}'.encode(), stderr=None)
+ ])
+ proc = MagicMock(
+ side_effect=[
+ MockTimedProc(stdout=ANSIBLE_MODULE_ARGS.encode(), stderr=None),
+ MockTimedProc(stdout=b'{"completed": true}', stderr=None),
+ ]
+ )
+
+ with patch.object(ansible, '_resolver', self.resolver), \
+ patch.object(ansible._resolver, 'load_module', MagicMock(return_value=Module())):
+ with patch.object(ansible, "_resolver", self.resolver), patch.object(
+ ansible._resolver, "load_module", MagicMock(return_value=Module())
+ ):
+ _ansible_module_caller = ansible.AnsibleModuleCaller(ansible._resolver)
+ with patch('salt.utils.timed_subprocess.TimedProc', proc):
+ ret = _ansible_module_caller.call("one.two.three", "arg_1", kwarg1="foobar")
+ if six.PY3:
+ proc.assert_any_call(['echo', '{"ANSIBLE_MODULE_ARGS": {"kwarg1": "foobar", "_raw_params": "arg_1"}}'], stdout=-1, timeout=1200)
+ proc.assert_any_call(['python3', 'foofile'], stdin=ANSIBLE_MODULE_ARGS, stdout=-1, timeout=1200)
+ else:
+ proc.assert_any_call(['echo', '{"ANSIBLE_MODULE_ARGS": {"_raw_params": "arg_1", "kwarg1": "foobar"}}'], stdout=-1, timeout=1200)
+ proc.assert_any_call(['python', 'foofile'], stdin=ANSIBLE_MODULE_ARGS, stdout=-1, timeout=1200)
+ with patch("salt.utils.timed_subprocess.TimedProc", proc):
+ ret = _ansible_module_caller.call(
+ "one.two.three", "arg_1", kwarg1="foobar"
+ )
+ proc.assert_any_call(
+ [
+ "echo",
+ '{"ANSIBLE_MODULE_ARGS": {"kwarg1": "foobar", "_raw_params": "arg_1"}}',
+ ],
+ stdout=-1,
+ timeout=1200,
+ )
+ proc.assert_any_call(
+ ["python3", "foofile"],
+ stdin=ANSIBLE_MODULE_ARGS,
+ stdout=-1,
+ timeout=1200,
+ )
+ assert ret == {"completed": True, "timeout": 1200}
diff --git a/tests/unit/modules/test_cmdmod.py b/tests/unit/modules/test_cmdmod.py
index 8170a56b4e..f8fba59294 100644
index 15b97f8568..f3348bc379 100644
--- a/tests/unit/modules/test_cmdmod.py
+++ b/tests/unit/modules/test_cmdmod.py
@@ -26,6 +26,7 @@ from tests.support.helpers import TstSuiteLoggingHandler
from tests.support.mock import (
mock_open,
Mock,
+ MockTimedProc,
MagicMock,
patch
)
@@ -36,39 +37,7 @@ MOCK_SHELL_FILE = '# List of acceptable shells\n' \
'/bin/bash\n'
@@ -24,6 +24,7 @@ DEFAULT_SHELL = "foo/bar"
MOCK_SHELL_FILE = "# List of acceptable shells\n" "\n" "/bin/bash\n"
-class MockTimedProc(object):
- '''
- Class used as a stand-in for salt.utils.timed_subprocess.TimedProc
- '''
- class _Process(object):
- '''
- Used to provide a dummy "process" attribute
- '''
- def __init__(self, returncode=0, pid=12345):
- self.returncode = returncode
- self.pid = pid
-
- def __init__(self, stdout=None, stderr=None, returncode=0, pid=12345):
- if stdout is not None and not isinstance(stdout, bytes):
- raise TypeError('Must pass stdout to MockTimedProc as bytes')
- if stderr is not None and not isinstance(stderr, bytes):
- raise TypeError('Must pass stderr to MockTimedProc as bytes')
- self._stdout = stdout
- self._stderr = stderr
- self.process = self._Process(returncode=returncode, pid=pid)
-
- def run(self):
- pass
-
- @property
- def stdout(self):
- return self._stdout
-
- @property
- def stderr(self):
- return self._stderr
-
-
+@skipIf(NO_MOCK, NO_MOCK_REASON)
class CMDMODTestCase(TestCase, LoaderModuleMockMixin):
'''
"""
Unit tests for the salt.modules.cmdmod module
--
2.16.4
2.29.2

View File

@ -1,4 +1,4 @@
From b651c2cd8b719a72e66b63afd9061739624763e1 Mon Sep 17 00:00:00 2001
From 81d0105b0c0464c375070ffbc863a020a67e7965 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Pablo=20Su=C3=A1rez=20Hern=C3=A1ndez?=
<psuarezhernandez@suse.com>
Date: Wed, 26 Aug 2020 10:24:58 +0100
@ -10,18 +10,18 @@ Subject: [PATCH] Do not raise StreamClosedError traceback but only log
1 file changed, 1 deletion(-)
diff --git a/salt/transport/ipc.py b/salt/transport/ipc.py
index 33ee3d4182..624eca5a9c 100644
index f411907da2..5ff0956dde 100644
--- a/salt/transport/ipc.py
+++ b/salt/transport/ipc.py
@@ -667,7 +667,6 @@ class IPCMessageSubscriber(IPCClient):
@@ -688,7 +688,6 @@ class IPCMessageSubscriber(IPCClient):
except StreamClosedError as exc:
log.trace('Subscriber disconnected from IPC %s', self.socket_path)
log.trace("Subscriber disconnected from IPC %s", self.socket_path)
self._read_stream_future = None
- exc_to_raise = exc
except Exception as exc: # pylint: disable=broad-except
log.error('Exception occurred in Subscriber while handling stream: %s', exc)
log.error("Exception occurred in Subscriber while handling stream: %s", exc)
self._read_stream_future = None
--
2.28.0
2.29.2

View File

@ -1,32 +0,0 @@
From 7e9adda8dfd53050756d0ac0cf64570b76ce7365 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Pablo=20Su=C3=A1rez=20Hern=C3=A1ndez?=
<psuarezhernandez@suse.com>
Date: Wed, 13 Mar 2019 16:14:07 +0000
Subject: [PATCH] Do not report patches as installed when not all the
related packages are installed (bsc#1128061)
Co-authored-by: Mihai Dinca <mdinca@suse.de>
---
salt/modules/yumpkg.py | 4 ++++
1 file changed, 4 insertions(+)
diff --git a/salt/modules/yumpkg.py b/salt/modules/yumpkg.py
index b1257d0de0..3ddf989511 100644
--- a/salt/modules/yumpkg.py
+++ b/salt/modules/yumpkg.py
@@ -3220,7 +3220,11 @@ def _get_patches(installed_only=False):
for line in salt.utils.itertools.split(ret, os.linesep):
inst, advisory_id, sev, pkg = re.match(r'([i|\s]) ([^\s]+) +([^\s]+) +([^\s]+)',
line).groups()
+<<<<<<< HEAD
if advisory_id not in patches:
+=======
+ if not advisory_id in patches:
+>>>>>>> Do not report patches as installed when not all the related packages are installed (bsc#1128061)
patches[advisory_id] = {
'installed': True if inst == 'i' else False,
'summary': [pkg]
--
2.16.4

View File

@ -1,4 +1,4 @@
From c1f5e6332bf025394b81868bf1edc6ae44944a7c Mon Sep 17 00:00:00 2001
From 421988aea296ced1f8c63cfa4b517b25eedfb00c Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?C=C3=A9dric=20Bosdonnat?= <cbosdonnat@suse.com>
Date: Tue, 29 Jan 2019 09:44:03 +0100
Subject: [PATCH] Don't call zypper with more than one --no-refresh
@ -11,32 +11,32 @@ passed twice. Make sure we won't hit this.
2 files changed, 2 insertions(+), 2 deletions(-)
diff --git a/salt/modules/zypperpkg.py b/salt/modules/zypperpkg.py
index 04a6a6872d..37428cf67c 100644
index 6fa6e3e0a1..dfaaf420a1 100644
--- a/salt/modules/zypperpkg.py
+++ b/salt/modules/zypperpkg.py
@@ -282,7 +282,7 @@ class _Zypper(object):
@@ -300,7 +300,7 @@ class _Zypper:
self.__called = True
if self.__xml:
self.__cmd.append('--xmlout')
self.__cmd.append("--xmlout")
- if not self.__refresh:
+ if not self.__refresh and '--no-refresh' not in args:
self.__cmd.append('--no-refresh')
self.__cmd.extend(args)
+ if not self.__refresh and "--no-refresh" not in args:
self.__cmd.append("--no-refresh")
if self.__root:
self.__cmd.extend(["--root", self.__root])
diff --git a/tests/unit/modules/test_zypperpkg.py b/tests/unit/modules/test_zypperpkg.py
index b3162f10cd..956902eab3 100644
index 7bff7065c6..b07f9a3af7 100644
--- a/tests/unit/modules/test_zypperpkg.py
+++ b/tests/unit/modules/test_zypperpkg.py
@@ -135,7 +135,7 @@ class ZypperTestCase(TestCase, LoaderModuleMockMixin):
self.assertEqual(zypper.__zypper__.call('foo'), stdout_xml_snippet)
@@ -136,7 +136,7 @@ class ZypperTestCase(TestCase, LoaderModuleMockMixin):
self.assertEqual(zypper.__zypper__.call("foo"), stdout_xml_snippet)
self.assertEqual(len(sniffer.calls), 1)
- zypper.__zypper__.call('bar')
+ zypper.__zypper__.call('--no-refresh', 'bar')
- zypper.__zypper__.call("bar")
+ zypper.__zypper__.call("--no-refresh", "bar")
self.assertEqual(len(sniffer.calls), 2)
self.assertEqual(sniffer.calls[0]['args'][0], ['zypper', '--non-interactive', '--no-refresh', 'foo'])
self.assertEqual(sniffer.calls[1]['args'][0], ['zypper', '--non-interactive', '--no-refresh', 'bar'])
self.assertEqual(
sniffer.calls[0]["args"][0],
--
2.16.4
2.29.2

View File

@ -1,4 +1,4 @@
From e2c3b1cb72b796fe12f94af64baa2e64cbe5db0b Mon Sep 17 00:00:00 2001
From 3dc61b426cee5c40976ee25a0357fd07244a630b Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Pablo=20Su=C3=A1rez=20Hern=C3=A1ndez?=
<psuarezhernandez@suse.com>
Date: Tue, 13 Oct 2020 12:02:00 +0100
@ -9,11 +9,11 @@ Subject: [PATCH] Drop wrong mock from chroot unit test
1 file changed, 1 deletion(-)
diff --git a/tests/unit/modules/test_chroot.py b/tests/unit/modules/test_chroot.py
index 62808ed680..045d56c5b0 100644
index 196e3ad27f..a0f3f8e6af 100644
--- a/tests/unit/modules/test_chroot.py
+++ b/tests/unit/modules/test_chroot.py
@@ -83,7 +83,6 @@ class ChrootTestCase(TestCase, LoaderModuleMockMixin):
self.assertTrue(chroot.create('/chroot'))
@@ -71,7 +71,6 @@ class ChrootTestCase(TestCase, LoaderModuleMockMixin):
self.assertTrue(chroot.create("/chroot"))
makedirs.assert_called()
- @patch("salt.modules.chroot.exist")
@ -21,6 +21,6 @@ index 62808ed680..045d56c5b0 100644
def test_in_chroot(self, fopen):
"""
--
2.28.0
2.29.2

View File

@ -0,0 +1,99 @@
From 79ae019ac7515614c6fbc620e66575f015bc447d Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Pablo=20Su=C3=A1rez=20Hern=C3=A1ndez?=
<psuarezhernandez@suse.com>
Date: Tue, 5 Jan 2021 09:34:45 +0000
Subject: [PATCH] Drop wrong virt capabilities code after rebasing
patches
---
salt/modules/virt.py | 66 --------------------------------------------
1 file changed, 66 deletions(-)
diff --git a/salt/modules/virt.py b/salt/modules/virt.py
index e3960a5a90..786bfa1e58 100644
--- a/salt/modules/virt.py
+++ b/salt/modules/virt.py
@@ -143,7 +143,6 @@ import salt.utils.xmlutil as xmlutil
import salt.utils.yaml
from salt._compat import ElementTree, ipaddress, saxutils
from salt.exceptions import CommandExecutionError, SaltInvocationError
-from salt.ext import six
from salt.ext.six.moves import range # pylint: disable=import-error,redefined-builtin
from salt.ext.six.moves.urllib.parse import urlparse, urlunparse
from salt.utils.virt import check_remote, download_remote
@@ -5416,71 +5415,6 @@ def _parse_domain_caps(caps):
return result
-def _parse_domain_caps(caps):
- """
- Parse the XML document of domain capabilities into a structure.
- """
- result = {
- "emulator": caps.find("path").text if caps.find("path") is not None else None,
- "domain": caps.find("domain").text if caps.find("domain") is not None else None,
- "machine": caps.find("machine").text
- if caps.find("machine") is not None
- else None,
- "arch": caps.find("arch").text if caps.find("arch") is not None else None,
- }
-
-
-def all_capabilities(**kwargs):
- """
- Return the host and domain capabilities in a single call.
-
- .. versionadded:: 3001
-
- :param connection: libvirt connection URI, overriding defaults
- :param username: username to connect with, overriding defaults
- :param password: password to connect with, overriding defaults
-
- CLI Example:
-
- .. code-block:: bash
-
- salt '*' virt.all_capabilities
-
- """
- conn = __get_conn(**kwargs)
- try:
- host_caps = ElementTree.fromstring(conn.getCapabilities())
- domains = [
- [
- (guest.get("arch", {}).get("name", None), key)
- for key in guest.get("arch", {}).get("domains", {}).keys()
- ]
- for guest in [
- _parse_caps_guest(guest) for guest in host_caps.findall("guest")
- ]
- ]
- flattened = [pair for item in (x for x in domains) for pair in item]
- result = {
- "host": {
- "host": _parse_caps_host(host_caps.find("host")),
- "guests": [
- _parse_caps_guest(guest) for guest in host_caps.findall("guest")
- ],
- },
- "domains": [
- _parse_domain_caps(
- ElementTree.fromstring(
- conn.getDomainCapabilities(None, arch, None, domain)
- )
- )
- for (arch, domain) in flattened
- ],
- }
- return result
- finally:
- conn.close()
-
-
def domain_capabilities(emulator=None, arch=None, machine=None, domain=None, **kwargs):
"""
Return the domain capabilities given an emulator, architecture, machine or virtualization type.
--
2.29.2

File diff suppressed because it is too large Load Diff

View File

@ -1,4 +1,4 @@
From cc3bd759bc0e4cc3414ccc5a2928c593fa2eee04 Mon Sep 17 00:00:00 2001
From fec7f65b4debede8cf0eef335182fce2206e200d Mon Sep 17 00:00:00 2001
From: Maximilian Meister <mmeister@suse.de>
Date: Thu, 3 May 2018 15:52:23 +0200
Subject: [PATCH] enable passing a unix_socket for mysql returners
@ -15,14 +15,19 @@ the refactor is done upstream
Signed-off-by: Maximilian Meister <mmeister@suse.de>
---
salt/returners/mysql.py | 11 ++++++++---
1 file changed, 8 insertions(+), 3 deletions(-)
salt/returners/mysql.py | 63 ++++++++++++++++++++---------------------
1 file changed, 30 insertions(+), 33 deletions(-)
diff --git a/salt/returners/mysql.py b/salt/returners/mysql.py
index 69599ec36a..ff9d380843 100644
index b7bb05164f..4aa8aeddfa 100644
--- a/salt/returners/mysql.py
+++ b/salt/returners/mysql.py
@@ -18,6 +18,7 @@ config. These are the defaults:
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
"""
Return data to a mysql server
@@ -18,6 +17,7 @@ config. These are the defaults:
mysql.pass: 'salt'
mysql.db: 'salt'
mysql.port: 3306
@ -30,7 +35,7 @@ index 69599ec36a..ff9d380843 100644
SSL is optional. The defaults are set to None. If you do not want to use SSL,
either exclude these options or set them to None.
@@ -43,6 +44,7 @@ optional. The following ssl options are simply for illustration purposes:
@@ -43,6 +43,7 @@ optional. The following ssl options are simply for illustration purposes:
alternative.mysql.ssl_ca: '/etc/pki/mysql/certs/localhost.pem'
alternative.mysql.ssl_cert: '/etc/pki/mysql/certs/localhost.crt'
alternative.mysql.ssl_key: '/etc/pki/mysql/certs/localhost.key'
@ -38,37 +43,195 @@ index 69599ec36a..ff9d380843 100644
Should you wish the returner data to be cleaned out every so often, set
`keep_jobs` to the number of hours for the jobs to live in the tables.
@@ -198,7 +200,8 @@ def _get_options(ret=None):
'port': 3306,
'ssl_ca': None,
'ssl_cert': None,
- 'ssl_key': None}
+ 'ssl_key': None,
+ 'unix_socket': '/tmp/mysql.sock'}
@@ -138,22 +139,15 @@ To override individual configuration items, append --return_kwargs '{"key:": "va
salt '*' test.ping --return mysql --return_kwargs '{"db": "another-salt"}'
attrs = {'host': 'host',
'user': 'user',
@@ -207,7 +210,8 @@ def _get_options(ret=None):
'port': 'port',
'ssl_ca': 'ssl_ca',
'ssl_cert': 'ssl_cert',
- 'ssl_key': 'ssl_key'}
+ 'ssl_key': 'ssl_key',
+ 'unix_socket': 'unix_socket'}
"""
-from __future__ import absolute_import, print_function, unicode_literals
_options = salt.returners.get_returner_options(__virtualname__,
ret,
@@ -261,7 +265,8 @@ def _get_serv(ret=None, commit=False):
passwd=_options.get('pass'),
db=_options.get('db'),
port=_options.get('port'),
- ssl=ssl_options)
+ ssl=ssl_options,
+ unix_socket=_options.get('unix_socket'))
import logging
import sys
-
-# Import python libs
from contextlib import contextmanager
import salt.exceptions
-
-# Import salt libs
import salt.returners
import salt.utils.jid
import salt.utils.json
-
-# Import 3rd-party libs
from salt.ext import six
# Let's not allow PyLint complain about string substitution
@@ -205,6 +199,7 @@ def _get_options(ret=None):
"ssl_ca": None,
"ssl_cert": None,
"ssl_key": None,
+ "unix_socket": "/tmp/mysql.sock",
}
attrs = {
@@ -216,6 +211,7 @@ def _get_options(ret=None):
"ssl_ca": "ssl_ca",
"ssl_cert": "ssl_cert",
"ssl_key": "ssl_key",
+ "unix_socket": "unix_socket",
}
_options = salt.returners.get_returner_options(
@@ -227,8 +223,8 @@ def _get_options(ret=None):
defaults=defaults,
)
# post processing
- for k, v in six.iteritems(_options):
- if isinstance(v, six.string_types) and v.lower() == "none":
+ for k, v in _options.items():
+ if isinstance(v, str) and v.lower() == "none":
# Ensure 'None' is rendered as None
_options[k] = None
if k == "port":
@@ -274,6 +270,7 @@ def _get_serv(ret=None, commit=False):
db=_options.get("db"),
port=_options.get("port"),
ssl=ssl_options,
+ unix_socket=_options.get("unix_socket"),
)
try:
__context__['mysql_returner_conn'] = conn
@@ -291,9 +288,9 @@ def _get_serv(ret=None, commit=False):
yield cursor
except MySQLdb.DatabaseError as err:
error = err.args
- sys.stderr.write(six.text_type(error))
+ sys.stderr.write(str(error))
cursor.execute("ROLLBACK")
- six.reraise(*sys.exc_info())
+ raise
else:
if commit:
cursor.execute("COMMIT")
@@ -515,8 +512,8 @@ def _purge_jobs(timestamp):
log.error(
"mysql returner archiver was unable to delete contents of table 'jids'"
)
- log.error(six.text_type(e))
- raise salt.exceptions.SaltRunnerError(six.text_type(e))
+ log.error(str(e))
+ raise salt.exceptions.SaltRunnerError(str(e))
try:
sql = "delete from `salt_returns` where alter_time < %s"
@@ -526,8 +523,8 @@ def _purge_jobs(timestamp):
log.error(
"mysql returner archiver was unable to delete contents of table 'salt_returns'"
)
- log.error(six.text_type(e))
- raise salt.exceptions.SaltRunnerError(six.text_type(e))
+ log.error(str(e))
+ raise salt.exceptions.SaltRunnerError(str(e))
try:
sql = "delete from `salt_events` where alter_time < %s"
@@ -537,8 +534,8 @@ def _purge_jobs(timestamp):
log.error(
"mysql returner archiver was unable to delete contents of table 'salt_events'"
)
- log.error(six.text_type(e))
- raise salt.exceptions.SaltRunnerError(six.text_type(e))
+ log.error(str(e))
+ raise salt.exceptions.SaltRunnerError(str(e))
return True
@@ -556,7 +553,7 @@ def _archive_jobs(timestamp):
for table_name in source_tables:
try:
tmp_table_name = table_name + "_archive"
- sql = "create table if not exists {0} like {1}".format(
+ sql = "create table if not exists {} like {}".format(
tmp_table_name, table_name
)
cur.execute(sql)
@@ -566,11 +563,11 @@ def _archive_jobs(timestamp):
log.error(
"mysql returner archiver was unable to create the archive tables."
)
- log.error(six.text_type(e))
- raise salt.exceptions.SaltRunnerError(six.text_type(e))
+ log.error(str(e))
+ raise salt.exceptions.SaltRunnerError(str(e))
try:
- sql = "insert into `{0}` select * from `{1}` where jid in (select distinct jid from salt_returns where alter_time < %s)".format(
+ sql = "insert into `{}` select * from `{}` where jid in (select distinct jid from salt_returns where alter_time < %s)".format(
target_tables["jids"], "jids"
)
cur.execute(sql, (timestamp,))
@@ -579,14 +576,14 @@ def _archive_jobs(timestamp):
log.error(
"mysql returner archiver was unable to copy contents of table 'jids'"
)
- log.error(six.text_type(e))
- raise salt.exceptions.SaltRunnerError(six.text_type(e))
+ log.error(str(e))
+ raise salt.exceptions.SaltRunnerError(str(e))
except Exception as e: # pylint: disable=broad-except
log.error(e)
raise
try:
- sql = "insert into `{0}` select * from `{1}` where alter_time < %s".format(
+ sql = "insert into `{}` select * from `{}` where alter_time < %s".format(
target_tables["salt_returns"], "salt_returns"
)
cur.execute(sql, (timestamp,))
@@ -595,11 +592,11 @@ def _archive_jobs(timestamp):
log.error(
"mysql returner archiver was unable to copy contents of table 'salt_returns'"
)
- log.error(six.text_type(e))
- raise salt.exceptions.SaltRunnerError(six.text_type(e))
+ log.error(str(e))
+ raise salt.exceptions.SaltRunnerError(str(e))
try:
- sql = "insert into `{0}` select * from `{1}` where alter_time < %s".format(
+ sql = "insert into `{}` select * from `{}` where alter_time < %s".format(
target_tables["salt_events"], "salt_events"
)
cur.execute(sql, (timestamp,))
@@ -608,8 +605,8 @@ def _archive_jobs(timestamp):
log.error(
"mysql returner archiver was unable to copy contents of table 'salt_events'"
)
- log.error(six.text_type(e))
- raise salt.exceptions.SaltRunnerError(six.text_type(e))
+ log.error(str(e))
+ raise salt.exceptions.SaltRunnerError(str(e))
return _purge_jobs(timestamp)
@@ -623,7 +620,7 @@ def clean_old_jobs():
if __opts__.get("keep_jobs", False) and int(__opts__.get("keep_jobs", 0)) > 0:
try:
with _get_serv() as cur:
- sql = "select date_sub(now(), interval {0} hour) as stamp;".format(
+ sql = "select date_sub(now(), interval {} hour) as stamp;".format(
__opts__["keep_jobs"]
)
cur.execute(sql)
@@ -638,5 +635,5 @@ def clean_old_jobs():
log.error(
"Mysql returner was unable to get timestamp for purge/archive of jobs"
)
- log.error(six.text_type(e))
- raise salt.exceptions.SaltRunnerError(six.text_type(e))
+ log.error(str(e))
+ raise salt.exceptions.SaltRunnerError(str(e))
--
2.16.4
2.29.2

View File

@ -1,4 +1,4 @@
From 173444cecc1e7b4867570f1f8764db1b7f82061e Mon Sep 17 00:00:00 2001
From 1cea7d065d8da7c713af8136162c21187d5186f5 Mon Sep 17 00:00:00 2001
From: Cedric Bosdonnat <cbosdonnat@suse.com>
Date: Wed, 14 Oct 2020 12:39:16 +0200
Subject: [PATCH] Ensure virt.update stop_on_reboot is updated with its
@ -14,22 +14,22 @@ this value.
2 files changed, 3 insertions(+)
diff --git a/salt/modules/virt.py b/salt/modules/virt.py
index 87ab7ca12d..9bc7bc6093 100644
index 8e2180608a..e3960a5a90 100644
--- a/salt/modules/virt.py
+++ b/salt/modules/virt.py
@@ -2742,6 +2742,7 @@ def update(
@@ -2738,6 +2738,7 @@ def update(
]
data = {k: v for k, v in six.iteritems(locals()) if bool(v)}
data = {k: v for k, v in locals().items() if bool(v)}
+ data["stop_on_reboot"] = stop_on_reboot
if boot_dev:
data["boot_dev"] = {i + 1: dev for i, dev in enumerate(boot_dev.split())}
need_update = salt.utils.xmlutil.change_xml(
need_update = (
diff --git a/tests/unit/modules/test_virt.py b/tests/unit/modules/test_virt.py
index ca5e80d2d2..fbc03cf7a6 100644
index fba821ea53..83152eda6e 100644
--- a/tests/unit/modules/test_virt.py
+++ b/tests/unit/modules/test_virt.py
@@ -1778,6 +1778,7 @@ class VirtTestCase(TestCase, LoaderModuleMockMixin):
@@ -1777,6 +1777,7 @@ class VirtTestCase(TestCase, LoaderModuleMockMixin):
<memory unit='KiB'>1048576</memory>
<currentMemory unit='KiB'>1048576</currentMemory>
<vcpu placement='auto'>1</vcpu>
@ -37,7 +37,7 @@ index ca5e80d2d2..fbc03cf7a6 100644
<os>
<type arch='x86_64' machine='pc-i440fx-2.6'>hvm</type>
<boot dev="hd"/>
@@ -2350,6 +2351,7 @@ class VirtTestCase(TestCase, LoaderModuleMockMixin):
@@ -2349,6 +2350,7 @@ class VirtTestCase(TestCase, LoaderModuleMockMixin):
<memory unit='KiB'>1048576</memory>
<currentMemory unit='KiB'>1048576</currentMemory>
<vcpu placement='auto'>1</vcpu>
@ -46,6 +46,6 @@ index ca5e80d2d2..fbc03cf7a6 100644
<type arch='x86_64' machine='pc-i440fx-2.6'>hvm</type>
</os>
--
2.28.0
2.29.2

View File

@ -1,4 +1,4 @@
From f0098b4b9e5abaaca7bbc6c17f5a60bb2129dda5 Mon Sep 17 00:00:00 2001
From 188a97fc20c3e24950b82dc6fcd0da878509cf7a Mon Sep 17 00:00:00 2001
From: Maximilian Meister <mmeister@suse.de>
Date: Thu, 5 Apr 2018 13:23:23 +0200
Subject: [PATCH] fall back to PyMySQL
@ -11,10 +11,10 @@ Signed-off-by: Maximilian Meister <mmeister@suse.de>
1 file changed, 2 insertions(+), 2 deletions(-)
diff --git a/salt/modules/mysql.py b/salt/modules/mysql.py
index 87e2361e28..e785e5219c 100644
index fdfe35158a..385e4d92a3 100644
--- a/salt/modules/mysql.py
+++ b/salt/modules/mysql.py
@@ -58,7 +58,7 @@ try:
@@ -55,7 +55,7 @@ try:
import MySQLdb.cursors
import MySQLdb.converters
from MySQLdb.constants import FIELD_TYPE, FLAG
@ -23,7 +23,7 @@ index 87e2361e28..e785e5219c 100644
except ImportError:
try:
# MySQLdb import failed, try to import PyMySQL
@@ -68,7 +68,7 @@ except ImportError:
@@ -66,7 +66,7 @@ except ImportError:
import MySQLdb.cursors
import MySQLdb.converters
from MySQLdb.constants import FIELD_TYPE, FLAG
@ -33,6 +33,6 @@ index 87e2361e28..e785e5219c 100644
MySQLdb = None
--
2.16.4
2.29.2

View File

@ -1,4 +1,4 @@
From 7ad2d6067400f55dc7b70745216fab20620f35fd Mon Sep 17 00:00:00 2001
From 1e00e2b72321b5312efb7b8b426a037c8db72b79 Mon Sep 17 00:00:00 2001
From: Alberto Planas <aplanas@suse.com>
Date: Wed, 29 Jul 2020 16:11:47 +0200
Subject: [PATCH] Fix __mount_device wrapper (#254)
@ -17,9 +17,9 @@ Fix #58012
(cherry picked from commit 2089645e2478751dc795127cfd14d0385c2e0899)
---
changelog/58012.fixed | 1 +
salt/states/btrfs.py | 6 +++---
salt/states/btrfs.py | 4 ++--
tests/unit/states/test_btrfs.py | 27 +++++++++++++++++++++++++++
3 files changed, 31 insertions(+), 3 deletions(-)
3 files changed, 30 insertions(+), 2 deletions(-)
create mode 100644 changelog/58012.fixed
diff --git a/changelog/58012.fixed b/changelog/58012.fixed
@ -31,27 +31,25 @@ index 0000000000..13a1ef747d
+Fix btrfs state decorator, that produces exceptions when creating subvolumes.
\ No newline at end of file
diff --git a/salt/states/btrfs.py b/salt/states/btrfs.py
index af78c8ae00..d0d6095c46 100644
index 1374bbffb4..62a3553758 100644
--- a/salt/states/btrfs.py
+++ b/salt/states/btrfs.py
@@ -103,9 +103,9 @@ def __mount_device(action):
'''
@@ -103,8 +103,8 @@ def __mount_device(action):
@functools.wraps(action)
def wrapper(*args, **kwargs):
- name = kwargs['name']
- device = kwargs['device']
- use_default = kwargs.get('use_default', False)
- name = kwargs["name"]
- device = kwargs["device"]
+ name = kwargs.get("name", args[0] if args else None)
+ device = kwargs.get("device", args[1] if len(args) > 1 else None)
+ use_default = kwargs.get("use_default", False)
use_default = kwargs.get("use_default", False)
ret = {
'name': name,
diff --git a/tests/unit/states/test_btrfs.py b/tests/unit/states/test_btrfs.py
index c68f6279dc..c722630aef 100644
index b8f70bccfe..dceb971aa1 100644
--- a/tests/unit/states/test_btrfs.py
+++ b/tests/unit/states/test_btrfs.py
@@ -245,6 +245,33 @@ class BtrfsTestCase(TestCase, LoaderModuleMockMixin):
@@ -231,6 +231,33 @@ class BtrfsTestCase(TestCase, LoaderModuleMockMixin):
mount.assert_called_once()
umount.assert_called_once()
@ -82,10 +80,10 @@ index c68f6279dc..c722630aef 100644
+ mount.assert_called_once()
+ umount.assert_called_once()
+
@patch('salt.states.btrfs._umount')
@patch('salt.states.btrfs._mount')
@patch("salt.states.btrfs._umount")
@patch("salt.states.btrfs._mount")
def test_subvolume_created_exists_test(self, mount, umount):
--
2.27.0
2.29.2

View File

@ -1,4 +1,4 @@
From c1e66b9953c753dc9eff3652aef316e19c22deb4 Mon Sep 17 00:00:00 2001
From daf29460408a5e0eb042b3c234c7e21a6b994cf1 Mon Sep 17 00:00:00 2001
From: Alberto Planas <aplanas@suse.com>
Date: Tue, 12 May 2020 14:16:23 +0200
Subject: [PATCH] Fix a test and some variable names (#229)
@ -7,60 +7,23 @@ Subject: [PATCH] Fix a test and some variable names (#229)
* Fix test_core tests for fqdns errors
---
salt/modules/network.py | 2 +-
tests/unit/grains/test_core.py | 24 +++++++++++++-----------
2 files changed, 14 insertions(+), 12 deletions(-)
tests/unit/grains/test_core.py | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/salt/modules/network.py b/salt/modules/network.py
index 880f4f8d5f..9e11eb816e 100644
--- a/salt/modules/network.py
+++ b/salt/modules/network.py
@@ -1946,4 +1946,4 @@ def fqdns():
elapsed = time.time() - start
log.debug('Elapsed time getting FQDNs: {} seconds'.format(elapsed))
- return {"fqdns": sorted(list(fqdns))}
\ No newline at end of file
+ return {"fqdns": sorted(list(fqdns))}
diff --git a/tests/unit/grains/test_core.py b/tests/unit/grains/test_core.py
index 94e4199814..36aa49f232 100644
index 196dbcf83d..918a9155cb 100644
--- a/tests/unit/grains/test_core.py
+++ b/tests/unit/grains/test_core.py
@@ -1122,20 +1122,22 @@ class CoreGrainsTestCase(TestCase, LoaderModuleMockMixin):
for errno in (0, core.HOST_NOT_FOUND, core.NO_DATA):
mock_log = MagicMock()
+ with patch.dict(core.__salt__, {'network.fqdns': salt.modules.network.fqdns}):
+ with patch.object(socket, 'gethostbyaddr',
+ side_effect=_gen_gethostbyaddr(errno)):
+ with patch('salt.modules.network.log', mock_log):
+ self.assertEqual(core.fqdns(), {'fqdns': []})
+ mock_log.debug.assert_called()
+ mock_log.error.assert_not_called()
+
+ mock_log = MagicMock()
+ with patch.dict(core.__salt__, {'network.fqdns': salt.modules.network.fqdns}):
with patch.object(socket, 'gethostbyaddr',
- side_effect=_gen_gethostbyaddr(errno)):
- with patch('salt.grains.core.log', mock_log):
+ side_effect=_gen_gethostbyaddr(-1)):
+ with patch('salt.modules.network.log', mock_log):
self.assertEqual(core.fqdns(), {'fqdns': []})
@@ -1416,7 +1416,7 @@ class CoreGrainsTestCase(TestCase, LoaderModuleMockMixin):
with patch("salt.modules.network.log", mock_log):
self.assertEqual(core.fqdns(), {"fqdns": []})
mock_log.debug.assert_called_once()
- mock_log.error.assert_not_called()
-
- mock_log = MagicMock()
- with patch.object(socket, 'gethostbyaddr',
- side_effect=_gen_gethostbyaddr(-1)):
- with patch('salt.grains.core.log', mock_log):
- self.assertEqual(core.fqdns(), {'fqdns': []})
- mock_log.debug.assert_not_called()
- mock_log.error.assert_called_once()
- mock_log.error.assert_called()
+ mock_log.error.assert_called_once()
@patch.object(salt.utils.platform, 'is_windows', MagicMock(return_value=False))
@patch('salt.utils.network.ip_addrs', MagicMock(return_value=['1.2.3.4', '5.6.7.8']))
@patch.object(salt.utils.platform, "is_windows", MagicMock(return_value=False))
@patch(
--
2.26.2
2.29.2

View File

@ -1,4 +1,4 @@
From 67830ea17ae1e87a6bffca2a9542788c200d7dd9 Mon Sep 17 00:00:00 2001
From 3d5f3cff6b43d7aba35063e970d016401bb82921 Mon Sep 17 00:00:00 2001
From: Alberto Planas <aplanas@gmail.com>
Date: Fri, 25 Oct 2019 15:43:16 +0200
Subject: [PATCH] Fix a wrong rebase in test_core.py (#180)
@ -17,89 +17,128 @@ This patch ignore this kind of issue during the grains creation.
(cherry picked from commit bd0213bae00b737b24795bec3c030ebfe476e0d8)
---
salt/grains/core.py | 4 ++--
tests/unit/grains/test_core.py | 45 ------------------------------------------
2 files changed, 2 insertions(+), 47 deletions(-)
salt/grains/core.py | 8 +++-
tests/unit/grains/test_core.py | 80 ----------------------------------
2 files changed, 6 insertions(+), 82 deletions(-)
diff --git a/salt/grains/core.py b/salt/grains/core.py
index 68c43482d3..20950988d9 100644
index a2983e388b..5dff6ecfd4 100644
--- a/salt/grains/core.py
+++ b/salt/grains/core.py
@@ -1000,7 +1000,7 @@ def _virtual(osdata):
@@ -1066,7 +1066,9 @@ def _virtual(osdata):
except UnicodeDecodeError:
# Some firmwares provide non-valid 'product_name'
# files, ignore them
- pass
+ log.debug('The content in /sys/devices/virtual/dmi/id/product_name is not valid')
except IOError:
+ log.debug(
+ "The content in /sys/devices/virtual/dmi/id/product_name is not valid"
+ )
except OSError:
pass
elif osdata['kernel'] == 'FreeBSD':
@@ -2568,7 +2568,7 @@ def _hw_data(osdata):
elif osdata["kernel"] == "FreeBSD":
@@ -2716,7 +2718,9 @@ def _hw_data(osdata):
except UnicodeDecodeError:
# Some firmwares provide non-valid 'product_name'
# files, ignore them
- pass
+ log.debug('The content in /sys/devices/virtual/dmi/id/product_name is not valid')
except (IOError, OSError) as err:
+ log.debug(
+ "The content in /sys/devices/virtual/dmi/id/product_name is not valid"
+ )
except OSError as err:
# PermissionError is new to Python 3, but corresponds to the EACESS and
# EPERM error numbers. Use those instead here for PY2 compatibility.
diff --git a/tests/unit/grains/test_core.py b/tests/unit/grains/test_core.py
index 33d6a9507f..7fa2436e58 100644
index 0dc3423646..85d434dd9d 100644
--- a/tests/unit/grains/test_core.py
+++ b/tests/unit/grains/test_core.py
@@ -1560,51 +1560,6 @@ class CoreGrainsTestCase(TestCase, LoaderModuleMockMixin):
assert all([x is not None for x in info])
assert all([isinstance(x, int) for x in info])
@@ -2047,86 +2047,6 @@ class CoreGrainsTestCase(TestCase, LoaderModuleMockMixin):
result = core.path()
assert result == {"path": path, "systempath": comps}, result
- @skipIf(not salt.utils.platform.is_linux(), 'System is not Linux')
- @skipIf(not salt.utils.platform.is_linux(), "System is not Linux")
- @patch("os.path.exists")
- @patch("salt.utils.platform.is_proxy")
- def test_kernelparams_return(self):
- expectations = [
- ('BOOT_IMAGE=/vmlinuz-3.10.0-693.2.2.el7.x86_64',
- {'kernelparams': [('BOOT_IMAGE', '/vmlinuz-3.10.0-693.2.2.el7.x86_64')]}),
- ('root=/dev/mapper/centos_daemon-root',
- {'kernelparams': [('root', '/dev/mapper/centos_daemon-root')]}),
- ('rhgb quiet ro',
- {'kernelparams': [('rhgb', None), ('quiet', None), ('ro', None)]}),
- ('param="value1"',
- {'kernelparams': [('param', 'value1')]}),
- ('param="value1 value2 value3"',
- {'kernelparams': [('param', 'value1 value2 value3')]}),
- ('param="value1 value2 value3" LANG="pl" ro',
- {'kernelparams': [('param', 'value1 value2 value3'), ('LANG', 'pl'), ('ro', None)]}),
- ('ipv6.disable=1',
- {'kernelparams': [('ipv6.disable', '1')]}),
- ('param="value1:value2:value3"',
- {'kernelparams': [('param', 'value1:value2:value3')]}),
- ('param="value1,value2,value3"',
- {'kernelparams': [('param', 'value1,value2,value3')]}),
- ('param="value1" param="value2" param="value3"',
- {'kernelparams': [('param', 'value1'), ('param', 'value2'), ('param', 'value3')]}),
- (
- "BOOT_IMAGE=/vmlinuz-3.10.0-693.2.2.el7.x86_64",
- {
- "kernelparams": [
- ("BOOT_IMAGE", "/vmlinuz-3.10.0-693.2.2.el7.x86_64")
- ]
- },
- ),
- (
- "root=/dev/mapper/centos_daemon-root",
- {"kernelparams": [("root", "/dev/mapper/centos_daemon-root")]},
- ),
- (
- "rhgb quiet ro",
- {"kernelparams": [("rhgb", None), ("quiet", None), ("ro", None)]},
- ),
- ('param="value1"', {"kernelparams": [("param", "value1")]}),
- (
- 'param="value1 value2 value3"',
- {"kernelparams": [("param", "value1 value2 value3")]},
- ),
- (
- 'param="value1 value2 value3" LANG="pl" ro',
- {
- "kernelparams": [
- ("param", "value1 value2 value3"),
- ("LANG", "pl"),
- ("ro", None),
- ]
- },
- ),
- ("ipv6.disable=1", {"kernelparams": [("ipv6.disable", "1")]}),
- (
- 'param="value1:value2:value3"',
- {"kernelparams": [("param", "value1:value2:value3")]},
- ),
- (
- 'param="value1,value2,value3"',
- {"kernelparams": [("param", "value1,value2,value3")]},
- ),
- (
- 'param="value1" param="value2" param="value3"',
- {
- "kernelparams": [
- ("param", "value1"),
- ("param", "value2"),
- ("param", "value3"),
- ]
- },
- ),
- ]
-
- for cmdline, expectation in expectations:
- with patch('salt.utils.files.fopen', mock_open(read_data=cmdline)):
- with patch("salt.utils.files.fopen", mock_open(read_data=cmdline)):
- self.assertEqual(core.kernelparams(), expectation)
-
- @skipIf(not salt.utils.platform.is_linux(), 'System is not Linux')
- @patch('os.path.exists')
- @patch('salt.utils.platform.is_proxy')
- @skipIf(not salt.utils.platform.is_linux(), "System is not Linux")
- @patch("os.path.exists")
- @patch("salt.utils.platform.is_proxy")
- def test__hw_data_linux_empty(self, is_proxy, exists):
- is_proxy.return_value = False
- exists.return_value = True
- with patch('salt.utils.files.fopen', mock_open(read_data='')):
- self.assertEqual(core._hw_data({'kernel': 'Linux'}), {
- 'biosreleasedate': '',
- 'biosversion': '',
- 'manufacturer': '',
- 'productname': '',
- 'serialnumber': '',
- 'uuid': ''
- })
- with patch("salt.utils.files.fopen", mock_open(read_data="")):
- self.assertEqual(
- core._hw_data({"kernel": "Linux"}),
- {
- "biosreleasedate": "",
- "biosversion": "",
- "manufacturer": "",
- "productname": "",
- "serialnumber": "",
- "uuid": "",
- },
- )
-
@skipIf(not salt.utils.platform.is_linux(), 'System is not Linux')
@skipIf(six.PY2, 'UnicodeDecodeError is throw in Python 3')
@patch('os.path.exists')
@skipIf(not salt.utils.platform.is_linux(), "System is not Linux")
@skipIf(six.PY2, "UnicodeDecodeError is throw in Python 3")
@patch("os.path.exists")
--
2.16.4
2.29.2

View File

@ -1,40 +1,41 @@
From c2989e749f04aa8477130df649e550f5349a9a1f Mon Sep 17 00:00:00 2001
From 5dadda6822323f409c99112244c2c809e58126e1 Mon Sep 17 00:00:00 2001
From: Mihai Dinca <mdinca@suse.de>
Date: Wed, 31 Jul 2019 15:29:03 +0200
Subject: [PATCH] Fix aptpkg systemd call (bsc#1143301)
---
salt/modules/aptpkg.py | 2 +-
tests/unit/modules/test_aptpkg.py | 2 +-
2 files changed, 2 insertions(+), 2 deletions(-)
tests/unit/modules/test_aptpkg.py | 3 +--
2 files changed, 2 insertions(+), 3 deletions(-)
diff --git a/salt/modules/aptpkg.py b/salt/modules/aptpkg.py
index 13484c96bc..a5b039fc79 100644
index bf90d0614f..c47ee852f4 100644
--- a/salt/modules/aptpkg.py
+++ b/salt/modules/aptpkg.py
@@ -168,7 +168,7 @@ def _call_apt(args, scope=True, **kwargs):
'''
cmd = []
if scope and salt.utils.systemd.has_scope(__context__) and __salt__['config.get']('systemd.scope', True):
- cmd.extend(['systemd-run', '--scope'])
+ cmd.extend(['systemd-run', '--scope', '--description "{0}"'.format(__name__)])
@@ -160,7 +160,7 @@ def _call_apt(args, scope=True, **kwargs):
and salt.utils.systemd.has_scope(__context__)
and __salt__["config.get"]("systemd.scope", True)
):
- cmd.extend(["systemd-run", "--scope", "--description", '"{}"'.format(__name__)])
+ cmd.extend(["systemd-run", "--scope", '--description "{}"'.format(__name__)])
cmd.extend(args)
params = {'output_loglevel': 'trace',
params = {
diff --git a/tests/unit/modules/test_aptpkg.py b/tests/unit/modules/test_aptpkg.py
index 10e960f090..88eed062c4 100644
index 77d8b84896..c3769a7df1 100644
--- a/tests/unit/modules/test_aptpkg.py
+++ b/tests/unit/modules/test_aptpkg.py
@@ -645,7 +645,7 @@ class AptUtilsTestCase(TestCase, LoaderModuleMockMixin):
with patch.dict(aptpkg.__salt__, {'cmd.run_all': MagicMock(), 'config.get': MagicMock(return_value=True)}):
aptpkg._call_apt(['apt-get', 'purge', 'vim']) # pylint: disable=W0106
aptpkg.__salt__['cmd.run_all'].assert_called_once_with(
- ['systemd-run', '--scope', 'apt-get', 'purge', 'vim'], env={},
+ ['systemd-run', '--scope', '--description "salt.modules.aptpkg"', 'apt-get', 'purge', 'vim'], env={},
output_loglevel='trace', python_shell=False)
def test_call_apt_with_kwargs(self):
@@ -896,8 +896,7 @@ class AptUtilsTestCase(TestCase, LoaderModuleMockMixin):
[
"systemd-run",
"--scope",
- "--description",
- '"salt.modules.aptpkg"',
+ '--description "salt.modules.aptpkg"',
"apt-get",
"purge",
"vim",
--
2.16.4
2.29.2

View File

@ -0,0 +1,42 @@
From 763d63b72b9a20f22555b665033899e10f091b60 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Pablo=20Su=C3=A1rez=20Hern=C3=A1ndez?=
<psuarezhernandez@suse.com>
Date: Mon, 11 Jan 2021 15:45:28 +0000
Subject: [PATCH] Fix aptpkg.normalize_name when package arch is 'all'
Add test case of DEB package where arch is 'all'
---
salt/modules/aptpkg.py | 2 +-
tests/unit/modules/test_aptpkg.py | 2 ++
2 files changed, 3 insertions(+), 1 deletion(-)
diff --git a/salt/modules/aptpkg.py b/salt/modules/aptpkg.py
index e001d2f11c..03e99af733 100644
--- a/salt/modules/aptpkg.py
+++ b/salt/modules/aptpkg.py
@@ -208,7 +208,7 @@ def normalize_name(name):
pkgname = name
pkgarch = __grains__["osarch"]
- return pkgname if pkgarch in (__grains__["osarch"], "any") else name
+ return pkgname if pkgarch in (__grains__["osarch"], "all", "any") else name
def parse_arch(name):
diff --git a/tests/unit/modules/test_aptpkg.py b/tests/unit/modules/test_aptpkg.py
index 51dfce29eb..eb3f9e2da7 100644
--- a/tests/unit/modules/test_aptpkg.py
+++ b/tests/unit/modules/test_aptpkg.py
@@ -808,6 +808,8 @@ class AptPkgTestCase(TestCase, LoaderModuleMockMixin):
assert result == "foo", result
result = aptpkg.normalize_name("foo:any")
assert result == "foo", result
+ result = aptpkg.normalize_name("foo:all")
+ assert result == "foo", result
result = aptpkg.normalize_name("foo:i386")
assert result == "foo:i386", result
--
2.29.2

View File

@ -1,55 +1,57 @@
From 42d7e1de2c69d82447e73eab483e5d3c299d55f7 Mon Sep 17 00:00:00 2001
From 85b8666b138cab170327f0217c799277371b2e80 Mon Sep 17 00:00:00 2001
From: Mihai Dinca <mdinca@suse.de>
Date: Tue, 7 May 2019 12:24:35 +0200
Subject: [PATCH] Fix async-batch multiple done events
---
salt/cli/batch_async.py | 17 ++++++++++++-----
salt/cli/batch_async.py | 19 ++++++++++++-------
tests/unit/cli/test_batch_async.py | 20 +++++++++++++-------
2 files changed, 25 insertions(+), 12 deletions(-)
2 files changed, 25 insertions(+), 14 deletions(-)
diff --git a/salt/cli/batch_async.py b/salt/cli/batch_async.py
index 9c20b2fc6e..8c8f481e34 100644
index b0ab9d9f47..7225491228 100644
--- a/salt/cli/batch_async.py
+++ b/salt/cli/batch_async.py
@@ -84,6 +84,7 @@ class BatchAsync(object):
listen=True,
@@ -86,6 +86,7 @@ class BatchAsync:
io_loop=ioloop,
keep_loop=True)
keep_loop=True,
)
+ self.scheduled = False
def __set_event_handler(self):
ping_return_pattern = 'salt/job/{0}/ret/*'.format(self.ping_jid)
@@ -116,8 +117,7 @@ class BatchAsync(object):
ping_return_pattern = "salt/job/{}/ret/*".format(self.ping_jid)
@@ -118,10 +119,7 @@ class BatchAsync:
if minion in self.active:
self.active.remove(minion)
self.done_minions.add(minion)
- # call later so that we maybe gather more returns
- self.event.io_loop.call_later(self.batch_delay, self.schedule_next)
- self.event.io_loop.call_later(
- self.batch_delay, self.schedule_next
- )
+ self.schedule_next()
def _get_next(self):
to_run = self.minions.difference(
@@ -137,7 +137,7 @@ class BatchAsync(object):
self.active = self.active.difference(self.timedout_minions)
running = batch_minions.difference(self.done_minions).difference(self.timedout_minions)
to_run = (
@@ -146,7 +144,7 @@ class BatchAsync:
self.timedout_minions
)
if timedout_minions:
- self.event.io_loop.call_later(self.batch_delay, self.schedule_next)
+ self.schedule_next()
if running:
self.event.io_loop.add_callback(self.find_job, running)
@@ -189,7 +189,7 @@ class BatchAsync(object):
"metadata": self.metadata
@@ -197,7 +195,7 @@ class BatchAsync:
"metadata": self.metadata,
}
self.event.fire_event(data, "salt/batch/{0}/start".format(self.batch_jid))
self.event.fire_event(data, "salt/batch/{}/start".format(self.batch_jid))
- yield self.schedule_next()
+ yield self.run_next()
def end_batch(self):
left = self.minions.symmetric_difference(self.done_minions.union(self.timedout_minions))
@@ -204,8 +204,14 @@ class BatchAsync(object):
self.event.fire_event(data, "salt/batch/{0}/done".format(self.batch_jid))
left = self.minions.symmetric_difference(
@@ -214,8 +212,14 @@ class BatchAsync:
self.event.fire_event(data, "salt/batch/{}/done".format(self.batch_jid))
self.event.remove_event_handler(self.__event_handler)
- @tornado.gen.coroutine
@ -64,16 +66,16 @@ index 9c20b2fc6e..8c8f481e34 100644
next_batch = self._get_next()
if next_batch:
self.active = self.active.union(next_batch)
@@ -225,3 +231,4 @@ class BatchAsync(object):
@@ -238,3 +242,4 @@ class BatchAsync:
self.active = self.active.difference(next_batch)
else:
self.end_batch()
+ self.scheduled = False
diff --git a/tests/unit/cli/test_batch_async.py b/tests/unit/cli/test_batch_async.py
index d519157d92..441f9c58b9 100644
index d6a4bfcf60..66332a548a 100644
--- a/tests/unit/cli/test_batch_async.py
+++ b/tests/unit/cli/test_batch_async.py
@@ -111,14 +111,14 @@ class AsyncBatchTestCase(AsyncTestCase, TestCase):
@@ -105,14 +105,14 @@ class AsyncBatchTestCase(AsyncTestCase, TestCase):
@tornado.testing.gen_test
def test_start_batch_calls_next(self):
@ -90,27 +92,27 @@ index d519157d92..441f9c58b9 100644
+ self.assertEqual(len(self.batch.run_next.mock_calls), 1)
def test_batch_fire_done_event(self):
self.batch.targeted_minions = {'foo', 'baz', 'bar'}
@@ -154,7 +154,7 @@ class AsyncBatchTestCase(AsyncTestCase, TestCase):
self.batch.targeted_minions = {"foo", "baz", "bar"}
@@ -147,7 +147,7 @@ class AsyncBatchTestCase(AsyncTestCase, TestCase):
future = tornado.gen.Future()
future.set_result({'minions': ['foo', 'bar']})
future.set_result({"minions": ["foo", "bar"]})
self.batch.local.run_job_async.return_value = future
- ret = self.batch.schedule_next().result()
+ ret = self.batch.run_next().result()
self.assertEqual(
self.batch.local.run_job_async.call_args[0],
({'foo', 'bar'}, 'my.fun', [], 'list')
@@ -253,7 +253,7 @@ class AsyncBatchTestCase(AsyncTestCase, TestCase):
self.assertEqual(self.batch.done_minions, {'foo'})
({"foo", "bar"}, "my.fun", [], "list"),
@@ -250,7 +250,7 @@ class AsyncBatchTestCase(AsyncTestCase, TestCase):
self.assertEqual(self.batch.done_minions, {"foo"})
self.assertEqual(
self.batch.event.io_loop.call_later.call_args[0],
- (self.batch.batch_delay, self.batch.schedule_next))
+ (self.batch.batch_delay, self.batch.run_next))
- (self.batch.batch_delay, self.batch.schedule_next),
+ (self.batch.batch_delay, self.batch.run_next),
)
def test_batch__event_handler_find_job_return(self):
self.batch.event = MagicMock(
@@ -263,10 +263,10 @@ class AsyncBatchTestCase(AsyncTestCase, TestCase):
self.assertEqual(self.batch.find_job_returned, {'foo'})
@@ -262,10 +262,10 @@ class AsyncBatchTestCase(AsyncTestCase, TestCase):
self.assertEqual(self.batch.find_job_returned, {"foo"})
@tornado.testing.gen_test
- def test_batch_schedule_next_end_batch_when_no_next(self):
@ -122,9 +124,9 @@ index d519157d92..441f9c58b9 100644
self.assertEqual(len(self.batch.end_batch.mock_calls), 1)
@tornado.testing.gen_test
@@ -342,3 +342,9 @@ class AsyncBatchTestCase(AsyncTestCase, TestCase):
@@ -345,3 +345,9 @@ class AsyncBatchTestCase(AsyncTestCase, TestCase):
self.batch.event.io_loop.add_callback.call_args[0],
(self.batch.find_job, {'foo'})
(self.batch.find_job, {"foo"}),
)
+
+ def test_only_on_run_next_is_scheduled(self):
@ -133,6 +135,6 @@ index d519157d92..441f9c58b9 100644
+ self.batch.schedule_next()
+ self.assertEqual(len(self.batch.event.io_loop.call_later.mock_calls), 0)
--
2.16.4
2.29.2

View File

@ -1,19 +1,19 @@
From dc001cb47fd88a8e8a1bd82a1457325822d1220b Mon Sep 17 00:00:00 2001
From 4b3badeb52a9de10d6085ee3cc7598a827d1e68f Mon Sep 17 00:00:00 2001
From: Mihai Dinca <mdinca@suse.de>
Date: Thu, 11 Apr 2019 15:57:59 +0200
Subject: [PATCH] Fix async batch race conditions
Close batching when there is no next batch
---
salt/cli/batch_async.py | 80 +++++++++++++++++++-------------------
tests/unit/cli/test_batch_async.py | 35 +++++++----------
2 files changed, 54 insertions(+), 61 deletions(-)
salt/cli/batch_async.py | 96 +++++++++++++++---------------
tests/unit/cli/test_batch_async.py | 38 +++++-------
2 files changed, 62 insertions(+), 72 deletions(-)
diff --git a/salt/cli/batch_async.py b/salt/cli/batch_async.py
index 3160d46d8b..9c20b2fc6e 100644
index 1557e5105b..b0ab9d9f47 100644
--- a/salt/cli/batch_async.py
+++ b/salt/cli/batch_async.py
@@ -37,14 +37,14 @@ class BatchAsync(object):
@@ -32,14 +32,14 @@ class BatchAsync:
- tag: salt/batch/<batch-jid>/start
- data: {
"available_minions": self.minions,
@ -30,36 +30,38 @@ index 3160d46d8b..9c20b2fc6e 100644
"done_minions": self.done_minions,
"timedout_minions": self.timedout_minions
}
@@ -67,7 +67,7 @@ class BatchAsync(object):
self.eauth = batch_get_eauth(clear_load['kwargs'])
self.metadata = clear_load['kwargs'].get('metadata', {})
@@ -68,7 +68,7 @@ class BatchAsync:
self.eauth = batch_get_eauth(clear_load["kwargs"])
self.metadata = clear_load["kwargs"].get("metadata", {})
self.minions = set()
- self.down_minions = set()
+ self.targeted_minions = set()
self.timedout_minions = set()
self.done_minions = set()
self.active = set()
@@ -108,8 +108,7 @@ class BatchAsync(object):
minion = data['id']
if op == 'ping_return':
@@ -110,8 +110,7 @@ class BatchAsync:
minion = data["id"]
if op == "ping_return":
self.minions.add(minion)
- self.down_minions.remove(minion)
- if not self.down_minions:
+ if self.targeted_minions == self.minions:
self.event.io_loop.spawn_callback(self.start_batch)
elif op == 'find_job_return':
elif op == "find_job_return":
self.find_job_returned.add(minion)
@@ -120,9 +119,6 @@ class BatchAsync(object):
# call later so that we maybe gather more returns
self.event.io_loop.call_later(self.batch_delay, self.schedule_next)
@@ -124,11 +123,6 @@ class BatchAsync:
self.batch_delay, self.schedule_next
)
- if self.initialized and self.done_minions == self.minions.difference(self.timedout_minions):
- if self.initialized and self.done_minions == self.minions.difference(
- self.timedout_minions
- ):
- self.end_batch()
-
def _get_next(self):
to_run = self.minions.difference(
self.done_minions).difference(
@@ -135,16 +131,13 @@ class BatchAsync(object):
to_run = (
self.minions.difference(self.done_minions)
@@ -142,20 +136,17 @@ class BatchAsync:
return set(list(to_run)[:next_batch_size])
@tornado.gen.coroutine
@ -72,35 +74,42 @@ index 3160d46d8b..9c20b2fc6e 100644
- if minion in self.active:
- self.active.remove(minion)
- self.timedout_minions.add(minion)
- running = minions.difference(did_not_return).difference(self.done_minions).difference(self.timedout_minions)
- running = (
- minions.difference(did_not_return)
- .difference(self.done_minions)
- .difference(self.timedout_minions)
+ def check_find_job(self, batch_minions):
+ timedout_minions = batch_minions.difference(self.find_job_returned).difference(self.done_minions)
+ timedout_minions = batch_minions.difference(self.find_job_returned).difference(
+ self.done_minions
)
+ self.timedout_minions = self.timedout_minions.union(timedout_minions)
+ self.active = self.active.difference(self.timedout_minions)
+ running = batch_minions.difference(self.done_minions).difference(self.timedout_minions)
+ running = batch_minions.difference(self.done_minions).difference(
+ self.timedout_minions
+ )
+ if timedout_minions:
+ self.event.io_loop.call_later(self.batch_delay, self.schedule_next)
if running:
self.event.io_loop.add_callback(self.find_job, running)
@@ -183,7 +176,7 @@ class BatchAsync(object):
jid=self.ping_jid,
@@ -193,7 +184,7 @@ class BatchAsync:
metadata=self.metadata,
**self.eauth)
- self.down_minions = set(ping_return['minions'])
+ self.targeted_minions = set(ping_return['minions'])
**self.eauth
)
- self.down_minions = set(ping_return["minions"])
+ self.targeted_minions = set(ping_return["minions"])
@tornado.gen.coroutine
def start_batch(self):
@@ -192,36 +185,43 @@ class BatchAsync(object):
@@ -202,39 +193,48 @@ class BatchAsync:
self.initialized = True
data = {
"available_minions": self.minions,
- "down_minions": self.down_minions,
+ "down_minions": self.targeted_minions.difference(self.minions),
"metadata": self.metadata
"metadata": self.metadata,
}
self.event.fire_event(data, "salt/batch/{0}/start".format(self.batch_jid))
self.event.fire_event(data, "salt/batch/{}/start".format(self.batch_jid))
yield self.schedule_next()
def end_batch(self):
@ -109,20 +118,22 @@ index 3160d46d8b..9c20b2fc6e 100644
- "down_minions": self.down_minions,
- "done_minions": self.done_minions,
- "timedout_minions": self.timedout_minions,
- "metadata": self.metadata
- "metadata": self.metadata,
- }
- self.event.fire_event(data, "salt/batch/{0}/done".format(self.batch_jid))
- self.event.fire_event(data, "salt/batch/{}/done".format(self.batch_jid))
- self.event.remove_event_handler(self.__event_handler)
+ left = self.minions.symmetric_difference(self.done_minions.union(self.timedout_minions))
+ left = self.minions.symmetric_difference(
+ self.done_minions.union(self.timedout_minions)
+ )
+ if not left:
+ data = {
+ "available_minions": self.minions,
+ "down_minions": self.targeted_minions.difference(self.minions),
+ "done_minions": self.done_minions,
+ "timedout_minions": self.timedout_minions,
+ "metadata": self.metadata
+ "metadata": self.metadata,
+ }
+ self.event.fire_event(data, "salt/batch/{0}/done".format(self.batch_jid))
+ self.event.fire_event(data, "salt/batch/{}/done".format(self.batch_jid))
+ self.event.remove_event_handler(self.__event_handler)
@tornado.gen.coroutine
@ -131,116 +142,125 @@ index 3160d46d8b..9c20b2fc6e 100644
if next_batch:
- yield self.local.run_job_async(
- next_batch,
- self.opts['fun'],
- self.opts['arg'],
- 'list',
- raw=self.opts.get('raw', False),
- ret=self.opts.get('return', ''),
- gather_job_timeout=self.opts['gather_job_timeout'],
- self.opts["fun"],
- self.opts["arg"],
- "list",
- raw=self.opts.get("raw", False),
- ret=self.opts.get("return", ""),
- gather_job_timeout=self.opts["gather_job_timeout"],
- jid=self.batch_jid,
- metadata=self.metadata)
- self.event.io_loop.call_later(self.opts['timeout'], self.find_job, set(next_batch))
- metadata=self.metadata,
- )
- self.event.io_loop.call_later(
- self.opts["timeout"], self.find_job, set(next_batch)
- )
self.active = self.active.union(next_batch)
+ try:
+ yield self.local.run_job_async(
+ next_batch,
+ self.opts['fun'],
+ self.opts['arg'],
+ 'list',
+ raw=self.opts.get('raw', False),
+ ret=self.opts.get('return', ''),
+ gather_job_timeout=self.opts['gather_job_timeout'],
+ self.opts["fun"],
+ self.opts["arg"],
+ "list",
+ raw=self.opts.get("raw", False),
+ ret=self.opts.get("return", ""),
+ gather_job_timeout=self.opts["gather_job_timeout"],
+ jid=self.batch_jid,
+ metadata=self.metadata)
+ self.event.io_loop.call_later(self.opts['timeout'], self.find_job, set(next_batch))
+ metadata=self.metadata,
+ )
+ self.event.io_loop.call_later(
+ self.opts["timeout"], self.find_job, set(next_batch)
+ )
+ except Exception as ex:
+ self.active = self.active.difference(next_batch)
+ else:
+ self.end_batch()
diff --git a/tests/unit/cli/test_batch_async.py b/tests/unit/cli/test_batch_async.py
index f65b6a06c3..d519157d92 100644
index 3f8626a2dd..d6a4bfcf60 100644
--- a/tests/unit/cli/test_batch_async.py
+++ b/tests/unit/cli/test_batch_async.py
@@ -75,8 +75,8 @@ class AsyncBatchTestCase(AsyncTestCase, TestCase):
self.batch.local.run_job_async.call_args[0],
('*', 'test.ping', [], 'glob')
@@ -68,8 +68,8 @@ class AsyncBatchTestCase(AsyncTestCase, TestCase):
self.assertEqual(
self.batch.local.run_job_async.call_args[0], ("*", "test.ping", [], "glob")
)
- # assert down_minions == all minions matched by tgt
- self.assertEqual(self.batch.down_minions, set(['foo', 'bar']))
- self.assertEqual(self.batch.down_minions, {"foo", "bar"})
+ # assert targeted_minions == all minions matched by tgt
+ self.assertEqual(self.batch.targeted_minions, set(['foo', 'bar']))
+ self.assertEqual(self.batch.targeted_minions, {"foo", "bar"})
@tornado.testing.gen_test
def test_batch_start_on_gather_job_timeout(self):
@@ -121,7 +121,10 @@ class AsyncBatchTestCase(AsyncTestCase, TestCase):
@@ -115,7 +115,10 @@ class AsyncBatchTestCase(AsyncTestCase, TestCase):
self.assertEqual(len(self.batch.schedule_next.mock_calls), 1)
def test_batch_fire_done_event(self):
+ self.batch.targeted_minions = {'foo', 'baz', 'bar'}
self.batch.minions = set(['foo', 'bar'])
+ self.batch.done_minions = {'foo'}
+ self.batch.timedout_minions = {'bar'}
+ self.batch.targeted_minions = {"foo", "baz", "bar"}
self.batch.minions = {"foo", "bar"}
+ self.batch.done_minions = {"foo"}
+ self.batch.timedout_minions = {"bar"}
self.batch.event = MagicMock()
self.batch.metadata = {'mykey': 'myvalue'}
self.batch.metadata = {"mykey": "myvalue"}
self.batch.end_batch()
@@ -130,9 +133,9 @@ class AsyncBatchTestCase(AsyncTestCase, TestCase):
@@ -124,9 +127,9 @@ class AsyncBatchTestCase(AsyncTestCase, TestCase):
(
{
'available_minions': set(['foo', 'bar']),
- 'done_minions': set(),
- 'down_minions': set(),
- 'timedout_minions': set(),
+ 'done_minions': self.batch.done_minions,
+ 'down_minions': {'baz'},
+ 'timedout_minions': self.batch.timedout_minions,
'metadata': self.batch.metadata
"available_minions": {"foo", "bar"},
- "done_minions": set(),
- "down_minions": set(),
- "timedout_minions": set(),
+ "done_minions": self.batch.done_minions,
+ "down_minions": {"baz"},
+ "timedout_minions": self.batch.timedout_minions,
"metadata": self.batch.metadata,
},
"salt/batch/1235/done"
@@ -212,7 +215,7 @@ class AsyncBatchTestCase(AsyncTestCase, TestCase):
"salt/batch/1235/done",
@@ -205,7 +208,7 @@ class AsyncBatchTestCase(AsyncTestCase, TestCase):
self.assertEqual(self.batch._get_next(), set())
def test_batch__event_handler_ping_return(self):
- self.batch.down_minions = {'foo'}
+ self.batch.targeted_minions = {'foo'}
- self.batch.down_minions = {"foo"}
+ self.batch.targeted_minions = {"foo"}
self.batch.event = MagicMock(
unpack=MagicMock(return_value=('salt/job/1234/ret/foo', {'id': 'foo'})))
self.batch.start()
@@ -222,7 +225,7 @@ class AsyncBatchTestCase(AsyncTestCase, TestCase):
unpack=MagicMock(return_value=("salt/job/1234/ret/foo", {"id": "foo"}))
)
@@ -216,7 +219,7 @@ class AsyncBatchTestCase(AsyncTestCase, TestCase):
self.assertEqual(self.batch.done_minions, set())
def test_batch__event_handler_call_start_batch_when_all_pings_return(self):
- self.batch.down_minions = {'foo'}
+ self.batch.targeted_minions = {'foo'}
- self.batch.down_minions = {"foo"}
+ self.batch.targeted_minions = {"foo"}
self.batch.event = MagicMock(
unpack=MagicMock(return_value=('salt/job/1234/ret/foo', {'id': 'foo'})))
self.batch.start()
@@ -232,7 +235,7 @@ class AsyncBatchTestCase(AsyncTestCase, TestCase):
(self.batch.start_batch,))
unpack=MagicMock(return_value=("salt/job/1234/ret/foo", {"id": "foo"}))
)
@@ -228,7 +231,7 @@ class AsyncBatchTestCase(AsyncTestCase, TestCase):
)
def test_batch__event_handler_not_call_start_batch_when_not_all_pings_return(self):
- self.batch.down_minions = {'foo', 'bar'}
+ self.batch.targeted_minions = {'foo', 'bar'}
- self.batch.down_minions = {"foo", "bar"}
+ self.batch.targeted_minions = {"foo", "bar"}
self.batch.event = MagicMock(
unpack=MagicMock(return_value=('salt/job/1234/ret/foo', {'id': 'foo'})))
self.batch.start()
@@ -260,20 +263,10 @@ class AsyncBatchTestCase(AsyncTestCase, TestCase):
self.assertEqual(self.batch.find_job_returned, {'foo'})
unpack=MagicMock(return_value=("salt/job/1234/ret/foo", {"id": "foo"}))
)
@@ -259,23 +262,10 @@ class AsyncBatchTestCase(AsyncTestCase, TestCase):
self.assertEqual(self.batch.find_job_returned, {"foo"})
@tornado.testing.gen_test
- def test_batch__event_handler_end_batch(self):
- self.batch.event = MagicMock(
- unpack=MagicMock(return_value=('salt/job/not-my-jid/ret/foo', {'id': 'foo'})))
- unpack=MagicMock(
- return_value=("salt/job/not-my-jid/ret/foo", {"id": "foo"})
- )
- )
- future = tornado.gen.Future()
- future.set_result({'minions': ['foo', 'bar', 'baz']})
- future.set_result({"minions": ["foo", "bar", "baz"]})
- self.batch.local.run_job_async.return_value = future
- self.batch.start()
- self.batch.initialized = True
- self.assertEqual(self.batch.down_minions, {'foo', 'bar', 'baz'})
- self.assertEqual(self.batch.down_minions, {"foo", "bar", "baz"})
+ def test_batch_schedule_next_end_batch_when_no_next(self):
self.batch.end_batch = MagicMock()
- self.batch.minions = {'foo', 'bar', 'baz'}
- self.batch.done_minions = {'foo', 'bar'}
- self.batch.timedout_minions = {'baz'}
- self.batch.minions = {"foo", "bar", "baz"}
- self.batch.done_minions = {"foo", "bar"}
- self.batch.timedout_minions = {"baz"}
- self.batch._BatchAsync__event_handler(MagicMock())
+ self.batch._get_next = MagicMock(return_value={})
+ self.batch.schedule_next()
@ -248,6 +268,6 @@ index f65b6a06c3..d519157d92 100644
@tornado.testing.gen_test
--
2.16.4
2.29.2

View File

@ -1,4 +1,4 @@
From 49780d409630fe18293a077e767aabfd183ff823 Mon Sep 17 00:00:00 2001
From 5a83801b7733f09c35a7ff0abb5aa32d4c857e4b Mon Sep 17 00:00:00 2001
From: Mihai Dinca <mdinca@suse.de>
Date: Tue, 3 Dec 2019 11:22:42 +0100
Subject: [PATCH] Fix batch_async obsolete test
@ -8,26 +8,25 @@ Subject: [PATCH] Fix batch_async obsolete test
1 file changed, 7 insertions(+), 1 deletion(-)
diff --git a/tests/unit/cli/test_batch_async.py b/tests/unit/cli/test_batch_async.py
index 12dfe543bc..f1d36a81fb 100644
index c18b42be57..b04965268a 100644
--- a/tests/unit/cli/test_batch_async.py
+++ b/tests/unit/cli/test_batch_async.py
@@ -140,8 +140,14 @@ class AsyncBatchTestCase(AsyncTestCase, TestCase):
"salt/batch/1235/done"
)
@@ -134,7 +134,13 @@ class AsyncBatchTestCase(AsyncTestCase, TestCase):
"salt/batch/1235/done",
),
)
- self.assertEqual(len(self.batch.event.remove_event_handler.mock_calls), 1)
+
+ def test_batch__del__(self):
+ batch = BatchAsync(MagicMock(), MagicMock(), MagicMock())
+ event = MagicMock()
+ batch.event = event
+ batch.__del__()
self.assertEqual(
- len(self.batch.event.remove_event_handler.mock_calls), 1)
+ len(event.remove_event_handler.mock_calls), 1)
+ self.assertEqual(len(event.remove_event_handler.mock_calls), 1)
@tornado.testing.gen_test
def test_batch_next(self):
--
2.16.4
2.29.2

View File

@ -1,25 +1,42 @@
From 4acbe70851e3ef7a04fc5ad0dc9a2519f6989c66 Mon Sep 17 00:00:00 2001
From 1b9a160f578cf446f5ae622a450d23022e7e3ca5 Mon Sep 17 00:00:00 2001
From: Bo Maryniuk <bo@suse.de>
Date: Thu, 14 Dec 2017 16:21:40 +0100
Subject: [PATCH] Fix bsc#1065792
---
salt/states/service.py | 1 +
1 file changed, 1 insertion(+)
salt/states/service.py | 5 +----
1 file changed, 1 insertion(+), 4 deletions(-)
diff --git a/salt/states/service.py b/salt/states/service.py
index de7718ea49..987e37cd42 100644
index d19c245756..4ea36a78f6 100644
--- a/salt/states/service.py
+++ b/salt/states/service.py
@@ -80,6 +80,7 @@ def __virtual__():
@@ -56,16 +56,12 @@ set the reload value to True:
:ref:`Requisites <requisites>` documentation.
"""
-# Import Python libs
import time
-# Import Salt libs
import salt.utils.data
import salt.utils.platform
from salt.exceptions import CommandExecutionError
-
-# Import 3rd-party libs
from salt.utils.args import get_function_argspec as _argspec
from salt.utils.systemd import booted
@@ -79,6 +75,7 @@ def __virtual__():
Only make these states available if a service provider has been detected or
assigned for this minion
'''
"""
+ __salt__._load_all()
if 'service.start' in __salt__:
if "service.start" in __salt__:
return __virtualname__
else:
--
2.16.4
2.29.2

View File

@ -1,4 +1,4 @@
From e7514afcba4f57c5cb8599f561fcefdcc3db7314 Mon Sep 17 00:00:00 2001
From bc7acab857b952353a959339b06c79d851a9d879 Mon Sep 17 00:00:00 2001
From: "Daniel A. Wozniak" <dwozniak@saltstack.com>
Date: Wed, 16 Sep 2020 00:25:10 +0000
Subject: [PATCH] Fix CVE-2020-25592 and add tests (bsc#1178319)
@ -7,184 +7,17 @@ Properly validate eauth credentials and tokens on SSH calls made by Salt API
(bsc#1178319) (bsc#1178362) (bsc#1178361) (CVE-2020-25592) (CVE-2020-17490) (CVE-2020-16846)
---
salt/client/ssh/shell.py | 26 ++-
salt/modules/tls.py | 18 +-
salt/netapi/__init__.py | 67 ++++++
tests/integration/netapi/test_client.py | 296 +++++++++++++++++++++++-
4 files changed, 388 insertions(+), 19 deletions(-)
salt/netapi/__init__.py | 43 +++++++++++++++++++++++++
tests/integration/netapi/test_client.py | 13 ++++++--
2 files changed, 53 insertions(+), 3 deletions(-)
diff --git a/salt/client/ssh/shell.py b/salt/client/ssh/shell.py
index bd55c514ee..27aba7b382 100644
--- a/salt/client/ssh/shell.py
+++ b/salt/client/ssh/shell.py
@@ -8,6 +8,7 @@ from __future__ import absolute_import, print_function, unicode_literals
import re
import os
import sys
+import shlex
import time
import logging
import subprocess
@@ -43,10 +44,10 @@ def gen_key(path):
'''
Generate a key for use with salt-ssh
'''
- cmd = 'ssh-keygen -P "" -f {0} -t rsa -q'.format(path)
+ cmd = ["ssh-keygen", "-P", '""', "-f", path, "-t", "rsa", "-q"]
if not os.path.isdir(os.path.dirname(path)):
os.makedirs(os.path.dirname(path))
- subprocess.call(cmd, shell=True)
+ subprocess.call(cmd)
def gen_shell(opts, **kwargs):
@@ -289,8 +290,7 @@ class Shell(object):
'''
try:
proc = salt.utils.nb_popen.NonBlockingPopen(
- cmd,
- shell=True,
+ self._split_cmd(cmd),
stderr=subprocess.PIPE,
stdout=subprocess.PIPE,
)
@@ -369,6 +369,21 @@ class Shell(object):
return self._run_cmd(cmd)
+ def _split_cmd(self, cmd):
+ """
+ Split a command string so that it is suitable to pass to Popen without
+ shell=True. This prevents shell injection attacks in the options passed
+ to ssh or some other command.
+ """
+ try:
+ ssh_part, cmd_part = cmd.split("/bin/sh")
+ except ValueError:
+ cmd_lst = shlex.split(cmd)
+ else:
+ cmd_lst = shlex.split(ssh_part)
+ cmd_lst.append("/bin/sh {}".format(cmd_part))
+ return cmd_lst
+
def _run_cmd(self, cmd, key_accept=False, passwd_retries=3):
'''
Execute a shell command via VT. This is blocking and assumes that ssh
@@ -378,8 +393,7 @@ class Shell(object):
return '', 'No command or passphrase', 245
term = salt.utils.vt.Terminal(
- cmd,
- shell=True,
+ self._split_cmd(cmd),
log_stdout=True,
log_stdout_level='trace',
log_stderr=True,
diff --git a/salt/modules/tls.py b/salt/modules/tls.py
index af845621a3..116b5fe379 100644
--- a/salt/modules/tls.py
+++ b/salt/modules/tls.py
@@ -798,12 +798,13 @@ def create_ca(ca_name,
if old_key.strip() == keycontent.strip():
write_key = False
else:
- log.info('Saving old CA ssl key in %s', bck)
- with salt.utils.files.fopen(bck, 'w') as bckf:
+ log.info('Saving old CA ssl key in {0}'.format(bck))
+ fp = os.open(bck, os.O_CREAT | os.O_RDWR, 0o600)
+ with os.fdopen(fp, 'w') as bckf:
bckf.write(old_key)
- os.chmod(bck, 0o600)
if write_key:
- with salt.utils.files.fopen(ca_keyp, 'wb') as ca_key:
+ fp = os.open(ca_keyp, os.O_CREAT | os.O_RDWR, 0o600)
+ with os.fdopen(fp, 'wb') as ca_key:
ca_key.write(salt.utils.stringutils.to_bytes(keycontent))
with salt.utils.files.fopen(certp, 'wb') as ca_crt:
@@ -1115,9 +1116,9 @@ def create_csr(ca_name,
req.sign(key, salt.utils.stringutils.to_str(digest))
# Write private key and request
- with salt.utils.files.fopen('{0}/{1}.key'.format(csr_path,
- csr_filename),
- 'wb+') as priv_key:
+ priv_keyp = '{0}/{1}.key'.format(csr_path, csr_filename)
+ fp = os.open(priv_keyp, os.O_CREAT | os.O_RDWR, 0o600)
+ with os.fdopen(fp, 'wb+') as priv_key:
priv_key.write(
salt.utils.stringutils.to_bytes(
OpenSSL.crypto.dump_privatekey(OpenSSL.crypto.FILETYPE_PEM,
@@ -1266,7 +1267,8 @@ def create_self_signed_cert(tls_dir='tls',
priv_key_path = '{0}/{1}/certs/{2}.key'.format(cert_base_path(),
tls_dir,
cert_filename)
- with salt.utils.files.fopen(priv_key_path, 'wb+') as priv_key:
+ fp = os.open(priv_key_path, os.O_CREAT | os.O_RDWR, 0o600)
+ with os.fdopen(fp, 'wb+') as priv_key:
priv_key.write(
salt.utils.stringutils.to_bytes(
OpenSSL.crypto.dump_privatekey(OpenSSL.crypto.FILETYPE_PEM,
diff --git a/salt/netapi/__init__.py b/salt/netapi/__init__.py
index 31a24bb420..4e5b6b093a 100644
index dec19b37ef..cba1ec574f 100644
--- a/salt/netapi/__init__.py
+++ b/salt/netapi/__init__.py
@@ -3,24 +3,36 @@
Make api awesomeness
'''
from __future__ import absolute_import, print_function, unicode_literals
+
+import copy
+
# Import Python libs
import inspect
+import logging
import os
# Import Salt libs
import salt.log # pylint: disable=W0611
+import salt.auth
import salt.client
import salt.config
+import salt.daemons.masterapi
import salt.runner
import salt.syspaths
import salt.wheel
import salt.utils.args
import salt.client.ssh.client
import salt.exceptions
+import salt.utils.args
+import salt.utils.minions
+import salt.wheel
+from salt.defaults import DEFAULT_TARGET_DELIM
# Import third party libs
from salt.ext import six
+log = logging.getLogger(__name__)
+
class NetapiClient(object):
'''
@@ -34,6 +46,15 @@ class NetapiClient(object):
def __init__(self, opts):
self.opts = opts
+ apiopts = copy.deepcopy(self.opts)
+ apiopts["enable_ssh_minions"] = True
+ apiopts["cachedir"] = os.path.join(opts["cachedir"], "saltapi")
+ if not os.path.exists(apiopts["cachedir"]):
+ os.makedirs(apiopts["cachedir"])
+ self.resolver = salt.auth.Resolver(apiopts)
+ self.loadauth = salt.auth.LoadAuth(apiopts)
+ self.key = salt.daemons.masterapi.access_keys(apiopts)
+ self.ckminions = salt.utils.minions.CkMinions(apiopts)
def _is_master_running(self):
'''
@@ -55,6 +76,49 @@ class NetapiClient(object):
self.opts['sock_dir'],
ipc_file))
@@ -109,6 +109,49 @@ class NetapiClient:
"Authorization error occurred."
)
+ def _prep_auth_info(self, clear_load):
+ sensitive_load_keys = []
@ -230,352 +63,58 @@ index 31a24bb420..4e5b6b093a 100644
+ )
+
def run(self, low):
'''
"""
Execute the specified function in the specified client by passing the
@@ -80,6 +144,9 @@ class NetapiClient(object):
raise salt.exceptions.EauthAuthenticationError(
'Raw shell option not allowed.')
+ if low['client'] == 'ssh':
+ self._authorize_ssh(low)
+
l_fun = getattr(self, low['client'])
f_call = salt.utils.args.format_call(l_fun, low)
return l_fun(*f_call.get('args', ()), **f_call.get('kwargs', {}))
diff --git a/tests/integration/netapi/test_client.py b/tests/integration/netapi/test_client.py
index 08030f31ec..b99bdfe313 100644
index 70471d3148..9eb6e52920 100644
--- a/tests/integration/netapi/test_client.py
+++ b/tests/integration/netapi/test_client.py
@@ -1,26 +1,30 @@
# encoding: utf-8
-
# Import Python libs
from __future__ import absolute_import, print_function, unicode_literals
+import copy
import logging
import os
import time
+import salt.config
+import salt.netapi
+import salt.utils.files
+import salt.utils.platform
+import salt.utils.pycrypto
+
# Import Salt Testing libs
from tests.support.paths import TMP_CONF_DIR, TMP
@@ -15,10 +15,12 @@ from tests.support.helpers import (
SKIP_IF_NOT_RUNNING_PYTEST,
SaveRequestsPostHandler,
Webserver,
+ requires_sshd_server,
slowTest,
)
from tests.support.mixins import AdaptedConfigurationTestCaseMixin
from tests.support.mock import patch
+from tests.support.paths import TMP, TMP_CONF_DIR
from tests.support.runtests import RUNTIME_VARS
from tests.support.unit import TestCase, skipIf
from tests.support.mock import patch
-from tests.support.case import SSHCase
+from tests.support.case import ModuleCase, SSHCase
+from salt.exceptions import EauthAuthenticationError
from tests.support.helpers import (
Webserver,
SaveRequestsPostHandler,
requires_sshd_server
)
-# Import Salt libs
-import salt.config
-import salt.netapi
from salt.exceptions import (
EauthAuthenticationError
@@ -174,6 +178,10 @@ class NetapiSSHClientTest(SSHCase):
'''
opts = salt.config.client_config(os.path.join(TMP_CONF_DIR, 'master'))
@@ -178,7 +180,12 @@ class NetapiSSHClientTest(SSHCase):
"""
opts = AdaptedConfigurationTestCaseMixin.get_config("client_config").copy()
self.netapi = salt.netapi.NetapiClient(opts)
+ opts = salt.config.client_config(os.path.join(TMP_CONF_DIR, "master"))
+ naopts = copy.deepcopy(opts)
+ naopts["ignore_host_keys"] = True
+ self.netapi = salt.netapi.NetapiClient(naopts)
self.priv_file = os.path.join(RUNTIME_VARS.TMP_CONF_DIR, 'key_test')
self.rosters = os.path.join(RUNTIME_VARS.TMP_CONF_DIR)
@@ -271,3 +279,281 @@ class NetapiSSHClientTest(SSHCase):
self.assertEqual(ret, None)
self.assertFalse(os.path.exists('badfile.txt'))
+
+ @staticmethod
+ def cleanup_file(path):
+ try:
+ os.remove(path)
+ except OSError:
+ pass
+
+ @staticmethod
+ def cleanup_dir(path):
+ try:
+ salt.utils.files.rm_rf(path)
+ except OSError:
+ pass
+
+ def test_shell_inject_ssh_priv(self):
+ """
+ Verify CVE-2020-16846 for ssh_priv variable
+ """
+ # ZDI-CAN-11143
+ path = "/tmp/test-11143"
+ self.addCleanup(self.cleanup_file, path)
+ self.addCleanup(self.cleanup_file, "aaa")
+ self.addCleanup(self.cleanup_file, "aaa.pub")
+ self.addCleanup(self.cleanup_dir, "aaa|id>")
+ low = {
+ "roster": "cache",
+ "client": "ssh",
+ "tgt": "www.zerodayinitiative.com",
+ "ssh_priv": "aaa|id>{} #".format(path),
+ "fun": "test.ping",
+ "eauth": "auto",
+ "username": "saltdev_auto",
+ "password": "saltdev",
+ }
+ ret = self.netapi.run(low)
+ self.assertFalse(os.path.exists(path))
+
+ def test_shell_inject_tgt(self):
+ """
+ Verify CVE-2020-16846 for tgt variable
+ """
+ # ZDI-CAN-11167
+ path = "/tmp/test-11167"
+ self.addCleanup(self.cleanup_file, path)
+ low = {
+ "roster": "cache",
+ "client": "ssh",
+ "tgt": "root|id>{} #@127.0.0.1".format(path),
+ "roster_file": "/tmp/salt-tests-tmpdir/config/roaster",
+ "rosters": "/",
+ "fun": "test.ping",
+ "eauth": "auto",
+ "username": "saltdev_auto",
+ "password": "saltdev",
+ }
+ ret = self.netapi.run(low)
+ self.assertFalse(os.path.exists(path))
+
+ def test_shell_inject_ssh_options(self):
+ """
+ Verify CVE-2020-16846 for ssh_options
+ """
+ # ZDI-CAN-11169
+ path = "/tmp/test-11169"
+ self.addCleanup(self.cleanup_file, path)
+ low = {
+ "roster": "cache",
+ "client": "ssh",
+ "tgt": "127.0.0.1",
+ "renderer": "cheetah",
+ "fun": "test.ping",
+ "eauth": "auto",
+ "username": "saltdev_auto",
+ "password": "saltdev",
+ "roster_file": "/tmp/salt-tests-tmpdir/config/roaster",
+ "rosters": "/",
+ "ssh_options": ["|id>{} #".format(path), "lol"],
+ }
+ ret = self.netapi.run(low)
+ self.assertFalse(os.path.exists(path))
+
+ def test_shell_inject_ssh_port(self):
+ """
+ Verify CVE-2020-16846 for ssh_port variable
+ """
+ # ZDI-CAN-11172
+ path = "/tmp/test-11172"
+ self.addCleanup(self.cleanup_file, path)
+ low = {
+ "roster": "cache",
+ "client": "ssh",
+ "tgt": "127.0.0.1",
+ "renderer": "cheetah",
+ "fun": "test.ping",
+ "eauth": "auto",
+ "username": "saltdev_auto",
+ "password": "saltdev",
+ "roster_file": "/tmp/salt-tests-tmpdir/config/roaster",
+ "rosters": "/",
+ "ssh_port": "hhhhh|id>{} #".format(path),
+ }
+ ret = self.netapi.run(low)
+ self.assertFalse(os.path.exists(path))
+
+ def test_shell_inject_remote_port_forwards(self):
+ """
+ Verify CVE-2020-16846 for remote_port_forwards variable
+ """
+ # ZDI-CAN-11173
+ path = "/tmp/test-1173"
+ self.addCleanup(self.cleanup_file, path)
+ low = {
+ "roster": "cache",
+ "client": "ssh",
+ "tgt": "127.0.0.1",
+ "renderer": "cheetah",
+ "fun": "test.ping",
+ "roster_file": "/tmp/salt-tests-tmpdir/config/roaster",
+ "rosters": "/",
+ "ssh_remote_port_forwards": "hhhhh|id>{} #, lol".format(path),
+ "eauth": "auto",
+ "username": "saltdev_auto",
+ "password": "saltdev",
+ }
+ ret = self.netapi.run(low)
+ self.assertFalse(os.path.exists(path))
+
+
+@requires_sshd_server
+class NetapiSSHClientAuthTest(SSHCase):
+
+ USERA = "saltdev"
+ USERA_PWD = "saltdev"
+
+ def setUp(self):
+ """
+ Set up a NetapiClient instance
+ """
- self.priv_file = os.path.join(RUNTIME_VARS.TMP_SSH_CONF_DIR, "client_key")
+ opts = salt.config.client_config(os.path.join(TMP_CONF_DIR, "master"))
+ naopts = copy.deepcopy(opts)
+ naopts["ignore_host_keys"] = True
+ self.netapi = salt.netapi.NetapiClient(naopts)
+
+ self.priv_file = os.path.join(RUNTIME_VARS.TMP_CONF_DIR, "key_test")
+ self.rosters = os.path.join(RUNTIME_VARS.TMP_CONF_DIR)
+ # Initialize salt-ssh
+ self.run_function("test.ping")
+ self.mod_case = ModuleCase()
+ try:
+ add_user = self.mod_case.run_function(
+ "user.add", [self.USERA], createhome=False
+ )
+ self.assertTrue(add_user)
+ if salt.utils.platform.is_darwin():
+ hashed_password = self.USERA_PWD
+ else:
+ hashed_password = salt.utils.pycrypto.gen_hash(password=self.USERA_PWD)
+ add_pwd = self.mod_case.run_function(
+ "shadow.set_password", [self.USERA, hashed_password],
+ )
+ self.assertTrue(add_pwd)
+ except AssertionError:
+ self.mod_case.run_function("user.delete", [self.USERA], remove=True)
+ self.skipTest("Could not add user or password, skipping test")
+
+ def tearDown(self):
+ del self.netapi
+ self.mod_case.run_function("user.delete", [self.USERA], remove=True)
+
+ @classmethod
+ def setUpClass(cls):
+ cls.post_webserver = Webserver(handler=SaveRequestsPostHandler)
+ cls.post_webserver.start()
+ cls.post_web_root = cls.post_webserver.web_root
+ cls.post_web_handler = cls.post_webserver.handler
+
+ @classmethod
+ def tearDownClass(cls):
+ cls.post_webserver.stop()
+ del cls.post_webserver
+
+ def test_ssh_auth_bypass(self):
+ """
+ CVE-2020-25592 - Bogus eauth raises exception.
+ """
+ low = {
+ "roster": "cache",
+ "client": "ssh",
+ "tgt": "127.0.0.1",
+ "renderer": "cheetah",
+ "fun": "test.ping",
self.rosters = os.path.join(RUNTIME_VARS.TMP_CONF_DIR)
self.roster_file = os.path.join(self.rosters, "roster")
@@ -325,7 +332,7 @@ class NetapiSSHClientTest(SSHCase):
"roster": "cache",
"client": "ssh",
"tgt": "root|id>{} #@127.0.0.1".format(path),
- "roster_file": self.roster_file,
+ "roster_file": "/tmp/salt-tests-tmpdir/config/roaster",
+ "rosters": "/",
+ "eauth": "xx",
+ }
+ with self.assertRaises(salt.exceptions.EauthAuthenticationError):
+ ret = self.netapi.run(low)
+
+ def test_ssh_auth_valid(self):
+ """
+ CVE-2020-25592 - Valid eauth works as expected.
+ """
+ low = {
+ "client": "ssh",
+ "tgt": "localhost",
+ "fun": "test.ping",
+ "roster_file": "roster",
+ "rosters": [self.rosters],
+ "ssh_priv": self.priv_file,
+ "eauth": "pam",
+ "username": "saltdev",
+ "password": "saltdev",
+ }
+ ret = self.netapi.run(low)
+ assert "localhost" in ret
+ assert ret["localhost"]["return"] is True
+
+ def test_ssh_auth_invalid(self):
+ """
+ CVE-2020-25592 - Wrong password raises exception.
+ """
+ low = {
+ "client": "ssh",
+ "tgt": "localhost",
+ "fun": "test.ping",
+ "roster_file": "roster",
+ "rosters": [self.rosters],
+ "ssh_priv": self.priv_file,
+ "eauth": "pam",
+ "username": "saltdev",
+ "password": "notvalidpassword",
+ }
+ with self.assertRaises(salt.exceptions.EauthAuthenticationError):
+ ret = self.netapi.run(low)
+
+ def test_ssh_auth_invalid_acl(self):
+ """
+ CVE-2020-25592 - Eauth ACL enforced.
+ """
+ low = {
+ "client": "ssh",
+ "tgt": "localhost",
+ "fun": "at.at",
+ "args": ["12:05am", "echo foo"],
+ "roster_file": "roster",
+ "rosters": [self.rosters],
+ "ssh_priv": self.priv_file,
+ "eauth": "pam",
+ "username": "saltdev",
+ "password": "notvalidpassword",
+ }
+ with self.assertRaises(salt.exceptions.EauthAuthenticationError):
+ ret = self.netapi.run(low)
+
+ def test_ssh_auth_token(self):
+ """
+ CVE-2020-25592 - Eauth tokens work as expected.
+ """
+ low = {
+ "eauth": "pam",
+ "username": "saltdev",
+ "password": "saltdev",
+ }
+ ret = self.netapi.loadauth.mk_token(low)
+ assert "token" in ret and ret["token"]
+ low = {
+ "client": "ssh",
+ "tgt": "localhost",
+ "fun": "test.ping",
+ "roster_file": "roster",
+ "rosters": [self.rosters],
+ "ssh_priv": self.priv_file,
+ "token": ret["token"],
+ }
+ ret = self.netapi.run(low)
+ assert "localhost" in ret
+ assert ret["localhost"]["return"] is True
"rosters": "/",
"fun": "test.ping",
"eauth": "auto",
@@ -355,7 +362,7 @@ class NetapiSSHClientTest(SSHCase):
"eauth": "auto",
"username": "saltdev_auto",
"password": "saltdev",
- "roster_file": self.roster_file,
+ "roster_file": "/tmp/salt-tests-tmpdir/config/roaster",
"rosters": "/",
"ssh_options": ["|id>{} #".format(path), "lol"],
}
--
2.28.0
2.29.2

View File

@ -1,4 +1,4 @@
From e6f6b38c75027c4c4f6395117b734dce6fb7433e Mon Sep 17 00:00:00 2001
From 3b96edd8d23c65c6788a9980114a7e1c220c9640 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Pablo=20Su=C3=A1rez=20Hern=C3=A1ndez?=
<psuarezhernandez@suse.com>
Date: Fri, 4 Oct 2019 15:00:50 +0100
@ -6,49 +6,49 @@ Subject: [PATCH] Fix failing unit tests for batch async
---
salt/cli/batch_async.py | 2 +-
tests/unit/cli/test_batch_async.py | 57 ++++++++++++++++++++++----------------
2 files changed, 34 insertions(+), 25 deletions(-)
tests/unit/cli/test_batch_async.py | 66 +++++++++++++++++-------------
2 files changed, 39 insertions(+), 29 deletions(-)
diff --git a/salt/cli/batch_async.py b/salt/cli/batch_async.py
index f9e736f804..6d0dca1da5 100644
index 89405ba917..b2d04f9d4d 100644
--- a/salt/cli/batch_async.py
+++ b/salt/cli/batch_async.py
@@ -88,7 +88,7 @@ class BatchAsync(object):
io_loop=ioloop,
keep_loop=True)
@@ -91,7 +91,7 @@ class BatchAsync:
keep_loop=True,
)
self.scheduled = False
- self.patterns = {}
+ self.patterns = set()
def __set_event_handler(self):
ping_return_pattern = 'salt/job/{0}/ret/*'.format(self.ping_jid)
ping_return_pattern = "salt/job/{}/ret/*".format(self.ping_jid)
diff --git a/tests/unit/cli/test_batch_async.py b/tests/unit/cli/test_batch_async.py
index 441f9c58b9..12dfe543bc 100644
index 66332a548a..c18b42be57 100644
--- a/tests/unit/cli/test_batch_async.py
+++ b/tests/unit/cli/test_batch_async.py
@@ -68,8 +68,8 @@ class AsyncBatchTestCase(AsyncTestCase, TestCase):
@@ -61,8 +61,8 @@ class AsyncBatchTestCase(AsyncTestCase, TestCase):
ret = self.batch.start()
# assert start_batch is called later with batch_presence_ping_timeout as param
self.assertEqual(
- self.batch.event.io_loop.call_later.call_args[0],
- (self.batch.batch_presence_ping_timeout, self.batch.start_batch))
- (self.batch.batch_presence_ping_timeout, self.batch.start_batch),
+ self.batch.event.io_loop.spawn_callback.call_args[0],
+ (self.batch.start_batch,))
+ (self.batch.start_batch,),
)
# assert test.ping called
self.assertEqual(
self.batch.local.run_job_async.call_args[0],
@@ -88,8 +88,8 @@ class AsyncBatchTestCase(AsyncTestCase, TestCase):
@@ -81,8 +81,8 @@ class AsyncBatchTestCase(AsyncTestCase, TestCase):
ret = self.batch.start()
# assert start_batch is called later with gather_job_timeout as param
self.assertEqual(
- self.batch.event.io_loop.call_later.call_args[0],
- (self.batch.opts['gather_job_timeout'], self.batch.start_batch))
- (self.batch.opts["gather_job_timeout"], self.batch.start_batch),
+ self.batch.event.io_loop.spawn_callback.call_args[0],
+ (self.batch.start_batch,))
+ (self.batch.start_batch,),
)
def test_batch_fire_start_event(self):
self.batch.minions = set(['foo', 'bar'])
@@ -113,12 +113,11 @@ class AsyncBatchTestCase(AsyncTestCase, TestCase):
@@ -107,12 +107,11 @@ class AsyncBatchTestCase(AsyncTestCase, TestCase):
def test_start_batch_calls_next(self):
self.batch.run_next = MagicMock(return_value=MagicMock())
self.batch.event = MagicMock()
@ -59,127 +59,138 @@ index 441f9c58b9..12dfe543bc 100644
self.assertEqual(self.batch.initialized, True)
- self.assertEqual(len(self.batch.run_next.mock_calls), 1)
+ self.assertEqual(
+ self.batch.event.io_loop.spawn_callback.call_args[0],
+ (self.batch.run_next,))
+ self.batch.event.io_loop.spawn_callback.call_args[0], (self.batch.run_next,)
+ )
def test_batch_fire_done_event(self):
self.batch.targeted_minions = {'foo', 'baz', 'bar'}
@@ -154,14 +153,14 @@ class AsyncBatchTestCase(AsyncTestCase, TestCase):
self.batch.targeted_minions = {"foo", "baz", "bar"}
@@ -147,14 +146,14 @@ class AsyncBatchTestCase(AsyncTestCase, TestCase):
future = tornado.gen.Future()
future.set_result({'minions': ['foo', 'bar']})
future.set_result({"minions": ["foo", "bar"]})
self.batch.local.run_job_async.return_value = future
- ret = self.batch.run_next().result()
+ self.batch.run_next()
self.assertEqual(
self.batch.local.run_job_async.call_args[0],
({'foo', 'bar'}, 'my.fun', [], 'list')
({"foo", "bar"}, "my.fun", [], "list"),
)
self.assertEqual(
- self.batch.event.io_loop.call_later.call_args[0],
- (self.batch.opts['timeout'], self.batch.find_job, {'foo', 'bar'})
- (self.batch.opts["timeout"], self.batch.find_job, {"foo", "bar"}),
+ self.batch.event.io_loop.spawn_callback.call_args[0],
+ (self.batch.find_job, {'foo', 'bar'})
+ (self.batch.find_job, {"foo", "bar"}),
)
self.assertEqual(self.batch.active, {'bar', 'foo'})
self.assertEqual(self.batch.active, {"bar", "foo"})
@@ -252,13 +251,14 @@ class AsyncBatchTestCase(AsyncTestCase, TestCase):
@@ -249,15 +248,21 @@ class AsyncBatchTestCase(AsyncTestCase, TestCase):
self.assertEqual(self.batch.active, set())
self.assertEqual(self.batch.done_minions, {'foo'})
self.assertEqual(self.batch.done_minions, {"foo"})
self.assertEqual(
- self.batch.event.io_loop.call_later.call_args[0],
- (self.batch.batch_delay, self.batch.run_next))
- (self.batch.batch_delay, self.batch.run_next),
+ self.batch.event.io_loop.spawn_callback.call_args[0],
+ (self.batch.schedule_next,))
+ (self.batch.schedule_next,),
)
def test_batch__event_handler_find_job_return(self):
self.batch.event = MagicMock(
- unpack=MagicMock(return_value=('salt/job/1236/ret/foo', {'id': 'foo'})))
+ unpack=MagicMock(return_value=('salt/job/1236/ret/foo', {'id': 'foo', 'return': 'deadbeaf'})))
- unpack=MagicMock(return_value=("salt/job/1236/ret/foo", {"id": "foo"}))
+ unpack=MagicMock(
+ return_value=(
+ "salt/job/1236/ret/foo",
+ {"id": "foo", "return": "deadbeaf"},
+ )
+ )
)
self.batch.start()
+ self.batch.patterns.add(('salt/job/1236/ret/*', 'find_job_return'))
+ self.batch.patterns.add(("salt/job/1236/ret/*", "find_job_return"))
self.batch._BatchAsync__event_handler(MagicMock())
self.assertEqual(self.batch.find_job_returned, {'foo'})
self.assertEqual(self.batch.find_job_returned, {"foo"})
@@ -275,10 +275,13 @@ class AsyncBatchTestCase(AsyncTestCase, TestCase):
@@ -274,14 +279,13 @@ class AsyncBatchTestCase(AsyncTestCase, TestCase):
future = tornado.gen.Future()
future.set_result({})
self.batch.local.run_job_async.return_value = future
+ self.batch.minions = set(['foo', 'bar'])
+ self.batch.minions = {"foo", "bar"}
+ self.batch.jid_gen = MagicMock(return_value="1234")
+ tornado.gen.sleep = MagicMock(return_value=future)
self.batch.find_job({'foo', 'bar'})
self.batch.find_job({"foo", "bar"})
self.assertEqual(
- self.batch.event.io_loop.call_later.call_args[0],
- (self.batch.opts['gather_job_timeout'], self.batch.check_find_job, {'foo', 'bar'})
- (
- self.batch.opts["gather_job_timeout"],
- self.batch.check_find_job,
- {"foo", "bar"},
- ),
+ self.batch.event.io_loop.spawn_callback.call_args[0],
+ (self.batch.check_find_job, {'foo', 'bar'}, "1234")
+ (self.batch.check_find_job, {"foo", "bar"}, "1234"),
)
@tornado.testing.gen_test
@@ -288,17 +291,21 @@ class AsyncBatchTestCase(AsyncTestCase, TestCase):
@@ -291,17 +295,21 @@ class AsyncBatchTestCase(AsyncTestCase, TestCase):
future = tornado.gen.Future()
future.set_result({})
self.batch.local.run_job_async.return_value = future
+ self.batch.minions = set(['foo', 'bar'])
+ self.batch.minions = {"foo", "bar"}
+ self.batch.jid_gen = MagicMock(return_value="1234")
+ tornado.gen.sleep = MagicMock(return_value=future)
self.batch.find_job({'foo', 'bar'})
self.batch.find_job({"foo", "bar"})
self.assertEqual(
- self.batch.event.io_loop.call_later.call_args[0],
- (self.batch.opts['gather_job_timeout'], self.batch.check_find_job, {'foo'})
- (self.batch.opts["gather_job_timeout"], self.batch.check_find_job, {"foo"}),
+ self.batch.event.io_loop.spawn_callback.call_args[0],
+ (self.batch.check_find_job, {'foo'}, "1234")
+ (self.batch.check_find_job, {"foo"}, "1234"),
)
def test_batch_check_find_job_did_not_return(self):
self.batch.event = MagicMock()
self.batch.active = {'foo'}
self.batch.active = {"foo"}
self.batch.find_job_returned = set()
- self.batch.check_find_job({'foo'})
+ self.batch.patterns = { ('salt/job/1234/ret/*', 'find_job_return') }
+ self.batch.check_find_job({'foo'}, jid="1234")
- self.batch.check_find_job({"foo"})
+ self.batch.patterns = {("salt/job/1234/ret/*", "find_job_return")}
+ self.batch.check_find_job({"foo"}, jid="1234")
self.assertEqual(self.batch.find_job_returned, set())
self.assertEqual(self.batch.active, set())
self.assertEqual(len(self.batch.event.io_loop.add_callback.mock_calls), 0)
@@ -306,9 +313,10 @@ class AsyncBatchTestCase(AsyncTestCase, TestCase):
@@ -309,9 +317,10 @@ class AsyncBatchTestCase(AsyncTestCase, TestCase):
def test_batch_check_find_job_did_return(self):
self.batch.event = MagicMock()
self.batch.find_job_returned = {'foo'}
- self.batch.check_find_job({'foo'})
+ self.batch.patterns = { ('salt/job/1234/ret/*', 'find_job_return') }
+ self.batch.check_find_job({'foo'}, jid="1234")
self.batch.find_job_returned = {"foo"}
- self.batch.check_find_job({"foo"})
+ self.batch.patterns = {("salt/job/1234/ret/*", "find_job_return")}
+ self.batch.check_find_job({"foo"}, jid="1234")
self.assertEqual(
- self.batch.event.io_loop.add_callback.call_args[0],
+ self.batch.event.io_loop.spawn_callback.call_args[0],
(self.batch.find_job, {'foo'})
(self.batch.find_job, {"foo"}),
)
@@ -329,7 +337,8 @@ class AsyncBatchTestCase(AsyncTestCase, TestCase):
@@ -332,7 +341,8 @@ class AsyncBatchTestCase(AsyncTestCase, TestCase):
# both not yet done but only 'foo' responded to find_job
not_done = {'foo', 'bar'}
not_done = {"foo", "bar"}
- self.batch.check_find_job(not_done)
+ self.batch.patterns = { ('salt/job/1234/ret/*', 'find_job_return') }
+ self.batch.patterns = {("salt/job/1234/ret/*", "find_job_return")}
+ self.batch.check_find_job(not_done, jid="1234")
# assert 'bar' removed from active
self.assertEqual(self.batch.active, {'foo'})
@@ -339,7 +348,7 @@ class AsyncBatchTestCase(AsyncTestCase, TestCase):
self.assertEqual(self.batch.active, {"foo"})
@@ -342,7 +352,7 @@ class AsyncBatchTestCase(AsyncTestCase, TestCase):
# assert 'find_job' schedueled again only for 'foo'
self.assertEqual(
- self.batch.event.io_loop.add_callback.call_args[0],
+ self.batch.event.io_loop.spawn_callback.call_args[0],
(self.batch.find_job, {'foo'})
(self.batch.find_job, {"foo"}),
)
@@ -347,4 +356,4 @@ class AsyncBatchTestCase(AsyncTestCase, TestCase):
@@ -350,4 +360,4 @@ class AsyncBatchTestCase(AsyncTestCase, TestCase):
self.batch.event = MagicMock()
self.batch.scheduled = True
self.batch.schedule_next()
- self.assertEqual(len(self.batch.event.io_loop.call_later.mock_calls), 0)
+ self.assertEqual(len(self.batch.event.io_loop.spawn_callback.mock_calls), 0)
--
2.16.4
2.29.2

View File

@ -1,738 +0,0 @@
From c3d8ef9d1387ac3d69fbbd1f8042bf89ba87821a Mon Sep 17 00:00:00 2001
From: Victor Zhestkov <vzhestkov@suse.com>
Date: Tue, 13 Oct 2020 09:28:39 +0300
Subject: [PATCH] Fix for bsc#1102248 - psutil is broken and so Process
is not working on Python 3 as it is implemented
---
salt/modules/ps.py | 268 ++++++++++++++++++++++++++-------------------
1 file changed, 157 insertions(+), 111 deletions(-)
diff --git a/salt/modules/ps.py b/salt/modules/ps.py
index bb37873f48..9925e29968 100644
--- a/salt/modules/ps.py
+++ b/salt/modules/ps.py
@@ -1,31 +1,33 @@
# -*- coding: utf-8 -*-
-'''
+"""
A salt interface to psutil, a system and process library.
See http://code.google.com/p/psutil.
:depends: - psutil Python module, version 0.3.0 or later
- python-utmp package (optional)
-'''
+"""
# Import python libs
-from __future__ import absolute_import, unicode_literals, print_function
-import time
+from __future__ import absolute_import, print_function, unicode_literals
+
import datetime
import re
+import time
# Import salt libs
import salt.utils.data
-from salt.exceptions import SaltInvocationError, CommandExecutionError
# Import third party libs
import salt.utils.decorators.path
+from salt.exceptions import CommandExecutionError, SaltInvocationError
from salt.ext import six
+
# pylint: disable=import-error
try:
import salt.utils.psutil_compat as psutil
HAS_PSUTIL = True
- PSUTIL2 = getattr(psutil, 'version_info', ()) >= (2, 0)
+ PSUTIL2 = getattr(psutil, "version_info", ()) >= (2, 0)
except ImportError:
HAS_PSUTIL = False
# pylint: enable=import-error
@@ -33,7 +35,10 @@ except ImportError:
def __virtual__():
if not HAS_PSUTIL:
- return False, 'The ps module cannot be loaded: python module psutil not installed.'
+ return (
+ False,
+ "The ps module cannot be loaded: python module psutil not installed.",
+ )
# Functions and attributes used in this execution module seem to have been
# added as of psutil 0.3.0, from an inspection of the source code. Only
@@ -44,15 +49,20 @@ def __virtual__():
# as of Dec. 2013 EPEL is on 0.6.1, Debian 7 is on 0.5.1, etc.).
if psutil.version_info >= (0, 3, 0):
return True
- return (False, 'The ps execution module cannot be loaded: the psutil python module version {0} is less than 0.3.0'.format(psutil.version_info))
+ return (
+ False,
+ "The ps execution module cannot be loaded: the psutil python module version {0} is less than 0.3.0".format(
+ psutil.version_info
+ ),
+ )
def _get_proc_cmdline(proc):
- '''
+ """
Returns the cmdline of a Process instance.
It's backward compatible with < 2.0 versions of psutil.
- '''
+ """
try:
return salt.utils.data.decode(proc.cmdline() if PSUTIL2 else proc.cmdline)
except (psutil.NoSuchProcess, psutil.AccessDenied):
@@ -60,23 +70,25 @@ def _get_proc_cmdline(proc):
def _get_proc_create_time(proc):
- '''
+ """
Returns the create_time of a Process instance.
It's backward compatible with < 2.0 versions of psutil.
- '''
+ """
try:
- return salt.utils.data.decode(proc.create_time() if PSUTIL2 else proc.create_time)
+ return salt.utils.data.decode(
+ proc.create_time() if PSUTIL2 else proc.create_time
+ )
except (psutil.NoSuchProcess, psutil.AccessDenied):
return None
def _get_proc_name(proc):
- '''
+ """
Returns the name of a Process instance.
It's backward compatible with < 2.0 versions of psutil.
- '''
+ """
try:
return salt.utils.data.decode(proc.name() if PSUTIL2 else proc.name)
except (psutil.NoSuchProcess, psutil.AccessDenied):
@@ -84,11 +96,11 @@ def _get_proc_name(proc):
def _get_proc_status(proc):
- '''
+ """
Returns the status of a Process instance.
It's backward compatible with < 2.0 versions of psutil.
- '''
+ """
try:
return salt.utils.data.decode(proc.status() if PSUTIL2 else proc.status)
except (psutil.NoSuchProcess, psutil.AccessDenied):
@@ -96,11 +108,11 @@ def _get_proc_status(proc):
def _get_proc_username(proc):
- '''
+ """
Returns the username of a Process instance.
It's backward compatible with < 2.0 versions of psutil.
- '''
+ """
try:
return salt.utils.data.decode(proc.username() if PSUTIL2 else proc.username)
except (psutil.NoSuchProcess, psutil.AccessDenied, KeyError):
@@ -108,16 +120,16 @@ def _get_proc_username(proc):
def _get_proc_pid(proc):
- '''
+ """
Returns the pid of a Process instance.
It's backward compatible with < 2.0 versions of psutil.
- '''
+ """
return proc.pid
def top(num_processes=5, interval=3):
- '''
+ """
Return a list of top CPU consuming processes during the interval.
num_processes = return the top N CPU consuming processes
interval = the number of seconds to sample CPU usage over
@@ -129,57 +141,63 @@ def top(num_processes=5, interval=3):
salt '*' ps.top
salt '*' ps.top 5 10
- '''
+ """
result = []
start_usage = {}
for pid in psutil.pids():
try:
process = psutil.Process(pid)
- user, system = process.cpu_times()
- except ValueError:
- user, system, _, _ = process.cpu_times()
except psutil.NoSuchProcess:
continue
+ else:
+ try:
+ user, system = process.cpu_times()[:2]
+ except psutil.ZombieProcess:
+ user = system = 0.0
start_usage[process] = user + system
time.sleep(interval)
usage = set()
for process, start in six.iteritems(start_usage):
try:
- user, system = process.cpu_times()
- except ValueError:
- user, system, _, _ = process.cpu_times()
+ user, system = process.cpu_times()[:2]
except psutil.NoSuchProcess:
continue
now = user + system
diff = now - start
usage.add((diff, process))
- for idx, (diff, process) in enumerate(reversed(sorted(usage))):
- if num_processes and idx >= num_processes:
- break
- if len(_get_proc_cmdline(process)) == 0:
- cmdline = _get_proc_name(process)
- else:
- cmdline = _get_proc_cmdline(process)
- info = {'cmd': cmdline,
- 'user': _get_proc_username(process),
- 'status': _get_proc_status(process),
- 'pid': _get_proc_pid(process),
- 'create_time': _get_proc_create_time(process),
- 'cpu': {},
- 'mem': {},
+ for diff, process in sorted(usage, key=lambda x: x[0], reverse=True):
+ info = {
+ "cmd": _get_proc_cmdline(process) or _get_proc_name(process),
+ "user": _get_proc_username(process),
+ "status": _get_proc_status(process),
+ "pid": _get_proc_pid(process),
+ "create_time": _get_proc_create_time(process),
+ "cpu": {},
+ "mem": {},
}
- for key, value in six.iteritems(process.cpu_times()._asdict()):
- info['cpu'][key] = value
- for key, value in six.iteritems(process.memory_info()._asdict()):
- info['mem'][key] = value
+ try:
+ for key, value in six.iteritems(process.cpu_times()._asdict()):
+ info["cpu"][key] = value
+ for key, value in six.iteritems(process.memory_info()._asdict()):
+ info["mem"][key] = value
+ except psutil.NoSuchProcess:
+ # Process ended since psutil.pids() was run earlier in this
+ # function. Ignore this process and do not include this process in
+ # the return data.
+ continue
+
result.append(info)
+ # Stop gathering process info since we've reached the desired number
+ if len(result) >= num_processes:
+ break
+
return result
def get_pid_list():
- '''
+ """
Return a list of process ids (PIDs) for all running processes.
CLI Example:
@@ -187,12 +205,12 @@ def get_pid_list():
.. code-block:: bash
salt '*' ps.get_pid_list
- '''
+ """
return psutil.pids()
def proc_info(pid, attrs=None):
- '''
+ """
Return a dictionary of information for a process id (PID).
CLI Example:
@@ -209,7 +227,7 @@ def proc_info(pid, attrs=None):
Optional list of desired process attributes. The list of possible
attributes can be found here:
http://pythonhosted.org/psutil/#psutil.Process
- '''
+ """
try:
proc = psutil.Process(pid)
return proc.as_dict(attrs)
@@ -218,7 +236,7 @@ def proc_info(pid, attrs=None):
def kill_pid(pid, signal=15):
- '''
+ """
Kill a process by PID.
.. code-block:: bash
@@ -239,7 +257,7 @@ def kill_pid(pid, signal=15):
.. code-block:: bash
salt 'minion' ps.kill_pid 2000 signal=9
- '''
+ """
try:
psutil.Process(pid).send_signal(signal)
return True
@@ -248,7 +266,7 @@ def kill_pid(pid, signal=15):
def pkill(pattern, user=None, signal=15, full=False):
- '''
+ """
Kill processes matching a pattern.
.. code-block:: bash
@@ -283,12 +301,15 @@ def pkill(pattern, user=None, signal=15, full=False):
.. code-block:: bash
salt '*' ps.pkill bash signal=9 user=tom
- '''
+ """
killed = []
for proc in psutil.process_iter():
- name_match = pattern in ' '.join(_get_proc_cmdline(proc)) if full \
+ name_match = (
+ pattern in " ".join(_get_proc_cmdline(proc))
+ if full
else pattern in _get_proc_name(proc)
+ )
user_match = True if user is None else user == _get_proc_username(proc)
if name_match and user_match:
try:
@@ -299,11 +320,11 @@ def pkill(pattern, user=None, signal=15, full=False):
if not killed:
return None
else:
- return {'killed': killed}
+ return {"killed": killed}
-def pgrep(pattern, user=None, full=False):
- '''
+def pgrep(pattern, user=None, full=False, pattern_is_regex=False):
+ """
Return the pids for processes matching a pattern.
If full is true, the full command line is searched for a match,
@@ -323,6 +344,12 @@ def pgrep(pattern, user=None, full=False):
A boolean value indicating whether only the name of the command or
the full command line should be matched against the pattern.
+ pattern_is_regex
+ This flag enables ps.pgrep to mirror the regex search functionality
+ found in the pgrep command line utility.
+
+ .. versionadded:: 3001
+
**Examples:**
Find all httpd processes on all 'www' minions:
@@ -336,20 +363,34 @@ def pgrep(pattern, user=None, full=False):
.. code-block:: bash
salt '*' ps.pgrep bash user=tom
- '''
+ """
procs = []
+
+ if pattern_is_regex:
+ pattern = re.compile(str(pattern))
+
for proc in psutil.process_iter():
- name_match = pattern in ' '.join(_get_proc_cmdline(proc)) if full \
- else pattern in _get_proc_name(proc)
+ if full:
+ process_line = " ".join(_get_proc_cmdline(proc))
+ else:
+ process_line = _get_proc_name(proc)
+
+ if pattern_is_regex:
+ name_match = re.search(pattern, process_line)
+ else:
+ name_match = pattern in process_line
+
user_match = True if user is None else user == _get_proc_username(proc)
+
if name_match and user_match:
procs.append(_get_proc_pid(proc))
+
return procs or None
def cpu_percent(interval=0.1, per_cpu=False):
- '''
+ """
Return the percent of time the CPU is busy.
interval
@@ -363,7 +404,7 @@ def cpu_percent(interval=0.1, per_cpu=False):
.. code-block:: bash
salt '*' ps.cpu_percent
- '''
+ """
if per_cpu:
result = list(psutil.cpu_percent(interval, True))
else:
@@ -372,7 +413,7 @@ def cpu_percent(interval=0.1, per_cpu=False):
def cpu_times(per_cpu=False):
- '''
+ """
Return the percent of time the CPU spends in each state,
e.g. user, system, idle, nice, iowait, irq, softirq.
@@ -385,7 +426,7 @@ def cpu_times(per_cpu=False):
.. code-block:: bash
salt '*' ps.cpu_times
- '''
+ """
if per_cpu:
result = [dict(times._asdict()) for times in psutil.cpu_times(True)]
else:
@@ -394,7 +435,7 @@ def cpu_times(per_cpu=False):
def virtual_memory():
- '''
+ """
.. versionadded:: 2014.7.0
Return a dict that describes statistics about system memory usage.
@@ -408,15 +449,15 @@ def virtual_memory():
.. code-block:: bash
salt '*' ps.virtual_memory
- '''
+ """
if psutil.version_info < (0, 6, 0):
- msg = 'virtual_memory is only available in psutil 0.6.0 or greater'
+ msg = "virtual_memory is only available in psutil 0.6.0 or greater"
raise CommandExecutionError(msg)
return dict(psutil.virtual_memory()._asdict())
def swap_memory():
- '''
+ """
.. versionadded:: 2014.7.0
Return a dict that describes swap memory statistics.
@@ -430,15 +471,15 @@ def swap_memory():
.. code-block:: bash
salt '*' ps.swap_memory
- '''
+ """
if psutil.version_info < (0, 6, 0):
- msg = 'swap_memory is only available in psutil 0.6.0 or greater'
+ msg = "swap_memory is only available in psutil 0.6.0 or greater"
raise CommandExecutionError(msg)
return dict(psutil.swap_memory()._asdict())
def disk_partitions(all=False):
- '''
+ """
Return a list of disk partitions and their device, mount point, and
filesystem type.
@@ -451,14 +492,13 @@ def disk_partitions(all=False):
.. code-block:: bash
salt '*' ps.disk_partitions
- '''
- result = [dict(partition._asdict()) for partition in
- psutil.disk_partitions(all)]
+ """
+ result = [dict(partition._asdict()) for partition in psutil.disk_partitions(all)]
return result
def disk_usage(path):
- '''
+ """
Given a path, return a dict listing the total available space as well as
the free space, and used space.
@@ -467,12 +507,12 @@ def disk_usage(path):
.. code-block:: bash
salt '*' ps.disk_usage /home
- '''
+ """
return dict(psutil.disk_usage(path)._asdict())
def disk_partition_usage(all=False):
- '''
+ """
Return a list of disk partitions plus the mount point, filesystem and usage
statistics.
@@ -481,15 +521,15 @@ def disk_partition_usage(all=False):
.. code-block:: bash
salt '*' ps.disk_partition_usage
- '''
+ """
result = disk_partitions(all)
for partition in result:
- partition.update(disk_usage(partition['mountpoint']))
+ partition.update(disk_usage(partition["mountpoint"]))
return result
def total_physical_memory():
- '''
+ """
Return the total number of bytes of physical memory.
CLI Example:
@@ -497,9 +537,9 @@ def total_physical_memory():
.. code-block:: bash
salt '*' ps.total_physical_memory
- '''
+ """
if psutil.version_info < (0, 6, 0):
- msg = 'virtual_memory is only available in psutil 0.6.0 or greater'
+ msg = "virtual_memory is only available in psutil 0.6.0 or greater"
raise CommandExecutionError(msg)
try:
return psutil.virtual_memory().total
@@ -510,7 +550,7 @@ def total_physical_memory():
def num_cpus():
- '''
+ """
Return the number of CPUs.
CLI Example:
@@ -518,7 +558,7 @@ def num_cpus():
.. code-block:: bash
salt '*' ps.num_cpus
- '''
+ """
try:
return psutil.cpu_count()
except AttributeError:
@@ -528,7 +568,7 @@ def num_cpus():
def boot_time(time_format=None):
- '''
+ """
Return the boot time in number of seconds since the epoch began.
CLI Example:
@@ -545,7 +585,7 @@ def boot_time(time_format=None):
.. code-block:: bash
salt '*' ps.boot_time
- '''
+ """
try:
b_time = int(psutil.boot_time())
except AttributeError:
@@ -558,12 +598,12 @@ def boot_time(time_format=None):
try:
return b_time.strftime(time_format)
except TypeError as exc:
- raise SaltInvocationError('Invalid format string: {0}'.format(exc))
+ raise SaltInvocationError("Invalid format string: {0}".format(exc))
return b_time
def network_io_counters(interface=None):
- '''
+ """
Return network I/O statistics.
CLI Example:
@@ -573,7 +613,7 @@ def network_io_counters(interface=None):
salt '*' ps.network_io_counters
salt '*' ps.network_io_counters interface=eth0
- '''
+ """
if not interface:
return dict(psutil.net_io_counters()._asdict())
else:
@@ -585,7 +625,7 @@ def network_io_counters(interface=None):
def disk_io_counters(device=None):
- '''
+ """
Return disk I/O statistics.
CLI Example:
@@ -595,7 +635,7 @@ def disk_io_counters(device=None):
salt '*' ps.disk_io_counters
salt '*' ps.disk_io_counters device=sda1
- '''
+ """
if not device:
return dict(psutil.disk_io_counters()._asdict())
else:
@@ -607,7 +647,7 @@ def disk_io_counters(device=None):
def get_users():
- '''
+ """
Return logged-in users.
CLI Example:
@@ -615,7 +655,7 @@ def get_users():
.. code-block:: bash
salt '*' ps.get_users
- '''
+ """
try:
recs = psutil.users()
return [dict(x._asdict()) for x in recs]
@@ -634,14 +674,20 @@ def get_users():
started = rec[8]
if isinstance(started, tuple):
started = started[0]
- result.append({'name': rec[4], 'terminal': rec[2],
- 'started': started, 'host': rec[5]})
+ result.append(
+ {
+ "name": rec[4],
+ "terminal": rec[2],
+ "started": started,
+ "host": rec[5],
+ }
+ )
except ImportError:
return False
def lsof(name):
- '''
+ """
Retrieve the lsof information of the given process name.
CLI Example:
@@ -649,17 +695,17 @@ def lsof(name):
.. code-block:: bash
salt '*' ps.lsof apache2
- '''
+ """
sanitize_name = six.text_type(name)
- lsof_infos = __salt__['cmd.run']("lsof -c " + sanitize_name)
+ lsof_infos = __salt__["cmd.run"]("lsof -c " + sanitize_name)
ret = []
ret.extend([sanitize_name, lsof_infos])
return ret
-@salt.utils.decorators.path.which('netstat')
+@salt.utils.decorators.path.which("netstat")
def netstat(name):
- '''
+ """
Retrieve the netstat information of the given process name.
CLI Example:
@@ -667,9 +713,9 @@ def netstat(name):
.. code-block:: bash
salt '*' ps.netstat apache2
- '''
+ """
sanitize_name = six.text_type(name)
- netstat_infos = __salt__['cmd.run']("netstat -nap")
+ netstat_infos = __salt__["cmd.run"]("netstat -nap")
found_infos = []
ret = []
for info in netstat_infos.splitlines():
@@ -679,9 +725,9 @@ def netstat(name):
return ret
-@salt.utils.decorators.path.which('ss')
+@salt.utils.decorators.path.which("ss")
def ss(name):
- '''
+ """
Retrieve the ss information of the given process name.
CLI Example:
@@ -692,9 +738,9 @@ def ss(name):
.. versionadded:: 2016.11.6
- '''
+ """
sanitize_name = six.text_type(name)
- ss_infos = __salt__['cmd.run']("ss -neap")
+ ss_infos = __salt__["cmd.run"]("ss -neap")
found_infos = []
ret = []
for info in ss_infos.splitlines():
@@ -705,7 +751,7 @@ def ss(name):
def psaux(name):
- '''
+ """
Retrieve information corresponding to a "ps aux" filtered
with the given pattern. It could be just a name or a regular
expression (using python search from "re" module).
@@ -715,11 +761,11 @@ def psaux(name):
.. code-block:: bash
salt '*' ps.psaux www-data.+apache2
- '''
+ """
sanitize_name = six.text_type(name)
pattern = re.compile(sanitize_name)
salt_exception_pattern = re.compile("salt.+ps.psaux.+")
- ps_aux = __salt__['cmd.run']("ps aux")
+ ps_aux = __salt__["cmd.run"]("ps aux")
found_infos = []
ret = []
nb_lines = 0
--
2.29.1

View File

@ -1,4 +1,4 @@
From e0ca0d0d2a62f18e2712223e130af5faa8e0fe05 Mon Sep 17 00:00:00 2001
From b4f54187ae7d231250f72244ffd874cc2c846150 Mon Sep 17 00:00:00 2001
From: Jochen Breuer <jbreuer@suse.de>
Date: Thu, 28 Nov 2019 15:23:36 +0100
Subject: [PATCH] Fix for log checking in x509 test
@ -9,10 +9,10 @@ We are logging in debug and not in trace mode here.
1 file changed, 3 insertions(+), 3 deletions(-)
diff --git a/tests/unit/modules/test_x509.py b/tests/unit/modules/test_x509.py
index 624a927bec..976af634c7 100644
index 40aea12272..e7503395eb 100644
--- a/tests/unit/modules/test_x509.py
+++ b/tests/unit/modules/test_x509.py
@@ -68,9 +68,9 @@ class X509TestCase(TestCase, LoaderModuleMockMixin):
@@ -127,9 +127,9 @@ class X509TestCase(TestCase, LoaderModuleMockMixin):
subj = FakeSubject()
x509._parse_subject(subj)
@ -23,9 +23,9 @@ index 624a927bec..976af634c7 100644
+ assert x509.log.debug.call_args[0][1] == list(subj.nid.keys())[0]
+ assert isinstance(x509.log.debug.call_args[0][2], TypeError)
@skipIf(not HAS_M2CRYPTO, 'Skipping, M2Crypto is unavailble')
@skipIf(not HAS_M2CRYPTO, "Skipping, M2Crypto is unavailable")
def test_get_pem_entry(self):
--
2.16.4
2.29.2

View File

@ -1,113 +0,0 @@
From 0c988e1db59a255b2f707c4e626cec21ff06d7a3 Mon Sep 17 00:00:00 2001
From: Jochen Breuer <jbreuer@suse.de>
Date: Thu, 9 Apr 2020 17:12:54 +0200
Subject: [PATCH] Fix for return value ret vs return in batch mode
The least intrusive fix for ret vs return in batch mode.
---
salt/cli/batch.py | 16 ++++++----
tests/unit/cli/test_batch.py | 62 ++++++++++++++++++++++++++++++++++++
2 files changed, 71 insertions(+), 7 deletions(-)
diff --git a/salt/cli/batch.py b/salt/cli/batch.py
index 10fc81a5f4..d5b8754ad7 100644
--- a/salt/cli/batch.py
+++ b/salt/cli/batch.py
@@ -234,14 +234,16 @@ class Batch(object):
if not self.quiet:
salt.utils.stringutils.print_cli('\nExecuting run on {0}\n'.format(sorted(next_)))
# create a new iterator for this batch of minions
+ return_value = self.opts.get("return", self.opts.get("ret", ""))
new_iter = self.local.cmd_iter_no_block(
- *args,
- raw=self.opts.get('raw', False),
- ret=self.opts.get('return', ''),
- show_jid=show_jid,
- verbose=show_verbose,
- gather_job_timeout=self.opts['gather_job_timeout'],
- **self.eauth)
+ *args,
+ raw=self.opts.get("raw", False),
+ ret=return_value,
+ show_jid=show_jid,
+ verbose=show_verbose,
+ gather_job_timeout=self.opts["gather_job_timeout"],
+ **self.eauth
+ )
# add it to our iterators and to the minion_tracker
iters.append(new_iter)
minion_tracker[new_iter] = {}
diff --git a/tests/unit/cli/test_batch.py b/tests/unit/cli/test_batch.py
index acabbe51f5..d7411e8039 100644
--- a/tests/unit/cli/test_batch.py
+++ b/tests/unit/cli/test_batch.py
@@ -72,3 +72,65 @@ class BatchTestCase(TestCase):
'''
ret = Batch.get_bnum(self.batch)
self.assertEqual(ret, None)
+
+ def test_return_value_in_run_for_ret(self):
+ """
+ cmd_iter_no_block should have been called with a return no matter if
+ the return value was in ret or return.
+ """
+ self.batch.opts = {
+ "batch": "100%",
+ "timeout": 5,
+ "fun": "test",
+ "arg": "foo",
+ "gather_job_timeout": 5,
+ "ret": "my_return",
+ }
+ self.batch.minions = ["foo", "bar", "baz"]
+ self.batch.local.cmd_iter_no_block = MagicMock(return_value=iter([]))
+ ret = Batch.run(self.batch)
+ # We need to fetch at least one object to trigger the relevant code path.
+ x = next(ret)
+ self.batch.local.cmd_iter_no_block.assert_called_with(
+ ["baz", "bar", "foo"],
+ "test",
+ "foo",
+ 5,
+ "list",
+ raw=False,
+ ret="my_return",
+ show_jid=False,
+ verbose=False,
+ gather_job_timeout=5,
+ )
+
+ def test_return_value_in_run_for_return(self):
+ """
+ cmd_iter_no_block should have been called with a return no matter if
+ the return value was in ret or return.
+ """
+ self.batch.opts = {
+ "batch": "100%",
+ "timeout": 5,
+ "fun": "test",
+ "arg": "foo",
+ "gather_job_timeout": 5,
+ "return": "my_return",
+ }
+ self.batch.minions = ["foo", "bar", "baz"]
+ self.batch.local.cmd_iter_no_block = MagicMock(return_value=iter([]))
+ ret = Batch.run(self.batch)
+ # We need to fetch at least one object to trigger the relevant code path.
+ x = next(ret)
+ self.batch.local.cmd_iter_no_block.assert_called_with(
+ ["baz", "bar", "foo"],
+ "test",
+ "foo",
+ 5,
+ "list",
+ raw=False,
+ ret="my_return",
+ show_jid=False,
+ verbose=False,
+ gather_job_timeout=5,
+ )
--
2.26.1

View File

@ -1,4 +1,4 @@
From 16d656744d2e7d915757d6f2ae26b57ad8230b0b Mon Sep 17 00:00:00 2001
From 369a732537937dd6865152a87f04777539b27fcd Mon Sep 17 00:00:00 2001
From: Jochen Breuer <jbreuer@suse.de>
Date: Thu, 6 Sep 2018 17:15:18 +0200
Subject: [PATCH] Fix for SUSE Expanded Support detection
@ -14,26 +14,26 @@ This change also adds a check for redhat-release and then marks the
1 file changed, 9 insertions(+)
diff --git a/salt/grains/core.py b/salt/grains/core.py
index 9b244def9c..2851809472 100644
index 436c058eb6..00bd0565bf 100644
--- a/salt/grains/core.py
+++ b/salt/grains/core.py
@@ -1892,6 +1892,15 @@ def os_data():
log.trace('Parsing distrib info from /etc/centos-release')
@@ -1990,6 +1990,15 @@ def os_data():
log.trace("Parsing distrib info from /etc/centos-release")
# CentOS Linux
grains['lsb_distrib_id'] = 'CentOS'
grains["lsb_distrib_id"] = "CentOS"
+ # Maybe CentOS Linux; could also be SUSE Expanded Support.
+ # SUSE ES has both, centos-release and redhat-release.
+ if os.path.isfile('/etc/redhat-release'):
+ with salt.utils.files.fopen('/etc/redhat-release') as ifile:
+ if os.path.isfile("/etc/redhat-release"):
+ with salt.utils.files.fopen("/etc/redhat-release") as ifile:
+ for line in ifile:
+ if "red hat enterprise linux server" in line.lower():
+ # This is a SUSE Expanded Support Rhel installation
+ grains['lsb_distrib_id'] = 'RedHat'
+ grains["lsb_distrib_id"] = "RedHat"
+ break
with salt.utils.files.fopen('/etc/centos-release') as ifile:
with salt.utils.files.fopen("/etc/centos-release") as ifile:
for line in ifile:
# Need to pull out the version and codename
--
2.16.4
2.29.2

View File

@ -1,4 +1,4 @@
From dd01a0fc594f024eee2267bed2f698f5a6c729bf Mon Sep 17 00:00:00 2001
From 33766e59bd53fac2c75e6ccfa1f363e2f7b1b65f Mon Sep 17 00:00:00 2001
From: Jochen Breuer <jbreuer@suse.de>
Date: Mon, 16 Mar 2020 15:25:42 +0100
Subject: [PATCH] Fix for temp folder definition in loader unit test
@ -8,13 +8,13 @@ Subject: [PATCH] Fix for temp folder definition in loader unit test
1 file changed, 4 insertions(+), 4 deletions(-)
diff --git a/tests/unit/test_loader.py b/tests/unit/test_loader.py
index fe11cd0681..7e369f2c3b 100644
index 863e2182b9..5b23ad83e3 100644
--- a/tests/unit/test_loader.py
+++ b/tests/unit/test_loader.py
@@ -152,12 +152,12 @@ class LazyLoaderUtilsTest(TestCase):
@@ -240,12 +240,12 @@ class LazyLoaderUtilsTest(TestCase):
def setUpClass(cls):
cls.opts = salt.config.minion_config(None)
cls.opts['grains'] = salt.loader.grains(cls.opts)
cls.opts["grains"] = salt.loader.grains(cls.opts)
- if not os.path.isdir(TMP):
- os.makedirs(TMP)
+ if not os.path.isdir(RUNTIME_VARS.TMP):
@ -24,19 +24,19 @@ index fe11cd0681..7e369f2c3b 100644
# Setup the module
- self.module_dir = tempfile.mkdtemp(dir=TMP)
+ self.module_dir = tempfile.mkdtemp(dir=RUNTIME_VARS.TMP)
self.module_file = os.path.join(self.module_dir,
'{}.py'.format(self.module_name))
with salt.utils.files.fopen(self.module_file, 'w') as fh:
@@ -165,7 +165,7 @@ class LazyLoaderUtilsTest(TestCase):
self.module_file = os.path.join(
self.module_dir, "{}.py".format(self.module_name)
)
@@ -254,7 +254,7 @@ class LazyLoaderUtilsTest(TestCase):
fh.flush()
os.fsync(fh.fileno())
- self.utils_dir = tempfile.mkdtemp(dir=TMP)
+ self.utils_dir = tempfile.mkdtemp(dir=RUNTIME_VARS.TMP)
self.utils_file = os.path.join(self.utils_dir,
'{}.py'.format(self.utils_name))
with salt.utils.files.fopen(self.utils_file, 'w') as fh:
self.utils_file = os.path.join(self.utils_dir, "{}.py".format(self.utils_name))
with salt.utils.files.fopen(self.utils_file, "w") as fh:
fh.write(salt.utils.stringutils.to_str(loader_template_utils))
--
2.16.4
2.29.2

View File

@ -1,4 +1,4 @@
From 900d63bc5e85496e16373025457561b405f2329f Mon Sep 17 00:00:00 2001
From f5c9527aeee190a66a908037770c80a75e911d8c Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Pablo=20Su=C3=A1rez=20Hern=C3=A1ndez?=
<psuarezhernandez@suse.com>
Date: Tue, 6 Nov 2018 16:38:54 +0000
@ -11,37 +11,42 @@ Test git ext_pillar across multiple repos using __env__
Remove unicode references
---
tests/integration/pillar/test_git_pillar.py | 45 +++++++++++++++++++++++++++++
1 file changed, 45 insertions(+)
tests/integration/pillar/test_git_pillar.py | 55 +++++++++++++++++++++
1 file changed, 55 insertions(+)
diff --git a/tests/integration/pillar/test_git_pillar.py b/tests/integration/pillar/test_git_pillar.py
index 2e549f3948..d417a7ebc3 100644
index c0362127f6..979dfebb94 100644
--- a/tests/integration/pillar/test_git_pillar.py
+++ b/tests/integration/pillar/test_git_pillar.py
@@ -1382,6 +1382,51 @@ class TestPygit2SSH(GitPillarSSHTestBase):
'nested_dict': {'master': True}}}
@@ -1600,6 +1600,61 @@ class TestPygit2SSH(GitPillarSSHTestBase):
},
)
+
+@skipIf(NO_MOCK, NO_MOCK_REASON)
+@skipIf(_windows_or_mac(), 'minion is windows or mac')
+@skipIf(_windows_or_mac(), "minion is windows or mac")
+@skip_if_not_root
+@skipIf(not HAS_PYGIT2, 'pygit2 >= {0} and libgit2 >= {1} required'.format(PYGIT2_MINVER, LIBGIT2_MINVER))
+@skipIf(not HAS_NGINX, 'nginx not present')
+@skipIf(not HAS_VIRTUALENV, 'virtualenv not present')
+@skipIf(
+ not HAS_PYGIT2,
+ "pygit2 >= {} and libgit2 >= {} required".format(PYGIT2_MINVER, LIBGIT2_MINVER),
+)
+@skipIf(not HAS_NGINX, "nginx not present")
+@skipIf(not HAS_VIRTUALENV, "virtualenv not present")
+class TestPygit2HTTP(GitPillarHTTPTestBase):
+ '''
+ """
+ Test git_pillar with pygit2 using SSH authentication
+ '''
+ """
+
+ def test_single_source(self):
+ '''
+ """
+ Test with git_pillar_includes enabled and using "__env__" as the branch
+ name for the configured repositories.
+ The "gitinfo" repository contains top.sls file with a local reference
+ and also referencing external "nowhere.foo" which is provided by "webinfo"
+ repository mounted as "nowhere".
+ '''
+ ret = self.get_pillar('''\
+ """
+ ret = self.get_pillar(
+ """\
+ file_ignore_regex: []
+ file_ignore_glob: []
+ git_pillar_provider: pygit2
@ -56,21 +61,26 @@ index 2e549f3948..d417a7ebc3 100644
+ - __env__ {url}:
+ - name: webinfo
+ - mountpoint: nowhere
+ ''')
+ """
+ )
+ self.assertEqual(
+ ret,
+ {'branch': 'master',
+ 'motd': 'The force will be with you. Always.',
+ 'mylist': ['master'],
+ 'mydict': {'master': True,
+ 'nested_list': ['master'],
+ 'nested_dict': {'master': True}}}
+ {
+ "branch": "master",
+ "motd": "The force will be with you. Always.",
+ "mylist": ["master"],
+ "mydict": {
+ "master": True,
+ "nested_list": ["master"],
+ "nested_dict": {"master": True},
+ },
+ },
+ )
+
@requires_system_grains
def test_root_parameter(self, grains):
'''
@slowTest
def test_root_parameter(self):
"""
--
2.16.4
2.29.2

View File

@ -1,4 +1,4 @@
From 4998996a08db72a1b925b2c3f725c4fba4fe9622 Mon Sep 17 00:00:00 2001
From e2ff2f339ce7938ecdadf867f285a559bc2431dd Mon Sep 17 00:00:00 2001
From: Dominik Gedon <dgedon@suse.de>
Date: Tue, 6 Oct 2020 14:00:55 +0200
Subject: [PATCH] Fix grains.test_core unit test (#277)
@ -6,41 +6,38 @@ Subject: [PATCH] Fix grains.test_core unit test (#277)
This reverts 63b94ae and fixes the grains test_core unit test. The
changes are aligned with upstream.
---
tests/unit/grains/test_core.py | 13 ++++++++-----
1 file changed, 8 insertions(+), 5 deletions(-)
tests/unit/grains/test_core.py | 9 ++++-----
1 file changed, 4 insertions(+), 5 deletions(-)
diff --git a/tests/unit/grains/test_core.py b/tests/unit/grains/test_core.py
index 36aa49f232..d3b6515d00 100644
index 918a9155cb..15de4e363e 100644
--- a/tests/unit/grains/test_core.py
+++ b/tests/unit/grains/test_core.py
@@ -69,10 +69,11 @@ class CoreGrainsTestCase(TestCase, LoaderModuleMockMixin):
def test_parse_etc_os_release(self, path_isfile_mock):
path_isfile_mock.side_effect = lambda x: x == "/usr/lib/os-release"
with salt.utils.files.fopen(os.path.join(OS_RELEASE_DIR, "ubuntu-17.10")) as os_release_file:
@@ -60,11 +60,10 @@ class CoreGrainsTestCase(TestCase, LoaderModuleMockMixin):
with salt.utils.files.fopen(
os.path.join(OS_RELEASE_DIR, "ubuntu-17.10")
) as os_release_file:
- os_release_content = os_release_file.readlines()
- with patch("salt.utils.files.fopen", mock_open()) as os_release_file:
- os_release_file.return_value.__iter__.return_value = os_release_content
- os_release = core._parse_os_release(["/etc/os-release", "/usr/lib/os-release"])
+ os_release_content = os_release_file.read()
+ with patch("salt.utils.files.fopen", mock_open(read_data=os_release_content)):
+ os_release = core._parse_os_release(
os_release = core._parse_os_release(
- ["/etc/os-release", "/usr/lib/os-release"]
+ "/etc/os-release", "/usr/lib/os-release"
+ )
self.assertEqual(os_release, {
"NAME": "Ubuntu",
"VERSION": "17.10 (Artful Aardvark)",
@@ -134,7 +135,9 @@ class CoreGrainsTestCase(TestCase, LoaderModuleMockMixin):
)
self.assertEqual(
os_release,
@@ -174,7 +173,7 @@ class CoreGrainsTestCase(TestCase, LoaderModuleMockMixin):
def test_missing_os_release(self):
with patch('salt.utils.files.fopen', mock_open(read_data={})):
- os_release = core._parse_os_release(['/etc/os-release', '/usr/lib/os-release'])
+ os_release = core._parse_os_release(
with patch("salt.utils.files.fopen", mock_open(read_data={})):
os_release = core._parse_os_release(
- ["/etc/os-release", "/usr/lib/os-release"]
+ "/etc/os-release", "/usr/lib/os-release"
+ )
)
self.assertEqual(os_release, {})
@skipIf(not salt.utils.platform.is_windows(), 'System is not Windows')
--
2.28.0
2.29.2

View File

@ -1,4 +1,4 @@
From 2cb7515f83e2c358b84724e4eb581daa78012fdf Mon Sep 17 00:00:00 2001
From 082bb6a25b2b025a5c7c6fdbf7dbcbe64a39da2c Mon Sep 17 00:00:00 2001
From: Bo Maryniuk <bo@suse.de>
Date: Fri, 28 Sep 2018 15:22:33 +0200
Subject: [PATCH] Fix IPv6 scope (bsc#1108557)
@ -69,14 +69,14 @@ Lintfix: W0611
Reverse skipping tests: if no ipaddress
---
salt/_compat.py | 74 +++++++++++++++++++++++++++++++++++++++++++++++++++++++++
salt/_compat.py | 74 +++++++++++++++++++++++++++++++++++++++++++++++++
1 file changed, 74 insertions(+)
diff --git a/salt/_compat.py b/salt/_compat.py
index e999605d2c..965bb90da3 100644
index 011eb8af9e..d9425523cf 100644
--- a/salt/_compat.py
+++ b/salt/_compat.py
@@ -230,7 +230,81 @@ class IPv6InterfaceScoped(ipaddress.IPv6Interface, IPv6AddressScoped):
@@ -242,7 +242,81 @@ class IPv6InterfaceScoped(ipaddress.IPv6Interface, IPv6AddressScoped):
self.hostmask = self.network.hostmask
@ -159,6 +159,6 @@ index e999605d2c..965bb90da3 100644
+ ipaddress.ip_address = ip_address
+ ipaddress.ip_interface = ip_interface
--
2.16.4
2.29.2

View File

@ -1,4 +1,4 @@
From bfdd7f946d56d799e89b33f7e3b72426732b0195 Mon Sep 17 00:00:00 2001
From db77ad3e24daf3bc014dc3d85a49aa1bb33ae1ae Mon Sep 17 00:00:00 2001
From: Bo Maryniuk <bo@suse.de>
Date: Wed, 9 Jan 2019 16:08:19 +0100
Subject: [PATCH] Fix issue #2068 test
@ -13,19 +13,19 @@ Minor update: more correct is-dict check.
1 file changed, 6 insertions(+), 3 deletions(-)
diff --git a/salt/state.py b/salt/state.py
index bc5277554e..2fa5f64ca5 100644
index b1bce4e0cd..cc6db7e1b2 100644
--- a/salt/state.py
+++ b/salt/state.py
@@ -25,6 +25,7 @@ import traceback
import re
import time
import random
+import collections
@@ -12,6 +12,7 @@ The data sent to the state calls is as follows:
"""
# Import salt libs
import salt.loader
@@ -2896,16 +2897,18 @@ class State(object):
'''
+import collections
import copy
import datetime
import fnmatch
@@ -3206,16 +3207,18 @@ class State:
"""
for chunk in high:
state = high[chunk]
+ if not isinstance(state, collections.Mapping):
@ -35,18 +35,18 @@ index bc5277554e..2fa5f64ca5 100644
+ if not isinstance(state[state_ref], list):
+ continue
for argset in state[state_ref]:
if isinstance(argset, six.string_types):
if isinstance(argset, str):
needs_default = False
break
if needs_default:
- order = state[state_ref].pop(-1)
- state[state_ref].append('__call__')
- state[state_ref].append("__call__")
- state[state_ref].append(order)
+ state[state_ref].insert(-1, '__call__')
+ state[state_ref].insert(-1, "__call__")
def call_high(self, high, orchestration_jid=None):
'''
"""
--
2.16.4
2.29.2

View File

@ -1,4 +1,4 @@
From 77d53d9567b7aec045a8fffd29afcb76a8405caf Mon Sep 17 00:00:00 2001
From 00c538383e463febba492e74577ae64be80d4d00 Mon Sep 17 00:00:00 2001
From: Mihai Dinca <mdinca@suse.de>
Date: Mon, 16 Sep 2019 11:27:30 +0200
Subject: [PATCH] Fix memory leak produced by batch async find_jobs
@ -16,16 +16,16 @@ Multiple fixes:
Co-authored-by: Pablo Suárez Hernández <psuarezhernandez@suse.com>
---
salt/cli/batch_async.py | 60 ++++++++++++++++++++++++++++++++-----------------
salt/cli/batch_async.py | 59 ++++++++++++++++++++++++++++-------------
salt/client/__init__.py | 1 +
salt/master.py | 1 -
salt/master.py | 2 --
3 files changed, 41 insertions(+), 21 deletions(-)
diff --git a/salt/cli/batch_async.py b/salt/cli/batch_async.py
index 8c8f481e34..8a67331102 100644
index 7225491228..388b709416 100644
--- a/salt/cli/batch_async.py
+++ b/salt/cli/batch_async.py
@@ -72,6 +72,7 @@ class BatchAsync(object):
@@ -73,6 +73,7 @@ class BatchAsync:
self.done_minions = set()
self.active = set()
self.initialized = False
@ -33,55 +33,58 @@ index 8c8f481e34..8a67331102 100644
self.ping_jid = jid_gen()
self.batch_jid = jid_gen()
self.find_job_jid = jid_gen()
@@ -89,14 +90,11 @@ class BatchAsync(object):
@@ -91,14 +92,11 @@ class BatchAsync:
def __set_event_handler(self):
ping_return_pattern = 'salt/job/{0}/ret/*'.format(self.ping_jid)
batch_return_pattern = 'salt/job/{0}/ret/*'.format(self.batch_jid)
- find_job_return_pattern = 'salt/job/{0}/ret/*'.format(self.find_job_jid)
self.event.subscribe(ping_return_pattern, match_type='glob')
self.event.subscribe(batch_return_pattern, match_type='glob')
- self.event.subscribe(find_job_return_pattern, match_type='glob')
ping_return_pattern = "salt/job/{}/ret/*".format(self.ping_jid)
batch_return_pattern = "salt/job/{}/ret/*".format(self.batch_jid)
- find_job_return_pattern = "salt/job/{}/ret/*".format(self.find_job_jid)
self.event.subscribe(ping_return_pattern, match_type="glob")
self.event.subscribe(batch_return_pattern, match_type="glob")
- self.event.subscribe(find_job_return_pattern, match_type="glob")
- self.event.patterns = {
+ self.patterns = {
(ping_return_pattern, 'ping_return'),
(batch_return_pattern, 'batch_run'),
- (find_job_return_pattern, 'find_job_return')
(ping_return_pattern, "ping_return"),
(batch_return_pattern, "batch_run"),
- (find_job_return_pattern, "find_job_return"),
}
self.event.set_event_handler(self.__event_handler)
@@ -104,7 +102,7 @@ class BatchAsync(object):
@@ -106,7 +104,7 @@ class BatchAsync:
if not self.event:
return
mtag, data = self.event.unpack(raw, self.event.serial)
- for (pattern, op) in self.event.patterns:
+ for (pattern, op) in self.patterns:
if fnmatch.fnmatch(mtag, pattern):
minion = data['id']
if op == 'ping_return':
@@ -112,7 +110,8 @@ class BatchAsync(object):
minion = data["id"]
if op == "ping_return":
@@ -114,7 +112,8 @@ class BatchAsync:
if self.targeted_minions == self.minions:
self.event.io_loop.spawn_callback(self.start_batch)
elif op == 'find_job_return':
elif op == "find_job_return":
- self.find_job_returned.add(minion)
+ if data.get("return", None):
+ self.find_job_returned.add(minion)
elif op == 'batch_run':
elif op == "batch_run":
if minion in self.active:
self.active.remove(minion)
@@ -131,31 +130,46 @@ class BatchAsync(object):
@@ -134,7 +133,11 @@ class BatchAsync:
return set(list(to_run)[:next_batch_size])
@tornado.gen.coroutine
- def check_find_job(self, batch_minions):
+ def check_find_job(self, batch_minions, jid):
+ find_job_return_pattern = 'salt/job/{0}/ret/*'.format(jid)
+ self.event.unsubscribe(find_job_return_pattern, match_type='glob')
+ find_job_return_pattern = "salt/job/{}/ret/*".format(jid)
+ self.event.unsubscribe(find_job_return_pattern, match_type="glob")
+ self.patterns.remove((find_job_return_pattern, "find_job_return"))
+
timedout_minions = batch_minions.difference(self.find_job_returned).difference(self.done_minions)
self.timedout_minions = self.timedout_minions.union(timedout_minions)
self.active = self.active.difference(self.timedout_minions)
running = batch_minions.difference(self.done_minions).difference(self.timedout_minions)
timedout_minions = batch_minions.difference(self.find_job_returned).difference(
self.done_minions
)
@@ -143,27 +146,39 @@ class BatchAsync:
running = batch_minions.difference(self.done_minions).difference(
self.timedout_minions
)
+
if timedout_minions:
self.schedule_next()
@ -95,56 +98,59 @@ index 8c8f481e34..8a67331102 100644
- not_done = minions.difference(self.done_minions)
- ping_return = yield self.local.run_job_async(
- not_done,
- 'saltutil.find_job',
- "saltutil.find_job",
- [self.batch_jid],
- 'list',
- gather_job_timeout=self.opts['gather_job_timeout'],
- "list",
- gather_job_timeout=self.opts["gather_job_timeout"],
- jid=self.find_job_jid,
- **self.eauth)
- **self.eauth
- )
- self.event.io_loop.call_later(
- self.opts['gather_job_timeout'],
- self.check_find_job,
- not_done)
+ not_done = minions.difference(self.done_minions).difference(self.timedout_minions)
+
- self.opts["gather_job_timeout"], self.check_find_job, not_done
+ not_done = minions.difference(self.done_minions).difference(
+ self.timedout_minions
)
+ if not_done:
+ jid = self.jid_gen()
+ find_job_return_pattern = 'salt/job/{0}/ret/*'.format(jid)
+ find_job_return_pattern = "salt/job/{}/ret/*".format(jid)
+ self.patterns.add((find_job_return_pattern, "find_job_return"))
+ self.event.subscribe(find_job_return_pattern, match_type='glob')
+ self.event.subscribe(find_job_return_pattern, match_type="glob")
+
+ ret = yield self.local.run_job_async(
+ not_done,
+ 'saltutil.find_job',
+ "saltutil.find_job",
+ [self.batch_jid],
+ 'list',
+ gather_job_timeout=self.opts['gather_job_timeout'],
+ "list",
+ gather_job_timeout=self.opts["gather_job_timeout"],
+ jid=jid,
+ **self.eauth)
+ **self.eauth
+ )
+ self.event.io_loop.call_later(
+ self.opts['gather_job_timeout'],
+ self.check_find_job,
+ not_done,
+ jid)
+ self.opts["gather_job_timeout"], self.check_find_job, not_done, jid
+ )
+
@tornado.gen.coroutine
def start(self):
@@ -203,6 +217,9 @@ class BatchAsync(object):
self.__set_event_handler()
@@ -211,6 +226,9 @@ class BatchAsync:
}
self.event.fire_event(data, "salt/batch/{0}/done".format(self.batch_jid))
self.event.fire_event(data, "salt/batch/{}/done".format(self.batch_jid))
self.event.remove_event_handler(self.__event_handler)
+ for (pattern, label) in self.patterns:
+ if label in ["ping_return", "batch_run"]:
+ self.event.unsubscribe(pattern, match_type='glob')
+ self.event.unsubscribe(pattern, match_type="glob")
def schedule_next(self):
if not self.scheduled:
@@ -226,9 +243,12 @@ class BatchAsync(object):
gather_job_timeout=self.opts['gather_job_timeout'],
@@ -235,11 +253,14 @@ class BatchAsync:
jid=self.batch_jid,
metadata=self.metadata)
metadata=self.metadata,
)
+
self.event.io_loop.call_later(self.opts['timeout'], self.find_job, set(next_batch))
self.event.io_loop.call_later(
self.opts["timeout"], self.find_job, set(next_batch)
)
except Exception as ex:
+ log.error("Error in scheduling next batch: %s", ex)
self.active = self.active.difference(next_batch)
@ -153,30 +159,31 @@ index 8c8f481e34..8a67331102 100644
self.scheduled = False
+ yield
diff --git a/salt/client/__init__.py b/salt/client/__init__.py
index 3bbc7f9de7..a48d79ef8d 100644
index 1e9f11df4c..cc8fd4048d 100644
--- a/salt/client/__init__.py
+++ b/salt/client/__init__.py
@@ -1622,6 +1622,7 @@ class LocalClient(object):
'key': self.key,
'tgt_type': tgt_type,
'ret': ret,
+ 'timeout': timeout,
'jid': jid}
@@ -1776,6 +1776,7 @@ class LocalClient:
"key": self.key,
"tgt_type": tgt_type,
"ret": ret,
+ "timeout": timeout,
"jid": jid,
}
# if kwargs are passed, pack them.
diff --git a/salt/master.py b/salt/master.py
index 5e2277ba76..3abf7ae60b 100644
index b9bc1a7a67..7a99af357a 100644
--- a/salt/master.py
+++ b/salt/master.py
@@ -2044,7 +2044,6 @@ class ClearFuncs(object):
@@ -2232,8 +2232,6 @@ class ClearFuncs(TransportMethods):
def publish_batch(self, clear_load, minions, missing):
batch_load = {}
batch_load.update(clear_load)
- import salt.cli.batch_async
-
batch = salt.cli.batch_async.BatchAsync(
self.local.opts,
functools.partial(self._prep_jid, clear_load, {}),
--
2.16.4
2.29.2

View File

@ -1,4 +1,4 @@
From f69c1178de003866af412e61e0146597974eec0d Mon Sep 17 00:00:00 2001
From 4123cf7b9428af1442f4aa0a54489e5c0deb4aaa Mon Sep 17 00:00:00 2001
From: Martin Seidl <mseidl@suse.de>
Date: Tue, 27 Oct 2020 16:12:29 +0100
Subject: [PATCH] Fix novendorchange option (#284)
@ -7,39 +7,43 @@ Subject: [PATCH] Fix novendorchange option (#284)
* refactor handling of novendorchange and fix tests
---
salt/modules/zypperpkg.py | 19 ++--
tests/unit/modules/test_zypperpkg.py | 150 ++++++++++++++++++++++++---
2 files changed, 148 insertions(+), 21 deletions(-)
salt/modules/zypperpkg.py | 21 +++---
tests/unit/modules/test_zypperpkg.py | 100 ++++++++++++++++++++++++++-
2 files changed, 108 insertions(+), 13 deletions(-)
diff --git a/salt/modules/zypperpkg.py b/salt/modules/zypperpkg.py
index ad11da4ad1..d84a6af6e0 100644
index 5369a0342e..d06c265202 100644
--- a/salt/modules/zypperpkg.py
+++ b/salt/modules/zypperpkg.py
@@ -1617,7 +1617,7 @@ def upgrade(refresh=True,
dryrun=False,
dist_upgrade=False,
fromrepo=None,
- novendorchange=False,
+ novendorchange=True,
skip_verify=False,
no_recommends=False,
root=None,
@@ -1701,13 +1701,18 @@ def upgrade(refresh=True,
log.info('Targeting repos: %s', fromrepo)
@@ -1707,7 +1707,7 @@ def upgrade(
dryrun=False,
dist_upgrade=False,
fromrepo=None,
- novendorchange=False,
+ novendorchange=True,
skip_verify=False,
no_recommends=False,
root=None,
@@ -1794,19 +1794,18 @@ def upgrade(
log.info("Targeting repos: %s", fromrepo)
if dist_upgrade:
- if novendorchange:
- # TODO: Grains validation should be moved to Zypper class
- if __grains__['osrelease_info'][0] > 11:
- cmd_update.append('--no-allow-vendor-change')
- log.info('Disabling vendor changes')
- if __grains__["osrelease_info"][0] > 11:
+ # TODO: Grains validation should be moved to Zypper class
+ if __grains__["osrelease_info"][0] > 11:
+ if novendorchange:
+ cmd_update.append("--no-allow-vendor-change")
+ log.info("Disabling vendor changes")
cmd_update.append("--no-allow-vendor-change")
log.info("Disabling vendor changes")
else:
- log.warning('Disabling vendor changes is not supported on this Zypper version')
- log.warning(
- "Disabling vendor changes is not supported on this Zypper version"
- )
-
- if no_recommends:
- cmd_update.append("--no-recommends")
- log.info("Disabling recommendations")
+ cmd_update.append("--allow-vendor-change")
+ log.info("Enabling vendor changes")
+ else:
@ -48,121 +52,26 @@ index ad11da4ad1..d84a6af6e0 100644
+ )
if no_recommends:
cmd_update.append('--no-recommends')
cmd_update.append("--no-recommends")
diff --git a/tests/unit/modules/test_zypperpkg.py b/tests/unit/modules/test_zypperpkg.py
index a3d20f66d5..8cc84485b5 100644
index a60b1546c6..eaa4d9a76a 100644
--- a/tests/unit/modules/test_zypperpkg.py
+++ b/tests/unit/modules/test_zypperpkg.py
@@ -480,7 +480,11 @@ class ZypperTestCase(TestCase, LoaderModuleMockMixin):
with patch('salt.modules.zypperpkg.list_pkgs', MagicMock(side_effect=[{"vim": "1.1"}, {"vim": "1.2"}])):
@@ -642,7 +642,9 @@ class ZypperTestCase(TestCase, LoaderModuleMockMixin):
ret = zypper.upgrade(dist_upgrade=True)
self.assertDictEqual(ret, {"vim": {"old": "1.1", "new": "1.2"}})
- zypper_mock.assert_any_call('dist-upgrade', '--auto-agree-with-licenses')
+ zypper_mock.assert_any_call(
zypper_mock.assert_any_call(
- "dist-upgrade", "--auto-agree-with-licenses"
+ "dist-upgrade",
+ "--auto-agree-with-licenses",
+ "--no-allow-vendor-change",
+ )
)
with patch('salt.modules.zypperpkg.list_pkgs', MagicMock(side_effect=[{"vim": "1.1"}, {"vim": "1.1"}])):
ret = zypper.upgrade(dist_upgrade=True, dryrun=True)
@@ -488,25 +492,138 @@ class ZypperTestCase(TestCase, LoaderModuleMockMixin):
zypper_mock.assert_any_call('dist-upgrade', '--auto-agree-with-licenses',
'--dry-run', '--debug-solver')
with patch(
@@ -660,6 +662,76 @@ class ZypperTestCase(TestCase, LoaderModuleMockMixin):
"--debug-solver",
)
- with patch('salt.modules.zypperpkg.list_pkgs', MagicMock(side_effect=[{"vim": "1.1"}, {"vim": "1.1"}])):
- ret = zypper.upgrade(dist_upgrade=True, dryrun=True,
- fromrepo=["Dummy", "Dummy2"], novendorchange=True)
- zypper_mock.assert_any_call('dist-upgrade', '--auto-agree-with-licenses', '--dry-run',
- '--from', "Dummy", '--from', 'Dummy2', '--no-allow-vendor-change')
- zypper_mock.assert_any_call('dist-upgrade', '--auto-agree-with-licenses', '--dry-run',
- '--from', "Dummy", '--from', 'Dummy2', '--no-allow-vendor-change',
- '--debug-solver')
-
with patch('salt.modules.zypperpkg.list_pkgs', MagicMock(side_effect=[{"vim": "1.1"}, {"vim": "1.1"}])):
ret = zypper.upgrade(dist_upgrade=False, fromrepo=["Dummy", "Dummy2"], dryrun=False)
zypper_mock.assert_any_call('update', '--auto-agree-with-licenses', '--repo', "Dummy", '--repo', 'Dummy2')
with patch('salt.modules.zypperpkg.list_pkgs', MagicMock(side_effect=[{"vim": "1.1"}, {"vim": "1.2"}])):
ret = zypper.upgrade(dist_upgrade=True, fromrepo=["Dummy", "Dummy2"], novendorchange=True)
+ zypper_mock.assert_any_call(
+ "dist-upgrade",
+ "--auto-agree-with-licenses",
+ "--dry-run",
+ "--no-allow-vendor-change",
+ )
+ zypper_mock.assert_any_call(
+ "dist-upgrade",
+ "--auto-agree-with-licenses",
+ "--dry-run",
+ "--no-allow-vendor-change",
+ )
+
+ with patch(
+ "salt.modules.zypperpkg.list_pkgs",
+ MagicMock(side_effect=[{"vim": "1.1"}, {"vim": "1.1"}]),
+ ):
+ ret = zypper.upgrade(
+ dist_upgrade=True,
+ dryrun=True,
+ fromrepo=["Dummy", "Dummy2"],
+ novendorchange=False,
+ )
+ zypper_mock.assert_any_call(
+ "dist-upgrade",
+ "--auto-agree-with-licenses",
+ "--dry-run",
+ "--from",
+ "Dummy",
+ "--from",
+ "Dummy2",
+ "--allow-vendor-change",
+ )
+ zypper_mock.assert_any_call(
+ "dist-upgrade",
+ "--auto-agree-with-licenses",
+ "--dry-run",
+ "--from",
+ "Dummy",
+ "--from",
+ "Dummy2",
+ "--allow-vendor-change",
+ "--debug-solver",
+ )
+
+
+ with patch(
+ "salt.modules.zypperpkg.list_pkgs",
+ MagicMock(side_effect=[{"vim": "1.1"}, {"vim": "1.1"}]),
+ ):
+ ret = zypper.upgrade(
+ dist_upgrade=True,
+ dryrun=True,
+ fromrepo=["Dummy", "Dummy2"],
+ novendorchange=True,
+ )
+ zypper_mock.assert_any_call(
+ "dist-upgrade",
+ "--auto-agree-with-licenses",
+ "--dry-run",
+ "--from",
+ "Dummy",
+ "--from",
+ "Dummy2",
+ "--no-allow-vendor-change",
+ )
+ zypper_mock.assert_any_call(
+ "dist-upgrade",
+ "--auto-agree-with-licenses",
+ "--dry-run",
+ "--from",
+ "Dummy",
+ "--from",
+ "Dummy2",
+ "--no-allow-vendor-change",
+ "--debug-solver",
+ )
+
+ with patch(
+ "salt.modules.zypperpkg.list_pkgs",
+ MagicMock(side_effect=[{"vim": "1.1"}, {"vim": "1.1"}]),
@ -188,9 +97,57 @@ index a3d20f66d5..8cc84485b5 100644
+ fromrepo=["Dummy", "Dummy2"],
+ novendorchange=True,
+ )
self.assertDictEqual(ret, {"vim": {"old": "1.1", "new": "1.2"}})
zypper_mock.assert_any_call('dist-upgrade', '--auto-agree-with-licenses', '--from', "Dummy",
'--from', 'Dummy2', '--no-allow-vendor-change')
+ zypper_mock.assert_any_call(
+ "dist-upgrade",
+ "--auto-agree-with-licenses",
+ "--dry-run",
+ "--no-allow-vendor-change",
+ )
+ zypper_mock.assert_any_call(
+ "dist-upgrade",
+ "--auto-agree-with-licenses",
+ "--dry-run",
+ "--no-allow-vendor-change",
+ )
+
+ with patch(
+ "salt.modules.zypperpkg.list_pkgs",
+ MagicMock(side_effect=[{"vim": "1.1"}, {"vim": "1.1"}]),
+ ):
+ ret = zypper.upgrade(
+ dist_upgrade=True,
+ dryrun=True,
+ fromrepo=["Dummy", "Dummy2"],
+ novendorchange=False,
+ )
+ zypper_mock.assert_any_call(
+ "dist-upgrade",
+ "--auto-agree-with-licenses",
+ "--dry-run",
+ "--from",
+ "Dummy",
+ "--from",
+ "Dummy2",
+ "--allow-vendor-change",
+ )
+ zypper_mock.assert_any_call(
+ "dist-upgrade",
+ "--auto-agree-with-licenses",
+ "--dry-run",
+ "--from",
+ "Dummy",
+ "--from",
+ "Dummy2",
+ "--allow-vendor-change",
+ "--debug-solver",
+ )
+
with patch(
"salt.modules.zypperpkg.list_pkgs",
MagicMock(side_effect=[{"vim": "1.1"}, {"vim": "1.1"}]),
@@ -728,6 +800,26 @@ class ZypperTestCase(TestCase, LoaderModuleMockMixin):
"--no-allow-vendor-change",
)
+ with patch(
+ "salt.modules.zypperpkg.list_pkgs",
@ -211,30 +168,24 @@ index a3d20f66d5..8cc84485b5 100644
+ "Dummy2",
+ "--allow-vendor-change",
+ )
+
def test_upgrade_kernel(self):
'''
"""
Test kernel package upgrade success.
@@ -558,10 +675,15 @@ Repository 'DUMMY' not found by its alias, number, or URI.
with patch('salt.modules.zypperpkg.list_pkgs', MagicMock(side_effect=[{"vim": "1.1"}, {"vim": "1.1"}])):
with self.assertRaises(CommandExecutionError) as cmd_exc:
ret = zypper.upgrade(dist_upgrade=True, fromrepo=["DUMMY"])
- self.assertEqual(cmd_exc.exception.info['changes'], {})
- self.assertEqual(cmd_exc.exception.info['result']['stdout'], zypper_out)
- zypper_mock.noraise.call.assert_called_with('dist-upgrade', '--auto-agree-with-licenses',
- '--from', 'DUMMY')
+ self.assertEqual(cmd_exc.exception.info["changes"], {})
+ self.assertEqual(cmd_exc.exception.info["result"]["stdout"], zypper_out)
+ zypper_mock.noraise.call.assert_called_with(
@@ -815,7 +907,11 @@ Repository 'DUMMY' not found by its alias, number, or URI.
self.assertEqual(cmd_exc.exception.info["changes"], {})
self.assertEqual(cmd_exc.exception.info["result"]["stdout"], zypper_out)
zypper_mock.noraise.call.assert_called_with(
- "dist-upgrade", "--auto-agree-with-licenses", "--from", "DUMMY"
+ "dist-upgrade",
+ "--auto-agree-with-licenses",
+ "--from",
+ "DUMMY",
+ "--no-allow-vendor-change",
+ )
)
def test_upgrade_available(self):
'''
--
2.28.0
2.29.2

View File

@ -0,0 +1,99 @@
From 435d9fbee299b06e1c58cdc0574b6a1975841879 Mon Sep 17 00:00:00 2001
From: Victor Zhestkov <vzhestkov@suse.com>
Date: Wed, 25 Nov 2020 15:09:41 +0300
Subject: [PATCH] Fix salt.utils.stringutils.to_str calls to make it
working with numeric uid/gid
---
salt/modules/file.py | 16 ++++++++++------
salt/states/file.py | 11 +++++++++--
2 files changed, 19 insertions(+), 8 deletions(-)
diff --git a/salt/modules/file.py b/salt/modules/file.py
index b830b390d3..b9744393d7 100644
--- a/salt/modules/file.py
+++ b/salt/modules/file.py
@@ -4970,6 +4970,12 @@ def check_perms(
is_dir = os.path.isdir(name)
is_link = os.path.islink(name)
+ def __safe_to_str(s):
+ try:
+ return salt.utils.stringutils.to_str(s)
+ except:
+ return salt.utils.stringutils.to_str(str(s))
+
# user/group changes if needed, then check if it worked
if user:
if isinstance(user, int):
@@ -4979,7 +4985,7 @@ def check_perms(
and user_to_uid(user) != user_to_uid(perms["luser"])
) or (
not salt.utils.platform.is_windows()
- and salt.utils.stringutils.to_str(user) != perms["luser"]
+ and __safe_to_str(user) != perms["luser"]
):
perms["cuser"] = user
@@ -4991,7 +4997,7 @@ def check_perms(
and group_to_gid(group) != group_to_gid(perms["lgroup"])
) or (
not salt.utils.platform.is_windows()
- and salt.utils.stringutils.to_str(group) != perms["lgroup"]
+ and __safe_to_str(group) != perms["lgroup"]
):
perms["cgroup"] = group
@@ -5023,8 +5029,7 @@ def check_perms(
and user != ""
) or (
not salt.utils.platform.is_windows()
- and salt.utils.stringutils.to_str(user)
- != get_user(name, follow_symlinks=follow_symlinks)
+ and __safe_to_str(user) != get_user(name, follow_symlinks=follow_symlinks)
and user != ""
):
if __opts__["test"] is True:
@@ -5045,8 +5050,7 @@ def check_perms(
and group != ""
) or (
not salt.utils.platform.is_windows()
- and salt.utils.stringutils.to_str(group)
- != get_group(name, follow_symlinks=follow_symlinks)
+ and __safe_to_str(group) != get_group(name, follow_symlinks=follow_symlinks)
and group != ""
):
if __opts__["test"] is True:
diff --git a/salt/states/file.py b/salt/states/file.py
index 89c70eb454..fd8ffde757 100644
--- a/salt/states/file.py
+++ b/salt/states/file.py
@@ -989,15 +989,22 @@ def _check_dir_meta(name, user, group, mode, follow_symlinks=False):
if not stats:
changes["directory"] = "new"
return changes
+
+ def __safe_to_str(s):
+ try:
+ return salt.utils.stringutils.to_str(s)
+ except:
+ return salt.utils.stringutils.to_str(str(s))
+
if (
user is not None
- and salt.utils.stringutils.to_str(user) != stats["user"]
+ and __safe_to_str(user) != stats["user"]
and user != stats.get("uid")
):
changes["user"] = user
if (
group is not None
- and salt.utils.stringutils.to_str(group) != stats["group"]
+ and __safe_to_str(group) != stats["group"]
and group != stats.get("gid")
):
changes["group"] = group
--
2.29.2

View File

@ -1,4 +1,4 @@
From 5b6ac3bb81f24bbb8c39f80c71c490c339cce756 Mon Sep 17 00:00:00 2001
From 01e2e60a5aba609d219b73f1018f12517a294a64 Mon Sep 17 00:00:00 2001
From: Cedric Bosdonnat <cbosdonnat@suse.com>
Date: Tue, 15 Sep 2020 13:46:06 +0200
Subject: [PATCH] Fix the removed six.itermitems and six.*_type* (#262)
@ -11,29 +11,81 @@ on python 2.7.
* fixup! Fix the removed six.itermitems and six.*_type*
---
salt/_compat.py | 1 +
salt/modules/virt.py | 57 +++++++++++-----------
salt/states/virt.py | 15 +++---
salt/utils/data.py | 51 ++++++++++----------
salt/utils/xmlutil.py | 5 +-
salt/_compat.py | 25 ++++++++++++++++---------
salt/modules/virt.py | 11 ++++-------
salt/states/virt.py | 1 +
salt/utils/xmlutil.py | 3 ++-
tests/unit/modules/test_virt.py | 2 +-
tests/unit/utils/test_data.py | 85 ++++++++++++++++++---------------
7 files changed, 115 insertions(+), 101 deletions(-)
5 files changed, 24 insertions(+), 18 deletions(-)
diff --git a/salt/_compat.py b/salt/_compat.py
index 965bb90da3..22daaa31a0 100644
index d9425523cf..de100de3fa 100644
--- a/salt/_compat.py
+++ b/salt/_compat.py
@@ -39,6 +39,7 @@ except Exception: # pylint: disable=broad-except
# True if we are running on Python 3.
PY3 = sys.version_info.major == 3
@@ -7,6 +7,7 @@ Salt compatibility code
import binascii
import logging
import sys
+import xml.sax.saxutils as saxutils
if PY3:
import builtins
from salt.exceptions import SaltException
from salt.ext.six import binary_type, integer_types, string_types, text_type
@@ -261,21 +262,25 @@ def ip_address(address):
try:
return ipaddress.IPv4Address(address)
except (ipaddress.AddressValueError, ipaddress.NetmaskValueError) as err:
- log.debug('Error while parsing IPv4 address: %s', address)
+ log.debug("Error while parsing IPv4 address: %s", address)
log.debug(err)
try:
return IPv6AddressScoped(address)
except (ipaddress.AddressValueError, ipaddress.NetmaskValueError) as err:
- log.debug('Error while parsing IPv6 address: %s', address)
+ log.debug("Error while parsing IPv6 address: %s", address)
log.debug(err)
if isinstance(address, bytes):
- raise ipaddress.AddressValueError('{} does not appear to be an IPv4 or IPv6 address. '
- 'Did you pass in a bytes (str in Python 2) instead '
- 'of a unicode object?'.format(repr(address)))
+ raise ipaddress.AddressValueError(
+ "{} does not appear to be an IPv4 or IPv6 address. "
+ "Did you pass in a bytes (str in Python 2) instead "
+ "of a unicode object?".format(repr(address))
+ )
- raise ValueError('{} does not appear to be an IPv4 or IPv6 address'.format(repr(address)))
+ raise ValueError(
+ "{} does not appear to be an IPv4 or IPv6 address".format(repr(address))
+ )
def ip_interface(address):
@@ -302,16 +307,18 @@ def ip_interface(address):
try:
return ipaddress.IPv4Interface(address)
except (ipaddress.AddressValueError, ipaddress.NetmaskValueError) as err:
- log.debug('Error while getting IPv4 interface for address %s', address)
+ log.debug("Error while getting IPv4 interface for address %s", address)
log.debug(err)
try:
return ipaddress.IPv6Interface(address)
except (ipaddress.AddressValueError, ipaddress.NetmaskValueError) as err:
- log.debug('Error while getting IPv6 interface for address %s', address)
+ log.debug("Error while getting IPv6 interface for address %s", address)
log.debug(err)
- raise ValueError('{} does not appear to be an IPv4 or IPv6 interface'.format(address))
+ raise ValueError(
+ "{} does not appear to be an IPv4 or IPv6 interface".format(address)
+ )
if ipaddress:
diff --git a/salt/modules/virt.py b/salt/modules/virt.py
index cd80fbe608..c07fabb406 100644
index ec40f08359..c042738370 100644
--- a/salt/modules/virt.py
+++ b/salt/modules/virt.py
@@ -88,8 +88,6 @@ import string # pylint: disable=deprecated-module
@ -43,87 +95,20 @@ index cd80fbe608..c07fabb406 100644
-from xml.etree import ElementTree
-from xml.sax import saxutils
# Import third party libs
import jinja2.exceptions
@@ -104,7 +102,10 @@ import salt.utils.templates
import salt.utils.files
@@ -99,8 +97,9 @@ import salt.utils.stringutils
import salt.utils.templates
import salt.utils.xmlutil as xmlutil
import salt.utils.yaml
from salt._compat import ipaddress
+from salt._compat import ElementTree
+from salt._compat import saxutils
-from salt._compat import ipaddress
+from salt._compat import ElementTree, ipaddress, saxutils
from salt.exceptions import CommandExecutionError, SaltInvocationError
+from salt.ext import six
from salt.ext.six.moves import range # pylint: disable=import-error,redefined-builtin
from salt.ext.six.moves.urllib.parse import urlparse, urlunparse
from salt.utils.virt import check_remote, download_remote
@@ -657,8 +658,8 @@ def _gen_xml(
context = {
"hypervisor": hypervisor,
"name": name,
- "cpu": str(cpu),
- "mem": str(mem),
+ "cpu": six.text_type(cpu),
+ "mem": six.text_type(mem),
}
if hypervisor in ["qemu", "kvm"]:
context["controller_model"] = False
@@ -722,7 +723,7 @@ def _gen_xml(
"target_dev": _get_disk_target(targets, len(diskp), prefix),
"disk_bus": disk["model"],
"format": disk.get("format", "raw"),
- "index": str(i),
+ "index": six.text_type(i),
}
targets.append(disk_context["target_dev"])
if disk.get("source_file"):
@@ -827,8 +828,8 @@ def _gen_vol_xml(
"name": name,
"target": {"permissions": permissions, "nocow": nocow},
"format": format,
- "size": str(size),
- "allocation": str(int(allocation) * 1024),
+ "size": six.text_type(size),
+ "allocation": six.text_type(int(allocation) * 1024),
"backingStore": backing_store,
}
fn_ = "libvirt_volume.jinja"
@@ -1253,7 +1254,7 @@ def _disk_profile(conn, profile, hypervisor, disks, vm_name):
)
# Transform the list to remove one level of dictionary and add the name as a property
- disklist = [dict(d, name=name) for disk in disklist for name, d in disk.items()]
+ disklist = [dict(d, name=name) for disk in disklist for name, d in six.iteritems(disk)]
# Merge with the user-provided disks definitions
if disks:
@@ -1274,7 +1275,7 @@ def _disk_profile(conn, profile, hypervisor, disks, vm_name):
disk["model"] = "ide"
# Add the missing properties that have defaults
- for key, val in overlay.items():
+ for key, val in six.iteritems(overlay):
if key not in disk:
disk[key] = val
@@ -1422,7 +1423,7 @@ def _complete_nics(interfaces, hypervisor):
"""
Apply the default overlay to attributes
"""
- for key, value in overlays[hypervisor].items():
+ for key, value in six.iteritems(overlays[hypervisor]):
if key not in attributes or not attributes[key]:
attributes[key] = value
@@ -1449,7 +1450,7 @@ def _nic_profile(profile_name, hypervisor):
"""
Append dictionary profile data to interfaces list
"""
- for interface_name, attributes in profile_dict.items():
+ for interface_name, attributes in six.iteritems(profile_dict):
attributes["name"] = interface_name
interfaces.append(attributes)
@@ -1520,7 +1521,7 @@ def _handle_remote_boot_params(orig_boot):
@@ -1516,7 +1515,7 @@ def _handle_remote_boot_params(orig_boot):
"""
saltinst_dir = None
new_boot = orig_boot.copy()
@ -132,139 +117,22 @@ index cd80fbe608..c07fabb406 100644
cases = [
{"efi"},
{"kernel", "initrd", "efi"},
@@ -2380,8 +2381,8 @@ def update(
# Update the cpu
cpu_node = desc.find("vcpu")
if cpu and int(cpu_node.text) != cpu:
- cpu_node.text = str(cpu)
- cpu_node.set("current", str(cpu))
+ cpu_node.text = six.text_type(cpu)
+ cpu_node.set("current", six.text_type(cpu))
need_update = True
@@ -2559,9 +2558,7 @@ def update(
def _set_loader(node, value):
@@ -2394,7 +2395,7 @@ def update(
node.set("template", value)
def _set_with_mib_unit(node, value):
- node.text = str(value)
+ node.text = six.text_type(value)
node.set("unit", "MiB")
# Update the kernel boot parameters
@@ -2426,7 +2427,7 @@ def update(
},
]
- data = {k: v for k, v in locals().items() if bool(v)}
+ data = {k: v for k, v in six.iteritems(locals()) if bool(v)}
if boot_dev:
data["boot_dev"] = {i + 1: dev for i, dev in enumerate(boot_dev.split())}
need_update = need_update or salt.utils.xmlutil.change_xml(
@@ -2547,7 +2548,7 @@ def update(
# Attaching device
if source_file:
ElementTree.SubElement(
- ElementTree.SubElement(
- updated_disk, "source", attrib={"file": source_file}
+ updated_disk, "source", file=source_file
)
- )
+ ElementTree.SubElement(updated_disk, "source", file=source_file)
changes["disk"]["new"] = new_disks
@@ -2609,7 +2610,7 @@ def update(
except libvirt.libvirtError as err:
if "errors" not in status:
status["errors"] = []
- status["errors"].append(str(err))
+ status["errors"].append(six.text_type(err))
conn.close()
return status
@@ -2823,7 +2824,7 @@ def _node_info(conn):
info = {
"cpucores": raw[6],
"cpumhz": raw[3],
- "cpumodel": str(raw[0]),
+ "cpumodel": six.text_type(raw[0]),
"cpus": raw[2],
"cputhreads": raw[7],
"numanodes": raw[4],
@@ -3628,7 +3629,7 @@ def _define_vol_xml_str(conn, xml, pool=None): # pylint: disable=redefined-oute
poolname = (
pool if pool else __salt__["config.get"]("virt:storagepool", default_pool)
)
- pool = conn.storagePoolLookupByName(str(poolname))
+ pool = conn.storagePoolLookupByName(six.text_type(poolname))
ret = pool.createXML(xml, 0) is not None
return ret
@@ -3829,7 +3830,7 @@ def seed_non_shared_migrate(disks, force=False):
salt '*' virt.seed_non_shared_migrate <disks>
"""
- for _, data in disks.items():
+ for _, data in six.iteritems(disks):
fn_ = data["file"]
form = data["file format"]
size = data["virtual size"].split()[1][1:]
@@ -4852,7 +4853,7 @@ def capabilities(**kwargs):
try:
caps = _capabilities(conn)
except libvirt.libvirtError as err:
- raise CommandExecutionError(str(err))
+ raise CommandExecutionError(six.text_type(err))
finally:
conn.close()
return caps
@@ -5352,7 +5353,7 @@ def network_info(name=None, **kwargs):
for net in nets
}
except libvirt.libvirtError as err:
- log.debug("Silenced libvirt error: %s", str(err))
+ log.debug("Silenced libvirt error: %s", six.text_type(err))
finally:
conn.close()
return result
@@ -6214,7 +6215,7 @@ def pool_info(name=None, **kwargs):
]
result = {pool.name(): _pool_extract_infos(pool) for pool in pools}
except libvirt.libvirtError as err:
- log.debug("Silenced libvirt error: %s", str(err))
+ log.debug("Silenced libvirt error: %s", six.text_type(err))
finally:
conn.close()
return result
@@ -6591,12 +6592,12 @@ def volume_infos(pool=None, volume=None, **kwargs):
if vol.path():
as_backing_store = {
path
- for (path, all_paths) in backing_stores.items()
+ for (path, all_paths) in six.iteritems(backing_stores)
if vol.path() in all_paths
}
used_by = [
vm_name
- for (vm_name, vm_disks) in disks.items()
+ for (vm_name, vm_disks) in six.iteritems(disks)
if vm_disks & as_backing_store or vol.path() in vm_disks
]
@@ -6625,9 +6626,9 @@ def volume_infos(pool=None, volume=None, **kwargs):
}
for pool_obj in pools
}
- return {pool_name: volumes for (pool_name, volumes) in vols.items() if volumes}
+ return {pool_name: volumes for (pool_name, volumes) in six.iteritems(vols) if volumes}
except libvirt.libvirtError as err:
- log.debug("Silenced libvirt error: %s", str(err))
+ log.debug("Silenced libvirt error: %s", six.text_type(err))
finally:
conn.close()
return result
diff --git a/salt/states/virt.py b/salt/states/virt.py
index 3d99fd53c8..1a0c889d58 100644
index b45cf72ed3..df7ebb63e6 100644
--- a/salt/states/virt.py
+++ b/salt/states/virt.py
@@ -23,6 +23,7 @@ import salt.utils.files
@@ -22,6 +22,7 @@ import salt.utils.files
import salt.utils.stringutils
import salt.utils.versions
from salt.exceptions import CommandExecutionError, SaltInvocationError
@ -272,288 +140,19 @@ index 3d99fd53c8..1a0c889d58 100644
try:
import libvirt # pylint: disable=import-error
@@ -97,7 +98,7 @@ def keys(name, basepath="/etc/pki", **kwargs):
# rename them to something hopefully unique to avoid
# overriding anything existing
pillar_kwargs = {}
- for key, value in kwargs.items():
+ for key, value in six.iteritems(kwargs):
pillar_kwargs["ext_pillar_virt.{}".format(key)] = value
pillar = __salt__["pillar.ext"]({"libvirt": "_"}, pillar_kwargs)
@@ -187,7 +188,7 @@ def _virt_call(
else:
noaction_domains.append(targeted_domain)
except libvirt.libvirtError as err:
- ignored_domains.append({"domain": targeted_domain, "issue": str(err)})
+ ignored_domains.append({"domain": targeted_domain, "issue": six.text_type(err)})
if not changed_domains:
ret["result"] = not ignored_domains and bool(targeted_domains)
ret["comment"] = "No changes had happened"
@@ -461,7 +462,7 @@ def defined(
ret["comment"] = "Domain {} defined".format(name)
except libvirt.libvirtError as err:
# Something bad happened when defining / updating the VM, report it
- ret["comment"] = str(err)
+ ret["comment"] = six.text_type(err)
ret["result"] = False
return ret
@@ -704,7 +705,7 @@ def running(
except libvirt.libvirtError as err:
# Something bad happened when starting / updating the VM, report it
- ret["comment"] = str(err)
+ ret["comment"] = six.text_type(err)
ret["result"] = False
return ret
@@ -867,7 +868,7 @@ def reverted(
}
except CommandExecutionError as err:
if len(domains) > 1:
- ignored_domains.append({"domain": domain, "issue": str(err)})
+ ignored_domains.append({"domain": domain, "issue": six.text_type(err)})
if len(domains) > 1:
if result:
ret["changes"]["reverted"].append(result)
@@ -885,9 +886,9 @@ def reverted(
if not ret["changes"]["reverted"]:
ret["changes"].pop("reverted")
except libvirt.libvirtError as err:
- ret["comment"] = str(err)
+ ret["comment"] = six.text_type(err)
except CommandExecutionError as err:
- ret["comment"] = str(err)
+ ret["comment"] = six.text_type(err)
return ret
diff --git a/salt/utils/data.py b/salt/utils/data.py
index 1c4c22efb3..d98b56e06f 100644
--- a/salt/utils/data.py
+++ b/salt/utils/data.py
@@ -4,6 +4,7 @@ Functions for manipulating, inspecting, or otherwise working with data types
and data structures.
"""
+from __future__ import absolute_import, print_function, unicode_literals
# Import Python libs
import copy
@@ -71,7 +72,7 @@ class CaseInsensitiveDict(MutableMapping):
return self._data[to_lowercase(key)][1]
def __iter__(self):
- return (item[0] for item in self._data.values())
+ return (item[0] for item in six.itervalues(self._data))
def __eq__(self, rval):
if not isinstance(rval, Mapping):
@@ -80,20 +81,20 @@ class CaseInsensitiveDict(MutableMapping):
return dict(self.items_lower()) == dict(CaseInsensitiveDict(rval).items_lower())
def __repr__(self):
- return repr(dict(self.items()))
+ return repr(dict(six.iteritems(self)))
def items_lower(self):
"""
Returns a generator iterating over keys and values, with the keys all
being lowercase.
"""
- return ((key, val[1]) for key, val in self._data.items())
+ return ((key, val[1]) for key, val in six.iteritems(self._data))
def copy(self):
"""
Returns a copy of the object
"""
- return CaseInsensitiveDict(self._data.items())
+ return CaseInsensitiveDict(six.iteritems(self._data))
def __change_case(data, attr, preserve_dict_class=False):
@@ -115,7 +116,7 @@ def __change_case(data, attr, preserve_dict_class=False):
__change_case(key, attr, preserve_dict_class),
__change_case(val, attr, preserve_dict_class),
)
- for key, val in data.items()
+ for key, val in six.iteritems(data)
)
if isinstance(data, Sequence):
return data_type(
@@ -145,7 +146,7 @@ def compare_dicts(old=None, new=None):
dict describing the changes that were made.
"""
ret = {}
- for key in set(new or {}).union(old or {}):
+ for key in set((new or {})).union((old or {})):
if key not in old:
# New key
ret[key] = {"old": "", "new": new[key]}
@@ -205,7 +206,7 @@ def _remove_circular_refs(ob, _seen=None):
if isinstance(ob, dict):
res = {
_remove_circular_refs(k, _seen): _remove_circular_refs(v, _seen)
- for k, v in ob.items()
+ for k, v in six.iteritems(ob)
}
elif isinstance(ob, (list, tuple, set, frozenset)):
res = type(ob)(_remove_circular_refs(v, _seen) for v in ob)
@@ -336,7 +337,7 @@ def decode_dict(
)
# Make sure we preserve OrderedDicts
ret = data.__class__() if preserve_dict_class else {}
- for key, value in data.items():
+ for key, value in six.iteritems(data):
if isinstance(key, tuple):
key = (
decode_tuple(
@@ -592,7 +593,7 @@ def encode_dict(
# Clean data object before encoding to avoid circular references
data = _remove_circular_refs(data)
ret = data.__class__() if preserve_dict_class else {}
- for key, value in data.items():
+ for key, value in six.iteritems(data):
if isinstance(key, tuple):
key = (
encode_tuple(key, encoding, errors, keep, preserve_dict_class)
@@ -734,8 +735,8 @@ def filter_by(lookup_dict, lookup, traverse, merge=None, default="default", base
# lookup_dict keys
for each in val if isinstance(val, list) else [val]:
for key in lookup_dict:
- test_key = key if isinstance(key, str) else str(key)
- test_each = each if isinstance(each, str) else str(each)
+ test_key = key if isinstance(key, six.string_types) else six.text_type(key)
+ test_each = each if isinstance(each, six.string_types) else six.text_type(each)
if fnmatch.fnmatchcase(test_each, test_key):
ret = lookup_dict[key]
break
@@ -851,11 +852,11 @@ def subdict_match(
# begin with is that (by design) to_unicode will raise a TypeError if a
# non-string/bytestring/bytearray value is passed.
try:
- target = str(target).lower()
+ target = six.text_type(target).lower()
except UnicodeDecodeError:
target = salt.utils.stringutils.to_unicode(target).lower()
try:
- pattern = str(pattern).lower()
+ pattern = six.text_type(pattern).lower()
except UnicodeDecodeError:
pattern = salt.utils.stringutils.to_unicode(pattern).lower()
@@ -997,7 +998,7 @@ def repack_dictlist(data, strict=False, recurse=False, key_cb=None, val_cb=None)
Takes a list of one-element dicts (as found in many SLS schemas) and
repacks into a single dictionary.
"""
- if isinstance(data, str):
+ if isinstance(data, six.string_types):
try:
data = salt.utils.yaml.safe_load(data)
except salt.utils.yaml.parser.ParserError as err:
@@ -1009,7 +1010,7 @@ def repack_dictlist(data, strict=False, recurse=False, key_cb=None, val_cb=None)
if val_cb is None:
val_cb = lambda x, y: y
- valid_non_dict = ((str,), (int,), float)
+ valid_non_dict = (six.string_types, six.integer_types, float)
if isinstance(data, list):
for element in data:
if isinstance(element, valid_non_dict):
@@ -1067,7 +1068,7 @@ def is_list(value):
@jinja_filter("is_iter")
-def is_iter(thing, ignore=(str,)):
+def is_iter(thing, ignore=six.string_types):
"""
Test if an object is iterable, but not a string type.
@@ -1124,10 +1125,10 @@ def is_true(value=None):
pass
# Now check for truthiness
- if isinstance(value, ((int,), float)):
+ if isinstance(value, (six.integer_types, float)):
return value > 0
- if isinstance(value, str):
- return str(value).lower() == "true"
+ if isinstance(value, six.string_types):
+ return six.text_type(value).lower() == "true"
return bool(value)
@@ -1167,7 +1168,7 @@ def simple_types_filter(data):
if data is None:
return data
- simpletypes_keys = ((str,), str, (int,), float, bool)
+ simpletypes_keys = (six.string_types, six.text_type, six.integer_types, float, bool)
simpletypes_values = tuple(list(simpletypes_keys) + [list, tuple])
if isinstance(data, (list, tuple)):
@@ -1183,7 +1184,7 @@ def simple_types_filter(data):
if isinstance(data, dict):
simpledict = {}
- for key, value in data.items():
+ for key, value in six.iteritems(data):
if key is not None and not isinstance(key, simpletypes_keys):
key = repr(key)
if value is not None and isinstance(value, (dict, list, tuple)):
@@ -1205,8 +1206,8 @@ def stringify(data):
for item in data:
if six.PY2 and isinstance(item, str):
item = salt.utils.stringutils.to_unicode(item)
- elif not isinstance(item, str):
- item = str(item)
+ elif not isinstance(item, six.string_types):
+ item = six.text_type(item)
ret.append(item)
return ret
@@ -1282,7 +1283,7 @@ def filter_falsey(data, recurse_depth=None, ignore_types=()):
if isinstance(data, dict):
processed_elements = [
- (key, filter_element(value)) for key, value in data.items()
+ (key, filter_element(value)) for key, value in six.iteritems(data)
]
return type(data)(
[
@@ -1472,7 +1473,7 @@ def get_value(obj, path, default=None):
if obj is None:
return res
if isinstance(obj, dict):
- items = obj.items()
+ items = six.iteritems(obj)
elif isinstance(obj, list):
items = enumerate(obj)
diff --git a/salt/utils/xmlutil.py b/salt/utils/xmlutil.py
index 2b9c7bf43f..68191bc528 100644
index b9f047820b..111ca155d4 100644
--- a/salt/utils/xmlutil.py
+++ b/salt/utils/xmlutil.py
@@ -9,6 +9,7 @@ from xml.etree import ElementTree
@@ -7,6 +7,7 @@ import string # pylint: disable=deprecated-module
from xml.etree import ElementTree
# Import salt libs
import salt.utils.data
+from salt.ext import six
def _conv_name(x):
@@ -147,7 +148,7 @@ def set_node_text(node, value):
:param node: the node to set the text to
:param value: the value to set
"""
- node.text = str(value)
+ node.text = six.text_type(value)
def clean_node(parent_map, node, ignored=None):
@@ -162,7 +163,7 @@ def clean_node(parent_map, node, ignored=None):
@@ -160,7 +161,7 @@ def clean_node(parent_map, node, ignored=None):
has_text = node.text is not None and node.text.strip()
parent = parent_map.get(node)
if (
@ -563,10 +162,10 @@ index 2b9c7bf43f..68191bc528 100644
and not has_text
):
diff --git a/tests/unit/modules/test_virt.py b/tests/unit/modules/test_virt.py
index 5ec8de77e7..27c4b9d1b0 100644
index 4775fec31f..4a4c0395a7 100644
--- a/tests/unit/modules/test_virt.py
+++ b/tests/unit/modules/test_virt.py
@@ -48,7 +48,7 @@ class LibvirtMock(MagicMock): # pylint: disable=too-many-ancestors
@@ -45,7 +45,7 @@ class LibvirtMock(MagicMock): # pylint: disable=too-many-ancestors
"""
def __init__(self, msg):
@ -575,202 +174,7 @@ index 5ec8de77e7..27c4b9d1b0 100644
self.msg = msg
def get_error_message(self):
diff --git a/tests/unit/utils/test_data.py b/tests/unit/utils/test_data.py
index 8a6956d442..fb4a8cc3c2 100644
--- a/tests/unit/utils/test_data.py
+++ b/tests/unit/utils/test_data.py
@@ -1,14 +1,17 @@
+# -*- coding: utf-8 -*-
"""
Tests for salt.utils.data
"""
# Import Python libs
+from __future__ import absolute_import, print_function, unicode_literals
import logging
# Import Salt libs
import salt.utils.data
import salt.utils.stringutils
+from salt.ext import six
# Import 3rd party libs
from salt.ext.six.moves import ( # pylint: disable=import-error,redefined-builtin
@@ -414,18 +417,19 @@ class DataTestCase(TestCase):
)
self.assertEqual(ret, expected)
- # The binary data in the data structure should fail to decode, even
- # using the fallback, and raise an exception.
- self.assertRaises(
- UnicodeDecodeError,
- salt.utils.data.decode,
- self.test_data,
- keep=False,
- normalize=True,
- preserve_dict_class=True,
- preserve_tuples=True,
- to_str=True,
- )
+ if six.PY3:
+ # The binary data in the data structure should fail to decode, even
+ # using the fallback, and raise an exception.
+ self.assertRaises(
+ UnicodeDecodeError,
+ salt.utils.data.decode,
+ self.test_data,
+ keep=False,
+ normalize=True,
+ preserve_dict_class=True,
+ preserve_tuples=True,
+ to_str=True,
+ )
# Now munge the expected data so that we get what we would expect if we
# disable preservation of dict class and tuples
@@ -469,9 +473,14 @@ class DataTestCase(TestCase):
# Test binary blob
self.assertEqual(salt.utils.data.decode(BYTES, keep=True, to_str=True), BYTES)
- self.assertRaises(
- UnicodeDecodeError, salt.utils.data.decode, BYTES, keep=False, to_str=True,
- )
+ if six.PY3:
+ self.assertRaises(
+ UnicodeDecodeError,
+ salt.utils.data.decode,
+ BYTES,
+ keep=False,
+ to_str=True,
+ )
def test_decode_fallback(self):
"""
@@ -666,7 +675,7 @@ class DataTestCase(TestCase):
self.assertRaises(TypeError, salt.utils.data.stringify, 9)
self.assertEqual(
salt.utils.data.stringify(
- ["one", "two", "three", 4, 5]
+ ["one", "two", str("three"), 4, 5]
), # future lint: disable=blacklisted-function
["one", "two", "three", "4", "5"],
)
@@ -720,7 +729,7 @@ class FilterFalseyTestCase(TestCase):
# Check returned type equality
self.assertIs(type(old_list), type(new_list))
# Test with set
- old_set = {"foo", "bar"}
+ old_set = set(["foo", "bar"])
new_set = salt.utils.data.filter_falsey(old_set)
self.assertEqual(old_set, new_set)
# Check returned type equality
@@ -839,9 +848,9 @@ class FilterFalseyTestCase(TestCase):
Test filtering a set without recursing.
Note that a set cannot contain unhashable types, so recursion is not possible.
"""
- old_set = {"foo", None, 0, ""}
+ old_set = set(["foo", None, 0, ""])
new_set = salt.utils.data.filter_falsey(old_set)
- expect_set = {"foo"}
+ expect_set = set(["foo"])
self.assertEqual(expect_set, new_set)
self.assertIs(type(expect_set), type(new_set))
@@ -1053,13 +1062,13 @@ class FilterRecursiveDiff(TestCase):
"""
Test cases where equal sets are compared.
"""
- test_set = {0, 1, 2, 3, "foo"}
+ test_set = set([0, 1, 2, 3, "foo"])
self.assertEqual({}, salt.utils.data.recursive_diff(test_set, test_set))
# This is a bit of an oddity, as python seems to sort the sets in memory
# so both sets end up with the same ordering (0..3).
- set_one = {0, 1, 2, 3}
- set_two = {3, 2, 1, 0}
+ set_one = set([0, 1, 2, 3])
+ set_two = set([3, 2, 1, 0])
self.assertEqual({}, salt.utils.data.recursive_diff(set_one, set_two))
def test_tuple_equality(self):
@@ -1149,13 +1158,13 @@ class FilterRecursiveDiff(TestCase):
Tricky as the sets are compared zipped, so shuffled sets of equal values
are considered different.
"""
- set_one = {0, 1, 2, 4}
- set_two = {0, 1, 3, 4}
- expected_result = {"old": {2}, "new": {3}}
+ set_one = set([0, 1, 2, 4])
+ set_two = set([0, 1, 3, 4])
+ expected_result = {"old": set([2]), "new": set([3])}
self.assertEqual(
expected_result, salt.utils.data.recursive_diff(set_one, set_two)
)
- expected_result = {"new": {2}, "old": {3}}
+ expected_result = {"new": set([2]), "old": set([3])}
self.assertEqual(
expected_result, salt.utils.data.recursive_diff(set_two, set_one)
)
@@ -1164,8 +1173,8 @@ class FilterRecursiveDiff(TestCase):
# Python 2.7 seems to sort it (i.e. set_one below becomes {0, 1, 'foo', 'bar'}
# However Python 3.6.8 stores it differently each run.
# So just test for "not equal" here.
- set_one = {0, "foo", 1, "bar"}
- set_two = {"foo", 1, "bar", 2}
+ set_one = set([0, "foo", 1, "bar"])
+ set_two = set(["foo", 1, "bar", 2])
expected_result = {}
self.assertNotEqual(
expected_result, salt.utils.data.recursive_diff(set_one, set_two)
@@ -1203,18 +1212,18 @@ class FilterRecursiveDiff(TestCase):
expected_result, salt.utils.data.recursive_diff(list_two, list_one)
)
- mixed_one = {"foo": {0, 1, 2}, "bar": [0, 1, 2]}
- mixed_two = {"foo": {1, 2, 3}, "bar": [1, 2, 3]}
+ mixed_one = {"foo": set([0, 1, 2]), "bar": [0, 1, 2]}
+ mixed_two = {"foo": set([1, 2, 3]), "bar": [1, 2, 3]}
expected_result = {
- "old": {"foo": {0}, "bar": [0, 1, 2]},
- "new": {"foo": {3}, "bar": [1, 2, 3]},
+ "old": {"foo": set([0]), "bar": [0, 1, 2]},
+ "new": {"foo": set([3]), "bar": [1, 2, 3]},
}
self.assertEqual(
expected_result, salt.utils.data.recursive_diff(mixed_one, mixed_two)
)
expected_result = {
- "new": {"foo": {0}, "bar": [0, 1, 2]},
- "old": {"foo": {3}, "bar": [1, 2, 3]},
+ "new": {"foo": set([0]), "bar": [0, 1, 2]},
+ "old": {"foo": set([3]), "bar": [1, 2, 3]},
}
self.assertEqual(
expected_result, salt.utils.data.recursive_diff(mixed_two, mixed_one)
@@ -1236,7 +1245,7 @@ class FilterRecursiveDiff(TestCase):
Test case comparing a list with a set, will be compared unordered.
"""
mixed_one = [1, 2, 3]
- mixed_two = {3, 2, 1}
+ mixed_two = set([3, 2, 1])
expected_result = {}
self.assertEqual(
expected_result, salt.utils.data.recursive_diff(mixed_one, mixed_two)
@@ -1351,9 +1360,9 @@ class FilterRecursiveDiff(TestCase):
Test case comparing two sets of unequal length.
This does not do anything special, as it is unordered.
"""
- set_one = {1, 2, 3}
- set_two = {4, 3, 2, 1}
- expected_result = {"old": set(), "new": {4}}
+ set_one = set([1, 2, 3])
+ set_two = set([4, 3, 2, 1])
+ expected_result = {"old": set([]), "new": set([4])}
self.assertEqual(
expected_result, salt.utils.data.recursive_diff(set_one, set_two)
)
--
2.28.0
2.29.2

View File

@ -1,28 +0,0 @@
From 5a2c7671be0fcdf03050049ac4a1bbf4929abf1e Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Pablo=20Su=C3=A1rez=20Hern=C3=A1ndez?=
<psuarezhernandez@suse.com>
Date: Fri, 27 Mar 2020 15:58:40 +0000
Subject: [PATCH] Fix typo on msgpack version when sanitizing msgpack
kwargs (bsc#1167437)
---
salt/utils/msgpack.py | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/salt/utils/msgpack.py b/salt/utils/msgpack.py
index 1d02aa96ba8b659eb4038f00563c9cfc31a568e5..4b5a256513a524a33d7d42773644567a0970a46b 100644
--- a/salt/utils/msgpack.py
+++ b/salt/utils/msgpack.py
@@ -61,7 +61,7 @@ def _sanitize_msgpack_kwargs(kwargs):
assert isinstance(kwargs, dict)
if version < (0, 6, 0) and kwargs.pop('strict_map_key', None) is not None:
log.info('removing unsupported `strict_map_key` argument from msgpack call')
- if version < (0, 5, 5) and kwargs.pop('raw', None) is not None:
+ if version < (0, 5, 2) and kwargs.pop('raw', None) is not None:
log.info('removing unsupported `raw` argument from msgpack call')
if version < (0, 4, 0) and kwargs.pop('use_bin_type', None) is not None:
log.info('removing unsupported `use_bin_type` argument from msgpack call')
--
2.23.0

View File

@ -1,42 +1,41 @@
From 6bb7b6c4a530abb7e831449545a35ee5ede49dcb Mon Sep 17 00:00:00 2001
From 192bac1ae2f20b098384264c8802034a340cd124 Mon Sep 17 00:00:00 2001
From: Bo Maryniuk <bo@suse.de>
Date: Thu, 11 Oct 2018 16:20:40 +0200
Subject: [PATCH] Fix unit test for grains core
---
tests/unit/grains/test_core.py | 11 +++++------
1 file changed, 5 insertions(+), 6 deletions(-)
tests/unit/grains/test_core.py | 9 +++++----
1 file changed, 5 insertions(+), 4 deletions(-)
diff --git a/tests/unit/grains/test_core.py b/tests/unit/grains/test_core.py
index b31f5dcddd..c40595eb3f 100644
index 34aaa4f5bc..7dbf34deac 100644
--- a/tests/unit/grains/test_core.py
+++ b/tests/unit/grains/test_core.py
@@ -68,11 +68,10 @@ class CoreGrainsTestCase(TestCase, LoaderModuleMockMixin):
def test_parse_etc_os_release(self, path_isfile_mock):
path_isfile_mock.side_effect = lambda x: x == "/usr/lib/os-release"
with salt.utils.files.fopen(os.path.join(OS_RELEASE_DIR, "ubuntu-17.10")) as os_release_file:
@@ -59,10 +59,11 @@ class CoreGrainsTestCase(TestCase, LoaderModuleMockMixin):
with salt.utils.files.fopen(
os.path.join(OS_RELEASE_DIR, "ubuntu-17.10")
) as os_release_file:
- os_release_content = os_release_file.read()
- with patch("salt.utils.files.fopen", mock_open(read_data=os_release_content)):
- os_release = core._parse_os_release(
- '/etc/os-release',
- '/usr/lib/os-release')
+ os_release_content = os_release_file.readlines()
+ with patch("salt.utils.files.fopen", mock_open()) as os_release_file:
+ os_release_file.return_value.__iter__.return_value = os_release_content
+ os_release = core._parse_os_release(["/etc/os-release", "/usr/lib/os-release"])
self.assertEqual(os_release, {
"NAME": "Ubuntu",
"VERSION": "17.10 (Artful Aardvark)",
@@ -134,7 +133,7 @@ class CoreGrainsTestCase(TestCase, LoaderModuleMockMixin):
os_release = core._parse_os_release(
- "/etc/os-release", "/usr/lib/os-release"
+ ["/etc/os-release", "/usr/lib/os-release"]
)
self.assertEqual(
os_release,
@@ -172,7 +173,7 @@ class CoreGrainsTestCase(TestCase, LoaderModuleMockMixin):
def test_missing_os_release(self):
with patch('salt.utils.files.fopen', mock_open(read_data={})):
- os_release = core._parse_os_release('/etc/os-release', '/usr/lib/os-release')
+ os_release = core._parse_os_release(['/etc/os-release', '/usr/lib/os-release'])
with patch("salt.utils.files.fopen", mock_open(read_data={})):
os_release = core._parse_os_release(
- "/etc/os-release", "/usr/lib/os-release"
+ ["/etc/os-release", "/usr/lib/os-release"]
)
self.assertEqual(os_release, {})
@skipIf(not salt.utils.platform.is_windows(), 'System is not Windows')
--
2.16.4
2.29.2

View File

@ -1,21 +1,21 @@
From e9f2af1256a52d58a7c8e6dd0122eb6d5cc47dd3 Mon Sep 17 00:00:00 2001
From 09a871c197be4933475ee4582755d9b0cb5a700e Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Pablo=20Su=C3=A1rez=20Hern=C3=A1ndez?=
<psuarezhernandez@suse.com>
Date: Wed, 4 Mar 2020 10:13:43 +0000
Subject: [PATCH] Fix unit tests for batch async after refactor
---
tests/unit/cli/test_batch_async.py | 18 +++++++++++++++++-
1 file changed, 17 insertions(+), 1 deletion(-)
tests/unit/cli/test_batch_async.py | 20 +++++++++++++++++++-
1 file changed, 19 insertions(+), 1 deletion(-)
diff --git a/tests/unit/cli/test_batch_async.py b/tests/unit/cli/test_batch_async.py
index f1d36a81fb..e1ce60859b 100644
index b04965268a..dcee9a87bd 100644
--- a/tests/unit/cli/test_batch_async.py
+++ b/tests/unit/cli/test_batch_async.py
@@ -126,9 +126,10 @@ class AsyncBatchTestCase(AsyncTestCase, TestCase):
self.batch.timedout_minions = {'bar'}
@@ -120,9 +120,10 @@ class AsyncBatchTestCase(AsyncTestCase, TestCase):
self.batch.timedout_minions = {"bar"}
self.batch.event = MagicMock()
self.batch.metadata = {'mykey': 'myvalue'}
self.batch.metadata = {"mykey": "myvalue"}
+ old_event = self.batch.event
self.batch.end_batch()
self.assertEqual(
@ -23,8 +23,8 @@ index f1d36a81fb..e1ce60859b 100644
+ old_event.fire_event.call_args[0],
(
{
'available_minions': set(['foo', 'bar']),
@@ -146,6 +147,21 @@ class AsyncBatchTestCase(AsyncTestCase, TestCase):
"available_minions": {"foo", "bar"},
@@ -140,6 +141,23 @@ class AsyncBatchTestCase(AsyncTestCase, TestCase):
event = MagicMock()
batch.event = event
batch.__del__()
@ -36,17 +36,19 @@ index f1d36a81fb..e1ce60859b 100644
+ batch = BatchAsync(MagicMock(), MagicMock(), MagicMock())
+ event = MagicMock()
+ batch.event = event
+ batch.patterns = { ('salt/job/1234/ret/*', 'find_job_return'), ('salt/job/4321/ret/*', 'find_job_return') }
+ batch.patterns = {
+ ("salt/job/1234/ret/*", "find_job_return"),
+ ("salt/job/4321/ret/*", "find_job_return"),
+ }
+ batch.close_safe()
+ self.assertEqual(batch.local, None)
+ self.assertEqual(batch.event, None)
+ self.assertEqual(batch.ioloop, None)
+ self.assertEqual(
+ len(event.unsubscribe.mock_calls), 2)
self.assertEqual(
len(event.remove_event_handler.mock_calls), 1)
+ self.assertEqual(len(event.unsubscribe.mock_calls), 2)
self.assertEqual(len(event.remove_event_handler.mock_calls), 1)
@tornado.testing.gen_test
--
2.23.0
2.29.2

View File

@ -1,4 +1,4 @@
From 37800f008e46a7321bcd4b88b4858d3ea1fabcdf Mon Sep 17 00:00:00 2001
From c05d571058b9520dbaf4aba3de001b1aefe8e2c2 Mon Sep 17 00:00:00 2001
From: Cedric Bosdonnat <cbosdonnat@suse.com>
Date: Tue, 15 Sep 2020 16:03:30 +0200
Subject: [PATCH] Fix virt.update with cpu defined (#263)
@ -10,11 +10,11 @@ updated.
1 file changed, 2 insertions(+), 2 deletions(-)
diff --git a/salt/modules/virt.py b/salt/modules/virt.py
index c07fabb406..4a8a55ced6 100644
index c042738370..c1a73fcb7f 100644
--- a/salt/modules/virt.py
+++ b/salt/modules/virt.py
@@ -2430,9 +2430,9 @@ def update(
data = {k: v for k, v in six.iteritems(locals()) if bool(v)}
@@ -2441,9 +2441,9 @@ def update(
data = {k: v for k, v in locals().items() if bool(v)}
if boot_dev:
data["boot_dev"] = {i + 1: dev for i, dev in enumerate(boot_dev.split())}
- need_update = need_update or salt.utils.xmlutil.change_xml(
@ -26,6 +26,6 @@ index c07fabb406..4a8a55ced6 100644
# Update the XML definition with the new disks and diff changes
devices_node = desc.find("devices")
--
2.28.0
2.29.2

View File

@ -1,70 +1,79 @@
From a8f0a15e4067ec278c8a2d690e3bf815523286ca Mon Sep 17 00:00:00 2001
From f3ac041e34952a4b753e4afc9dc4b6adaa1d0ff2 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Pablo=20Su=C3=A1rez=20Hern=C3=A1ndez?=
<psuarezhernandez@suse.com>
Date: Thu, 12 Mar 2020 13:26:51 +0000
Subject: [PATCH] Fix wrong test_mod_del_repo_multiline_values test after
rebase
Subject: [PATCH] Fix wrong test_mod_del_repo_multiline_values test
after rebase
---
tests/integration/modules/test_pkg.py | 56 +++------------------------
1 file changed, 6 insertions(+), 50 deletions(-)
tests/integration/modules/test_pkg.py | 63 ++++-----------------------
1 file changed, 8 insertions(+), 55 deletions(-)
diff --git a/tests/integration/modules/test_pkg.py b/tests/integration/modules/test_pkg.py
index 6f3767bfbd272848277b877d1fe640caf8f349f6..0f4c5c9d459c56bb485408f943c1dee49c46cd21 100644
index 3ece73074b..933755a9ec 100644
--- a/tests/integration/modules/test_pkg.py
+++ b/tests/integration/modules/test_pkg.py
@@ -134,6 +134,10 @@ class PkgModuleTest(ModuleCase, SaltReturnAssertsMixin):
if repo is not None:
self.run_function('pkg.del_repo', [repo])
@@ -143,6 +143,10 @@ class PkgModuleTest(ModuleCase, SaltReturnAssertsMixin):
self.run_function("pkg.del_repo", [repo])
@slowTest
+ @destructiveTest
+ @requires_salt_modules('pkg.mod_repo', 'pkg.del_repo', 'pkg.get_repo')
+ @requires_salt_modules("pkg.mod_repo", "pkg.del_repo", "pkg.get_repo")
+ @requires_network()
+ @requires_system_grains
def test_mod_del_repo_multiline_values(self):
'''
"""
test modifying and deleting a software repository defined with multiline values
@@ -141,8 +145,9 @@ class PkgModuleTest(ModuleCase, SaltReturnAssertsMixin):
os_grain = self.run_function('grains.item', ['os'])['os']
@@ -150,10 +154,13 @@ class PkgModuleTest(ModuleCase, SaltReturnAssertsMixin):
os_grain = self.run_function("grains.item", ["os"])["os"]
repo = None
try:
- if os_grain in ['CentOS', 'RedHat']:
+ if os_grain in ['CentOS', 'RedHat', 'SUSE']:
my_baseurl = 'http://my.fake.repo/foo/bar/\n http://my.fake.repo.alt/foo/bar/'
+ expected_get_repo_baseurl_zypp = 'http://my.fake.repo/foo/bar/%0A%20http://my.fake.repo.alt/foo/bar/'
expected_get_repo_baseurl = 'http://my.fake.repo/foo/bar/\nhttp://my.fake.repo.alt/foo/bar/'
major_release = int(
self.run_function(
@@ -189,55 +194,6 @@ class PkgModuleTest(ModuleCase, SaltReturnAssertsMixin):
- if os_grain in ["CentOS", "RedHat"]:
+ if os_grain in ["CentOS", "RedHat", "SUSE"]:
my_baseurl = (
"http://my.fake.repo/foo/bar/\n http://my.fake.repo.alt/foo/bar/"
)
+ expected_get_repo_baseurl_zypp = (
+ "http://my.fake.repo/foo/bar/%0A%20http://my.fake.repo.alt/foo/bar/"
+ )
expected_get_repo_baseurl = (
"http://my.fake.repo/foo/bar/\nhttp://my.fake.repo.alt/foo/bar/"
)
@@ -207,60 +214,6 @@ class PkgModuleTest(ModuleCase, SaltReturnAssertsMixin):
if repo is not None:
self.run_function('pkg.del_repo', [repo])
self.run_function("pkg.del_repo", [repo])
- def test_mod_del_repo_multiline_values(self):
- '''
- """
- test modifying and deleting a software repository defined with multiline values
- '''
- os_grain = self.run_function('grains.item', ['os'])['os']
- """
- os_grain = self.run_function("grains.item", ["os"])["os"]
- repo = None
- try:
- if os_grain in ['CentOS', 'RedHat', 'SUSE']:
- my_baseurl = 'http://my.fake.repo/foo/bar/\n http://my.fake.repo.alt/foo/bar/'
- expected_get_repo_baseurl_zypp = 'http://my.fake.repo/foo/bar/%0A%20http://my.fake.repo.alt/foo/bar/'
- expected_get_repo_baseurl = 'http://my.fake.repo/foo/bar/\nhttp://my.fake.repo.alt/foo/bar/'
- major_release = int(
- self.run_function(
- 'grains.item',
- ['osmajorrelease']
- )['osmajorrelease']
- if os_grain in ["CentOS", "RedHat", "SUSE"]:
- my_baseurl = (
- "http://my.fake.repo/foo/bar/\n http://my.fake.repo.alt/foo/bar/"
- )
- repo = 'fakerepo'
- name = 'Fake repo for RHEL/CentOS/SUSE'
- expected_get_repo_baseurl_zypp = (
- "http://my.fake.repo/foo/bar/%0A%20http://my.fake.repo.alt/foo/bar/"
- )
- expected_get_repo_baseurl = (
- "http://my.fake.repo/foo/bar/\nhttp://my.fake.repo.alt/foo/bar/"
- )
- major_release = int(
- self.run_function("grains.item", ["osmajorrelease"])[
- "osmajorrelease"
- ]
- )
- repo = "fakerepo"
- name = "Fake repo for RHEL/CentOS/SUSE"
- baseurl = my_baseurl
- gpgkey = 'https://my.fake.repo/foo/bar/MY-GPG-KEY.pub'
- failovermethod = 'priority'
- gpgkey = "https://my.fake.repo/foo/bar/MY-GPG-KEY.pub"
- failovermethod = "priority"
- gpgcheck = 1
- enabled = 1
- ret = self.run_function(
- 'pkg.mod_repo',
- "pkg.mod_repo",
- [repo],
- name=name,
- baseurl=baseurl,
@ -78,20 +87,20 @@ index 6f3767bfbd272848277b877d1fe640caf8f349f6..0f4c5c9d459c56bb485408f943c1dee4
- self.assertNotEqual(ret, {})
- repo_info = ret[next(iter(ret))]
- self.assertIn(repo, repo_info)
- self.assertEqual(repo_info[repo]['baseurl'], my_baseurl)
- ret = self.run_function('pkg.get_repo', [repo])
- self.assertEqual(ret['baseurl'], expected_get_repo_baseurl)
- self.run_function('pkg.mod_repo', [repo])
- ret = self.run_function('pkg.get_repo', [repo])
- self.assertEqual(ret['baseurl'], expected_get_repo_baseurl)
- self.assertEqual(repo_info[repo]["baseurl"], my_baseurl)
- ret = self.run_function("pkg.get_repo", [repo])
- self.assertEqual(ret["baseurl"], expected_get_repo_baseurl)
- self.run_function("pkg.mod_repo", [repo])
- ret = self.run_function("pkg.get_repo", [repo])
- self.assertEqual(ret["baseurl"], expected_get_repo_baseurl)
- finally:
- if repo is not None:
- self.run_function('pkg.del_repo', [repo])
- self.run_function("pkg.del_repo", [repo])
-
@requires_salt_modules('pkg.owner')
@requires_salt_modules("pkg.owner")
def test_owner(self):
'''
"""
--
2.23.0
2.29.2

View File

@ -1,43 +1,53 @@
From eb51734ad93b1fa0c6bc8fde861fdabfe3e0d6b0 Mon Sep 17 00:00:00 2001
From 81f38c8cb16634b2c86b3e1e7c745870f90771d0 Mon Sep 17 00:00:00 2001
From: Mihai Dinca <mdinca@suse.de>
Date: Thu, 13 Jun 2019 17:48:55 +0200
Subject: [PATCH] Fix zypper pkg.list_pkgs expectation and dpkg mocking
---
tests/unit/modules/test_dpkg_lowpkg.py | 12 ++++++------
1 file changed, 6 insertions(+), 6 deletions(-)
tests/unit/modules/test_dpkg_lowpkg.py | 22 ++++++++++++++++------
1 file changed, 16 insertions(+), 6 deletions(-)
diff --git a/tests/unit/modules/test_dpkg_lowpkg.py b/tests/unit/modules/test_dpkg_lowpkg.py
index a0b3346f9d..bc564f080a 100644
index 160bbcd5b1..dadbc30dfa 100644
--- a/tests/unit/modules/test_dpkg_lowpkg.py
+++ b/tests/unit/modules/test_dpkg_lowpkg.py
@@ -125,9 +125,9 @@ class DpkgTestCase(TestCase, LoaderModuleMockMixin):
with patch.dict(dpkg.__salt__, {'cmd.run_all': mock}):
self.assertEqual(dpkg.file_dict('httpd'), 'Error: error')
@@ -308,9 +308,14 @@ class DpkgTestCase(TestCase, LoaderModuleMockMixin):
dpkg.bin_pkg_info("package.deb")["name"], "package_name"
)
- @patch('salt.modules.dpkg._get_pkg_ds_avail', MagicMock(return_value=dselect_pkg))
- @patch('salt.modules.dpkg._get_pkg_info', MagicMock(return_value=pkgs_info))
- @patch('salt.modules.dpkg._get_pkg_license', MagicMock(return_value='BSD v3'))
+ @patch('salt.modules.dpkg_lowpkg._get_pkg_ds_avail', MagicMock(return_value=dselect_pkg))
+ @patch('salt.modules.dpkg_lowpkg._get_pkg_info', MagicMock(return_value=pkgs_info))
+ @patch('salt.modules.dpkg_lowpkg._get_pkg_license', MagicMock(return_value='BSD v3'))
- @patch("salt.modules.dpkg._get_pkg_ds_avail", MagicMock(return_value=dselect_pkg))
- @patch("salt.modules.dpkg._get_pkg_info", MagicMock(return_value=pkgs_info))
- @patch("salt.modules.dpkg._get_pkg_license", MagicMock(return_value="BSD v3"))
+ @patch(
+ "salt.modules.dpkg_lowpkg._get_pkg_ds_avail",
+ MagicMock(return_value=dselect_pkg),
+ )
+ @patch("salt.modules.dpkg_lowpkg._get_pkg_info", MagicMock(return_value=pkgs_info))
+ @patch(
+ "salt.modules.dpkg_lowpkg._get_pkg_license", MagicMock(return_value="BSD v3")
+ )
def test_info(self):
'''
"""
Test info
@@ -152,9 +152,9 @@ class DpkgTestCase(TestCase, LoaderModuleMockMixin):
assert pkg_data['maintainer'] == 'Simpsons Developers <simpsons-devel-discuss@lists.springfield.org>'
assert pkg_data['license'] == 'BSD v3'
@@ -359,9 +364,14 @@ class DpkgTestCase(TestCase, LoaderModuleMockMixin):
)
assert pkg_data["license"] == "BSD v3"
- @patch('salt.modules.dpkg._get_pkg_ds_avail', MagicMock(return_value=dselect_pkg))
- @patch('salt.modules.dpkg._get_pkg_info', MagicMock(return_value=pkgs_info))
- @patch('salt.modules.dpkg._get_pkg_license', MagicMock(return_value='BSD v3'))
+ @patch('salt.modules.dpkg_lowpkg._get_pkg_ds_avail', MagicMock(return_value=dselect_pkg))
+ @patch('salt.modules.dpkg_lowpkg._get_pkg_info', MagicMock(return_value=pkgs_info))
+ @patch('salt.modules.dpkg_lowpkg._get_pkg_license', MagicMock(return_value='BSD v3'))
- @patch("salt.modules.dpkg._get_pkg_ds_avail", MagicMock(return_value=dselect_pkg))
- @patch("salt.modules.dpkg._get_pkg_info", MagicMock(return_value=pkgs_info))
- @patch("salt.modules.dpkg._get_pkg_license", MagicMock(return_value="BSD v3"))
+ @patch(
+ "salt.modules.dpkg_lowpkg._get_pkg_ds_avail",
+ MagicMock(return_value=dselect_pkg),
+ )
+ @patch("salt.modules.dpkg_lowpkg._get_pkg_info", MagicMock(return_value=pkgs_info))
+ @patch(
+ "salt.modules.dpkg_lowpkg._get_pkg_license", MagicMock(return_value="BSD v3")
+ )
def test_info_attr(self):
'''
"""
Test info with 'attr' parameter
--
2.16.4
2.29.2

View File

@ -1,4 +1,4 @@
From 0612549b3acfeb15e0b499b6f469d64062d6ae2d Mon Sep 17 00:00:00 2001
From b9ba6875945e1ffafdeb862d8b2ac7fccd9cccf5 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Pablo=20Su=C3=A1rez=20Hern=C3=A1ndez?=
<psuarezhernandez@suse.com>
Date: Mon, 25 Jun 2018 13:06:40 +0100
@ -14,17 +14,17 @@ Fix '_find_remove_targets' after aligning Zypper with pkg state
1 file changed, 21 deletions(-)
diff --git a/salt/states/pkg.py b/salt/states/pkg.py
index c0fa2f6b69..a13d418400 100644
index a1b2a122bb..f7327a33e3 100644
--- a/salt/states/pkg.py
+++ b/salt/states/pkg.py
@@ -450,16 +450,6 @@ def _find_remove_targets(name=None,
@@ -477,16 +477,6 @@ def _find_remove_targets(
if __grains__['os'] == 'FreeBSD' and origin:
cver = [k for k, v in six.iteritems(cur_pkgs) if v['origin'] == pkgname]
- elif __grains__['os_family'] == 'Suse':
if __grains__["os"] == "FreeBSD" and origin:
cver = [k for k, v in cur_pkgs.items() if v["origin"] == pkgname]
- elif __grains__["os_family"] == "Suse":
- # On SUSE systems. Zypper returns packages without "arch" in name
- try:
- namepart, archpart = pkgname.rsplit('.', 1)
- namepart, archpart = pkgname.rsplit(".", 1)
- except ValueError:
- cver = cur_pkgs.get(pkgname, [])
- else:
@ -34,14 +34,14 @@ index c0fa2f6b69..a13d418400 100644
else:
cver = cur_pkgs.get(pkgname, [])
@@ -866,17 +856,6 @@ def _verify_install(desired, new_pkgs, ignore_epoch=False, new_caps=None):
cver = new_pkgs.get(pkgname.split('%')[0])
elif __grains__['os_family'] == 'Debian':
cver = new_pkgs.get(pkgname.split('=')[0])
- elif __grains__['os_family'] == 'Suse':
@@ -930,17 +920,6 @@ def _verify_install(desired, new_pkgs, ignore_epoch=None, new_caps=None):
cver = new_pkgs.get(pkgname.split("%")[0])
elif __grains__["os_family"] == "Debian":
cver = new_pkgs.get(pkgname.split("=")[0])
- elif __grains__["os_family"] == "Suse":
- # On SUSE systems. Zypper returns packages without "arch" in name
- try:
- namepart, archpart = pkgname.rsplit('.', 1)
- namepart, archpart = pkgname.rsplit(".", 1)
- except ValueError:
- cver = new_pkgs.get(pkgname)
- else:
@ -53,6 +53,6 @@ index c0fa2f6b69..a13d418400 100644
cver = new_pkgs.get(pkgname)
if not cver and pkgname in new_caps:
--
2.16.4
2.29.2

View File

@ -1,4 +1,4 @@
From 3df8359421f60140fd335d95c3c06de0bfd6ac4f Mon Sep 17 00:00:00 2001
From 17ad05e3cbb3718ca12cef20600be81aa5d42d33 Mon Sep 17 00:00:00 2001
From: tyl0re <andreas@vogler.name>
Date: Wed, 17 Jul 2019 10:13:09 +0200
Subject: [PATCH] Fixed Bug LVM has no Parttion Type. the Scipt Later
@ -11,23 +11,41 @@ Subject: [PATCH] Fixed Bug LVM has no Parttion Type. the Scipt Later
So the check on not defined fs_type is missing
---
salt/modules/parted_partition.py | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
salt/modules/parted_partition.py | 19 ++++++++++++++++---
1 file changed, 16 insertions(+), 3 deletions(-)
diff --git a/salt/modules/parted_partition.py b/salt/modules/parted_partition.py
index 9441fec49fd1833da590b3f65637e8e92b287d1c..7d08a7b315c990e7a87c9c77fd6550a6174b7160 100644
index 015d4cbc29..bb34cd58b4 100644
--- a/salt/modules/parted_partition.py
+++ b/salt/modules/parted_partition.py
@@ -515,7 +515,7 @@ def mkpartfs(device, part_type, fs_type, start, end):
'Invalid part_type passed to partition.mkpartfs'
)
@@ -552,10 +552,23 @@ def mkpartfs(device, part_type, fs_type=None, start=None, end=None):
- if not _is_fstype(fs_type):
.. code-block:: bash
- salt '*' partition.mkpartfs /dev/sda primary fs_type=fat32 start=0 end=639
- salt '*' partition.mkpartfs /dev/sda primary start=0 end=639
+ salt '*' partition.mkpartfs /dev/sda logical ext2 440 670
"""
- out = mkpart(device, part_type, fs_type, start, end)
+ _validate_device(device)
+
+ if part_type not in {"primary", "logical", "extended"}:
+ raise CommandExecutionError("Invalid part_type passed to partition.mkpartfs")
+
+ if fs_type and not _is_fstype(fs_type):
raise CommandExecutionError(
'Invalid fs_type passed to partition.mkpartfs'
)
+ raise CommandExecutionError("Invalid fs_type passed to partition.mkpartfs")
+
+ _validate_partition_boundary(start)
+ _validate_partition_boundary(end)
+
+ cmd = "parted -m -s -- {} mkpart {} {} {} {}".format(
+ device, part_type, fs_type, start, end
+ )
+ out = __salt__["cmd.run"](cmd).splitlines()
return out
--
2.23.0
2.29.2

View File

@ -1,4 +1,4 @@
From 9ec54e8c1394ab678c6129d98f07c6eafd446399 Mon Sep 17 00:00:00 2001
From 731a53bd241240e08c455a8cb3a59e4d65a6abb5 Mon Sep 17 00:00:00 2001
From: Erik Johnson <palehose@gmail.com>
Date: Fri, 24 Aug 2018 10:35:55 -0500
Subject: [PATCH] Fixes: CVE-2018-15750, CVE-2018-15751
@ -12,43 +12,47 @@ Handle Auth exceptions in run_job
Update tornado test to correct authentication message
---
salt/netapi/rest_cherrypy/app.py | 7 -------
tests/integration/netapi/rest_tornado/test_app.py | 4 ++--
2 files changed, 2 insertions(+), 9 deletions(-)
tests/integration/netapi/rest_tornado/test_app.py | 8 ++++++--
2 files changed, 6 insertions(+), 9 deletions(-)
diff --git a/salt/netapi/rest_cherrypy/app.py b/salt/netapi/rest_cherrypy/app.py
index fa1b540e5f..f8b500482b 100644
index e7641ccbc5..5dfbadf759 100644
--- a/salt/netapi/rest_cherrypy/app.py
+++ b/salt/netapi/rest_cherrypy/app.py
@@ -1176,13 +1176,6 @@ class LowDataAdapter(object):
@@ -1181,13 +1181,6 @@ class LowDataAdapter:
except (TypeError, ValueError):
raise cherrypy.HTTPError(401, 'Invalid token')
raise cherrypy.HTTPError(401, "Invalid token")
- if 'token' in chunk:
- if "token" in chunk:
- # Make sure that auth token is hex
- try:
- int(chunk['token'], 16)
- int(chunk["token"], 16)
- except (TypeError, ValueError):
- raise cherrypy.HTTPError(401, 'Invalid token')
- raise cherrypy.HTTPError(401, "Invalid token")
-
if client:
chunk['client'] = client
chunk["client"] = client
diff --git a/tests/integration/netapi/rest_tornado/test_app.py b/tests/integration/netapi/rest_tornado/test_app.py
index 10ec29f7fa..4102b5645a 100644
index e3ad8820d3..4e5e741f1d 100644
--- a/tests/integration/netapi/rest_tornado/test_app.py
+++ b/tests/integration/netapi/rest_tornado/test_app.py
@@ -282,8 +282,8 @@ class TestSaltAPIHandler(_SaltnadoIntegrationTestCase):
self.assertIn('jid', ret[0]) # the first 2 are regular returns
self.assertIn('jid', ret[1])
self.assertIn('Failed to authenticate', ret[2]) # bad auth
- self.assertEqual(ret[0]['minions'], sorted(['minion', 'sub_minion']))
- self.assertEqual(ret[1]['minions'], sorted(['minion', 'sub_minion']))
+ self.assertEqual(ret[0]['minions'], sorted(['minion', 'sub_minion', 'localhost']))
+ self.assertEqual(ret[1]['minions'], sorted(['minion', 'sub_minion', 'localhost']))
@@ -326,8 +326,12 @@ class TestSaltAPIHandler(_SaltnadoIntegrationTestCase):
self.assertIn("jid", ret[0]) # the first 2 are regular returns
self.assertIn("jid", ret[1])
self.assertIn("Failed to authenticate", ret[2]) # bad auth
- self.assertEqual(ret[0]["minions"], sorted(["minion", "sub_minion"]))
- self.assertEqual(ret[1]["minions"], sorted(["minion", "sub_minion"]))
+ self.assertEqual(
+ ret[0]["minions"], sorted(["minion", "sub_minion", "localhost"])
+ )
+ self.assertEqual(
+ ret[1]["minions"], sorted(["minion", "sub_minion", "localhost"])
+ )
@slowTest
def test_simple_local_async_post_no_tgt(self):
low = [{'client': 'local_async',
--
2.16.4
2.29.2

View File

@ -1,4 +1,4 @@
From 9a5f007a5baa4ba1d28b0e6708bac8b134e4891c Mon Sep 17 00:00:00 2001
From 82d1cadff4fa6248a9d891a3c228fc415207d8d6 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Mihai=20Dinc=C4=83?= <dincamihai@users.noreply.github.com>
Date: Tue, 26 Nov 2019 18:26:31 +0100
Subject: [PATCH] Fixing StreamClosed issue
@ -8,18 +8,18 @@ Subject: [PATCH] Fixing StreamClosed issue
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/salt/cli/batch_async.py b/salt/cli/batch_async.py
index 754c257b36..c4545e3ebc 100644
index f3d92b88f1..8d2601e636 100644
--- a/salt/cli/batch_async.py
+++ b/salt/cli/batch_async.py
@@ -221,7 +221,6 @@ class BatchAsync(object):
"metadata": self.metadata
@@ -232,7 +232,6 @@ class BatchAsync:
"metadata": self.metadata,
}
self.event.fire_event(data, "salt/batch/{0}/done".format(self.batch_jid))
self.event.fire_event(data, "salt/batch/{}/done".format(self.batch_jid))
- self.event.remove_event_handler(self.__event_handler)
for (pattern, label) in self.patterns:
if label in ["ping_return", "batch_run"]:
self.event.unsubscribe(pattern, match_type='glob')
@@ -265,6 +264,7 @@ class BatchAsync(object):
self.event.unsubscribe(pattern, match_type="glob")
@@ -277,6 +276,7 @@ class BatchAsync:
def __del__(self):
self.local = None
@ -28,6 +28,6 @@ index 754c257b36..c4545e3ebc 100644
self.ioloop = None
gc.collect()
--
2.16.4
2.29.2

View File

@ -0,0 +1,29 @@
From 36b107fb5108fe4e52e9ef522765d6ada588c50d Mon Sep 17 00:00:00 2001
From: Victor Zhestkov <vzhestkov@suse.com>
Date: Wed, 9 Dec 2020 14:58:55 +0300
Subject: [PATCH] Force zyppnotify to prefer Packages.db than Packages
if it exists
---
scripts/suse/zypper/plugins/commit/zyppnotify | 4 +++-
1 file changed, 3 insertions(+), 1 deletion(-)
diff --git a/scripts/suse/zypper/plugins/commit/zyppnotify b/scripts/suse/zypper/plugins/commit/zyppnotify
index 51ac02254e..d6a1bef42b 100755
--- a/scripts/suse/zypper/plugins/commit/zyppnotify
+++ b/scripts/suse/zypper/plugins/commit/zyppnotify
@@ -20,7 +20,9 @@ class DriftDetector(Plugin):
def __init__(self):
Plugin.__init__(self)
self.ck_path = "/var/cache/salt/minion/rpmdb.cookie"
- self.rpm_path = "/var/lib/rpm/Packages"
+ self.rpm_path = "/var/lib/rpm/Packages.db"
+ if not os.path.exists(self.rpm_path):
+ self.rpm_path = "/var/lib/rpm/Packages"
def _get_mtime(self):
"""
--
2.29.2

View File

@ -1,4 +1,4 @@
From 98f3bd70aaa145b88e8bd4b947b578435e2b1e57 Mon Sep 17 00:00:00 2001
From e987664551debb9affce8ce5a70593ef0750dcd5 Mon Sep 17 00:00:00 2001
From: Bo Maryniuk <bo@suse.de>
Date: Wed, 14 Nov 2018 17:36:23 +0100
Subject: [PATCH] Get os_arch also without RPM package installed
@ -17,29 +17,31 @@ Add UT for OS arch detection when no CPU arch or machine can be determined
Remove unsupported testcase
---
tests/unit/utils/test_pkg.py | 48 ++++++++------------------------------------
1 file changed, 8 insertions(+), 40 deletions(-)
tests/unit/utils/test_pkg.py | 53 ++++++------------------------------
1 file changed, 8 insertions(+), 45 deletions(-)
diff --git a/tests/unit/utils/test_pkg.py b/tests/unit/utils/test_pkg.py
index e8b19bef14..361e0bf92f 100644
index b4a67b8e57..404b01b12b 100644
--- a/tests/unit/utils/test_pkg.py
+++ b/tests/unit/utils/test_pkg.py
@@ -2,51 +2,19 @@
from __future__ import absolute_import, unicode_literals, print_function
-from tests.support.unit import TestCase
-from tests.support.mock import MagicMock, patch
+from tests.support.unit import TestCase, skipIf
+from tests.support.mock import Mock, MagicMock, patch, NO_MOCK, NO_MOCK_REASON
@@ -1,53 +1,16 @@
-# -*- coding: utf-8 -*-
-
-from __future__ import absolute_import, print_function, unicode_literals
-
import salt.utils.pkg
from salt.utils.pkg import rpm
-from tests.support.mock import MagicMock, patch
-from tests.support.unit import TestCase
-
+from tests.support.mock import NO_MOCK, NO_MOCK_REASON, MagicMock, Mock, patch
+from tests.support.unit import TestCase, skipIf
-class PkgUtilsTestCase(TestCase):
- '''
- """
- TestCase for salt.utils.pkg module
- '''
- """
-
- test_parameters = [
- ("16.0.0.49153-0+f1", "", "16.0.0.49153-0+f1"),
- ("> 15.0.0", ">", "15.0.0"),
@ -62,13 +64,13 @@ index e8b19bef14..361e0bf92f 100644
- ("<=>15.0.0", "<=>", "15.0.0"),
- ("<>15.0.0", "<>", "15.0.0"),
- ("=15.0.0", "=", "15.0.0"),
- ("", "", "")
- ("", "", ""),
- ]
-
- def test_split_comparison(self):
- '''
- """
- Tests salt.utils.pkg.split_comparison
- '''
- """
- for test_parameter in self.test_parameters:
- oper, verstr = salt.utils.pkg.split_comparison(test_parameter[0])
- self.assertEqual(test_parameter[1], oper)
@ -80,11 +82,11 @@ index e8b19bef14..361e0bf92f 100644
+@skipIf(NO_MOCK, NO_MOCK_REASON)
+@skipIf(pytest is None, 'PyTest is missing')
+@skipIf(pytest is None, "PyTest is missing")
class PkgRPMTestCase(TestCase):
'''
"""
Test case for pkg.rpm utils
--
2.16.4
2.29.2

View File

@ -1,4 +1,4 @@
From 0b6106815b708bc4cf25b4a02ebc8b7ebf299b39 Mon Sep 17 00:00:00 2001
From d9618fed8ff241c6f127f08ec59fea9c8b8e12a6 Mon Sep 17 00:00:00 2001
From: Alberto Planas <aplanas@suse.com>
Date: Tue, 27 Oct 2020 13:16:37 +0100
Subject: [PATCH] grains: master can read grains
@ -8,10 +8,10 @@ Subject: [PATCH] grains: master can read grains
1 file changed, 8 insertions(+), 2 deletions(-)
diff --git a/salt/grains/extra.py b/salt/grains/extra.py
index 6a26aece77..f80061ff4e 100644
index d25faac3b7..7729a5c0a5 100644
--- a/salt/grains/extra.py
+++ b/salt/grains/extra.py
@@ -94,8 +94,14 @@ def __secure_boot():
@@ -76,8 +76,14 @@ def __secure_boot():
enabled = False
sboot = glob.glob("/sys/firmware/efi/vars/SecureBoot-*/data")
if len(sboot) == 1:
@ -29,6 +29,6 @@ index 6a26aece77..f80061ff4e 100644
--
2.29.1
2.29.2

View File

@ -1,3 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:b2c1abe2851b8a9055a361fc2409477ac01ec4829f0588f3b58533cb5f1e4e89
size 8775440
oid sha256:7480c92d4197b02504c9a130a0268fd028eb0fd45d3c7a7075b8b78da85050ed
size 9943287

View File

@ -1,4 +1,4 @@
From a11587a1209cd198f421fafdb43510b6d651f4b2 Mon Sep 17 00:00:00 2001
From ac34a8d839f91285f4ced605250422a1ecf5cb55 Mon Sep 17 00:00:00 2001
From: EricS <54029547+ESiebigteroth@users.noreply.github.com>
Date: Tue, 3 Sep 2019 11:22:53 +0200
Subject: [PATCH] Implement network.fqdns module function (bsc#1134860)
@ -9,70 +9,30 @@ Subject: [PATCH] Implement network.fqdns module function (bsc#1134860)
* Reuse network.fqdns in grains.core.fqdns
* Return empty list when fqdns grains is disabled
Co-authored-by: Eric Siebigteroth <eric.siebigteroth@suse.de>
---
salt/grains/core.py | 66 +++++-------------------------------------
salt/modules/network.py | 60 ++++++++++++++++++++++++++++++++++++++
salt/utils/network.py | 12 ++++++++
tests/unit/grains/test_core.py | 63 +++++++++++++++++++++++++++++++---------
4 files changed, 130 insertions(+), 71 deletions(-)
salt/grains/core.py | 58 +++-------------------------------
salt/modules/network.py | 12 +++----
salt/utils/network.py | 2 +-
tests/unit/grains/test_core.py | 55 ++++++++++++--------------------
4 files changed, 31 insertions(+), 96 deletions(-)
diff --git a/salt/grains/core.py b/salt/grains/core.py
index 0f3ccd9b92..77ae99590f 100644
index 5f18ba4a58..0dc1d97f97 100644
--- a/salt/grains/core.py
+++ b/salt/grains/core.py
@@ -26,8 +26,9 @@ from errno import EACCES, EPERM
import datetime
@@ -23,7 +23,6 @@ import uuid
import warnings
import time
+import salt.modules.network
import zlib
from errno import EACCES, EPERM
-from multiprocessing.pool import ThreadPool
+from salt.utils.network import _get_interfaces
# pylint: disable=import-error
try:
@@ -84,6 +85,7 @@ __salt__ = {
'cmd.run_all': salt.modules.cmdmod._run_all_quiet,
'smbios.records': salt.modules.smbios.records,
'smbios.get': salt.modules.smbios.get,
+ 'network.fqdns': salt.modules.network.fqdns,
}
log = logging.getLogger(__name__)
@@ -107,7 +109,6 @@ HAS_UNAME = True
if not hasattr(os, 'uname'):
HAS_UNAME = False
-_INTERFACES = {}
# Possible value for h_errno defined in netdb.h
HOST_NOT_FOUND = 1
@@ -1553,17 +1554,6 @@ def _linux_bin_exists(binary):
return False
-def _get_interfaces():
- '''
- Provide a dict of the connected interfaces and their ip addresses
- '''
-
- global _INTERFACES
- if not _INTERFACES:
- _INTERFACES = salt.utils.network.interfaces()
- return _INTERFACES
-
-
def _parse_lsb_release():
ret = {}
try:
@@ -2271,52 +2261,12 @@ def fqdns():
'''
Return all known FQDNs for the system by enumerating all interfaces and
import distro
import salt.exceptions
@@ -2406,59 +2405,10 @@ def fqdns():
then trying to reverse resolve them (excluding 'lo' interface).
+ To disable the fqdns grain, set enable_fqdns_grains: False in the minion configuration file.
'''
To disable the fqdns grain, set enable_fqdns_grains: False in the minion configuration file.
"""
- # Provides:
- # fqdns
-
@ -82,224 +42,220 @@ index 0f3ccd9b92..77ae99590f 100644
- def _lookup_fqdn(ip):
- try:
- name, aliaslist, addresslist = socket.gethostbyaddr(ip)
- return [socket.getfqdn(name)] + [als for als in aliaslist if salt.utils.network.is_fqdn(als)]
- return [socket.getfqdn(name)] + [
- als for als in aliaslist if salt.utils.network.is_fqdn(als)
- ]
- except socket.herror as err:
- if err.errno in (0, HOST_NOT_FOUND, NO_DATA):
- # No FQDN for this IP address, so we don't need to know this all the time.
- log.debug("Unable to resolve address %s: %s", ip, err)
- else:
- log.error(err_message, ip, err)
- except (socket.error, socket.gaierror, socket.timeout) as err:
- except (OSError, socket.gaierror, socket.timeout) as err:
- log.error(err_message, ip, err)
-
- start = time.time()
-
- addresses = salt.utils.network.ip_addrs(include_loopback=False, interface_data=_get_interfaces())
- addresses.extend(salt.utils.network.ip_addrs6(include_loopback=False, interface_data=_get_interfaces()))
- err_message = 'Exception during resolving address: %s'
- addresses = salt.utils.network.ip_addrs(
- include_loopback=False, interface_data=_get_interfaces()
- )
- addresses.extend(
- salt.utils.network.ip_addrs6(
- include_loopback=False, interface_data=_get_interfaces()
- )
- )
- err_message = "Exception during resolving address: %s"
-
- # Create a ThreadPool to process the underlying calls to 'socket.gethostbyaddr' in parallel.
- # This avoid blocking the execution when the "fqdn" is not defined for certains IP addresses, which was causing
- # that "socket.timeout" was reached multiple times secuencially, blocking execution for several seconds.
-
- try:
- pool = ThreadPool(8)
- results = pool.map(_lookup_fqdn, addresses)
- pool.close()
- pool.join()
- pool = ThreadPool(8)
- results = pool.map(_lookup_fqdn, addresses)
- pool.close()
- pool.join()
- except Exception as exc:
- log.error("Exception while creating a ThreadPool for resolving FQDNs: %s", exc)
- log.error("Exception while creating a ThreadPool for resolving FQDNs: %s", exc)
-
- for item in results:
- if item:
- fqdns.update(item)
-
- elapsed = time.time() - start
- log.debug('Elapsed time getting FQDNs: {} seconds'.format(elapsed))
- log.debug("Elapsed time getting FQDNs: {} seconds".format(elapsed))
-
- return {"fqdns": sorted(list(fqdns))}
+ opt = {"fqdns": []}
+ if __opts__.get('enable_fqdns_grains', True) == True:
+ opt = __salt__['network.fqdns']()
+ if __opts__.get("enable_fqdns_grains", True) == True:
+ opt = __salt__["network.fqdns"]()
+ return opt
def ip_fqdn():
diff --git a/salt/modules/network.py b/salt/modules/network.py
index 38e2bc326e..880f4f8d5f 100644
index 2e1410c288..59ed43bba6 100644
--- a/salt/modules/network.py
+++ b/salt/modules/network.py
@@ -11,6 +11,10 @@ import logging
import re
import os
import socket
+import time
+
+from multiprocessing.pool import ThreadPool
+
@@ -2,7 +2,6 @@
Module for gathering and managing network information
"""
# Import salt libs
-# Import python libs
import datetime
import hashlib
import logging
@@ -12,7 +11,6 @@ import socket
import time
from multiprocessing.pool import ThreadPool
-# Import salt libs
import salt.utils.decorators.path
@@ -1887,3 +1891,59 @@ def iphexval(ip):
a = ip.split('.')
hexval = ['%02X' % int(x) for x in a] # pylint: disable=E1321
return ''.join(hexval)
+
+
+def fqdns():
+ '''
+ Return all known FQDNs for the system by enumerating all interfaces and
+ then trying to reverse resolve them (excluding 'lo' interface).
+ '''
+ # Provides:
+ # fqdns
+
+ # Possible value for h_errno defined in netdb.h
+ HOST_NOT_FOUND = 1
+ NO_DATA = 4
+
+ grains = {}
+ fqdns = set()
+
+ def _lookup_fqdn(ip):
+ try:
import salt.utils.functools
import salt.utils.network
@@ -20,8 +18,6 @@ import salt.utils.platform
import salt.utils.validate.net
from salt._compat import ipaddress
from salt.exceptions import CommandExecutionError
-
-# Import 3rd-party libs
from salt.ext.six.moves import range
log = logging.getLogger(__name__)
@@ -2076,7 +2072,10 @@ def fqdns():
def _lookup_fqdn(ip):
try:
- return [socket.getfqdn(socket.gethostbyaddr(ip)[0])]
+ name, aliaslist, addresslist = socket.gethostbyaddr(ip)
+ return [socket.getfqdn(name)] + [als for als in aliaslist if salt.utils.network.is_fqdn(als)]
+ except socket.herror as err:
+ if err.errno in (0, HOST_NOT_FOUND, NO_DATA):
+ # No FQDN for this IP address, so we don't need to know this all the time.
+ log.debug("Unable to resolve address %s: %s", ip, err)
+ else:
+ log.error(err_message, err)
+ except (socket.error, socket.gaierror, socket.timeout) as err:
+ log.error(err_message, err)
+
+ start = time.time()
+
+ addresses = salt.utils.network.ip_addrs(include_loopback=False, interface_data=salt.utils.network._get_interfaces())
+ addresses.extend(salt.utils.network.ip_addrs6(include_loopback=False, interface_data=salt.utils.network._get_interfaces()))
+ err_message = 'Exception during resolving address: %s'
+
+ # Create a ThreadPool to process the underlying calls to 'socket.gethostbyaddr' in parallel.
+ # This avoid blocking the execution when the "fqdn" is not defined for certains IP addresses, which was causing
+ # that "socket.timeout" was reached multiple times secuencially, blocking execution for several seconds.
+
+ try:
+ pool = ThreadPool(8)
+ results = pool.map(_lookup_fqdn, addresses)
+ pool.close()
+ pool.join()
+ return [socket.getfqdn(name)] + [
+ als for als in aliaslist if salt.utils.network.is_fqdn(als)
+ ]
except socket.herror as err:
if err.errno in (0, HOST_NOT_FOUND, NO_DATA):
# No FQDN for this IP address, so we don't need to know this all the time.
@@ -2102,13 +2101,12 @@ def fqdns():
# This avoid blocking the execution when the "fqdn" is not defined for certains IP addresses, which was causing
# that "socket.timeout" was reached multiple times secuencially, blocking execution for several seconds.
- results = []
try:
pool = ThreadPool(8)
results = pool.map(_lookup_fqdn, addresses)
pool.close()
pool.join()
- except Exception as exc: # pylint: disable=broad-except
+ except Exception as exc:
+ log.error("Exception while creating a ThreadPool for resolving FQDNs: %s", exc)
+
+ for item in results:
+ if item:
+ fqdns.update(item)
+
+ elapsed = time.time() - start
+ log.debug('Elapsed time getting FQDNs: {} seconds'.format(elapsed))
+
+ return {"fqdns": sorted(list(fqdns))}
\ No newline at end of file
log.error("Exception while creating a ThreadPool for resolving FQDNs: %s", exc)
for item in results:
diff --git a/salt/utils/network.py b/salt/utils/network.py
index 74536cc143..4cc8a05c4a 100644
index d253ded3ab..25b2d06758 100644
--- a/salt/utils/network.py
+++ b/salt/utils/network.py
@@ -50,6 +50,18 @@ except (ImportError, OSError, AttributeError, TypeError):
pass
@@ -49,7 +49,7 @@ except (ImportError, OSError, AttributeError, TypeError):
_INTERFACES = {}
+_INTERFACES = {}
+def _get_interfaces(): #! function
+ '''
+ Provide a dict of the connected interfaces and their ip addresses
+ '''
+
+ global _INTERFACES
+ if not _INTERFACES:
+ _INTERFACES = interfaces()
+ return _INTERFACES
+
+
def sanitize_host(host):
'''
Sanitize host string.
-def _get_interfaces():
+def _get_interfaces(): #! function
"""
Provide a dict of the connected interfaces and their ip addresses
"""
diff --git a/tests/unit/grains/test_core.py b/tests/unit/grains/test_core.py
index ac03b57226..60914204b0 100644
index d760e57a54..a5ceeb8317 100644
--- a/tests/unit/grains/test_core.py
+++ b/tests/unit/grains/test_core.py
@@ -35,6 +35,7 @@ import salt.utils.path
import salt.modules.cmdmod
import salt.modules.smbios
import salt.grains.core as core
+import salt.modules.network
# Import 3rd-party libs
from salt.ext import six
@@ -1029,6 +1030,40 @@ class CoreGrainsTestCase(TestCase, LoaderModuleMockMixin):
with patch.object(salt.utils.dns, 'parse_resolv', MagicMock(return_value=resolv_mock)):
@@ -18,6 +18,7 @@ import salt.utils.network
import salt.utils.path
import salt.utils.platform
from salt._compat import ipaddress
+from salt.ext import six
from tests.support.mixins import LoaderModuleMockMixin
from tests.support.mock import MagicMock, Mock, mock_open, patch
from tests.support.unit import TestCase, skipIf
@@ -1293,14 +1294,14 @@ class CoreGrainsTestCase(TestCase, LoaderModuleMockMixin):
):
assert core.dns() == ret
+
- def test_enable_fqdns_false(self):
+ def test_enablefqdnsFalse(self):
+ '''
+ tests enable_fqdns_grains is set to False
+ '''
+ with patch.dict('salt.grains.core.__opts__', {'enable_fqdns_grains':False}):
+ assert core.fqdns() == {"fqdns": []}
+
+
"""
tests enable_fqdns_grains is set to False
"""
with patch.dict("salt.grains.core.__opts__", {"enable_fqdns_grains": False}):
assert core.fqdns() == {"fqdns": []}
- def test_enable_fqdns_true(self):
+ def test_enablefqdnsTrue(self):
+ '''
+ testing that grains uses network.fqdns module
+ '''
+ with patch.dict('salt.grains.core.__salt__', {'network.fqdns': MagicMock(return_value="my.fake.domain")}):
+ with patch.dict('salt.grains.core.__opts__', {'enable_fqdns_grains':True}):
+ assert core.fqdns() == 'my.fake.domain'
+
+
"""
testing that grains uses network.fqdns module
"""
@@ -1311,14 +1312,14 @@ class CoreGrainsTestCase(TestCase, LoaderModuleMockMixin):
with patch.dict("salt.grains.core.__opts__", {"enable_fqdns_grains": True}):
assert core.fqdns() == "my.fake.domain"
- def test_enable_fqdns_none(self):
+ def test_enablefqdnsNone(self):
+ '''
+ testing default fqdns grains is returned when enable_fqdns_grains is None
+ '''
+ with patch.dict('salt.grains.core.__opts__', {'enable_fqdns_grains':None}):
+ assert core.fqdns() == {"fqdns": []}
+
+
"""
testing default fqdns grains is returned when enable_fqdns_grains is None
"""
with patch.dict("salt.grains.core.__opts__", {"enable_fqdns_grains": None}):
assert core.fqdns() == {"fqdns": []}
- def test_enable_fqdns_without_patching(self):
+ def test_enablefqdnswithoutpaching(self):
+ '''
+ testing fqdns grains is enabled by default
+ '''
+ with patch.dict('salt.grains.core.__salt__', {'network.fqdns': MagicMock(return_value="my.fake.domain")}):
+ assert core.fqdns() == 'my.fake.domain'
+
+
@skipIf(not salt.utils.platform.is_linux(), 'System is not Linux')
@patch('salt.utils.network.ip_addrs', MagicMock(return_value=['1.2.3.4', '5.6.7.8']))
@patch('salt.utils.network.ip_addrs6',
@@ -1044,11 +1079,12 @@ class CoreGrainsTestCase(TestCase, LoaderModuleMockMixin):
('foo.bar.baz', [], ['fe80::a8b2:93ff:fe00:0']),
('bluesniff.foo.bar', [], ['fe80::a8b2:93ff:dead:beef'])]
ret = {'fqdns': ['bluesniff.foo.bar', 'foo.bar.baz', 'rinzler.evil-corp.com']}
- with patch.object(socket, 'gethostbyaddr', side_effect=reverse_resolv_mock):
"""
testing fqdns grains is enabled by default
"""
@@ -1326,23 +1327,7 @@ class CoreGrainsTestCase(TestCase, LoaderModuleMockMixin):
"salt.grains.core.__salt__",
{"network.fqdns": MagicMock(return_value="my.fake.domain")},
):
- # fqdns is disabled by default on Windows
- if salt.utils.platform.is_windows():
- assert core.fqdns() == {"fqdns": []}
- else:
- assert core.fqdns() == "my.fake.domain"
-
- def test_enable_fqdns_false_is_proxy(self):
- """
- testing fqdns grains is disabled by default for proxy minions
- """
- with patch("salt.utils.platform.is_proxy", return_value=True, autospec=True):
- with patch.dict(
- "salt.grains.core.__salt__",
- {"network.fqdns": MagicMock(return_value="my.fake.domain")},
- ):
- # fqdns is disabled by default on proxy minions
- assert core.fqdns() == {"fqdns": []}
+ assert core.fqdns() == "my.fake.domain"
@skipIf(not salt.utils.platform.is_linux(), "System is not Linux")
@patch(
@@ -1367,11 +1352,12 @@ class CoreGrainsTestCase(TestCase, LoaderModuleMockMixin):
("bluesniff.foo.bar", [], ["fe80::a8b2:93ff:dead:beef"]),
]
ret = {"fqdns": ["bluesniff.foo.bar", "foo.bar.baz", "rinzler.evil-corp.com"]}
- with patch.object(socket, "gethostbyaddr", side_effect=reverse_resolv_mock):
- fqdns = core.fqdns()
- assert "fqdns" in fqdns
- assert len(fqdns['fqdns']) == len(ret['fqdns'])
- assert set(fqdns['fqdns']) == set(ret['fqdns'])
+ with patch.dict(core.__salt__, {'network.fqdns': salt.modules.network.fqdns}):
+ with patch.object(socket, 'gethostbyaddr', side_effect=reverse_resolv_mock):
- assert len(fqdns["fqdns"]) == len(ret["fqdns"])
- assert set(fqdns["fqdns"]) == set(ret["fqdns"])
+ with patch.dict(core.__salt__, {"network.fqdns": salt.modules.network.fqdns}):
+ with patch.object(socket, "gethostbyaddr", side_effect=reverse_resolv_mock):
+ fqdns = core.fqdns()
+ assert "fqdns" in fqdns
+ assert len(fqdns['fqdns']) == len(ret['fqdns'])
+ assert set(fqdns['fqdns']) == set(ret['fqdns'])
+ assert len(fqdns["fqdns"]) == len(ret["fqdns"])
+ assert set(fqdns["fqdns"]) == set(ret["fqdns"])
@skipIf(not salt.utils.platform.is_linux(), 'System is not Linux')
@patch('salt.utils.network.ip_addrs', MagicMock(return_value=['1.2.3.4']))
@@ -1094,14 +1130,15 @@ class CoreGrainsTestCase(TestCase, LoaderModuleMockMixin):
('rinzler.evil-corp.com', ["false-hostname", "badaliass"], ['5.6.7.8']),
('foo.bar.baz', [], ['fe80::a8b2:93ff:fe00:0']),
('bluesniff.foo.bar', ["alias.bluesniff.foo.bar"], ['fe80::a8b2:93ff:dead:beef'])]
- with patch.object(socket, 'gethostbyaddr', side_effect=reverse_resolv_mock):
@skipIf(not salt.utils.platform.is_linux(), "System is not Linux")
@patch("salt.utils.network.ip_addrs", MagicMock(return_value=["1.2.3.4"]))
@@ -1437,14 +1423,15 @@ class CoreGrainsTestCase(TestCase, LoaderModuleMockMixin):
["fe80::a8b2:93ff:dead:beef"],
),
]
- with patch.object(socket, "gethostbyaddr", side_effect=reverse_resolv_mock):
- fqdns = core.fqdns()
- assert "fqdns" in fqdns
- for alias in ["this.is.valid.alias", "alias.bluesniff.foo.bar"]:
@ -307,8 +263,8 @@ index ac03b57226..60914204b0 100644
-
- for alias in ["throwmeaway", "false-hostname", "badaliass"]:
- assert alias not in fqdns["fqdns"]
+ with patch.dict(core.__salt__, {'network.fqdns': salt.modules.network.fqdns}):
+ with patch.object(socket, 'gethostbyaddr', side_effect=reverse_resolv_mock):
+ with patch.dict(core.__salt__, {"network.fqdns": salt.modules.network.fqdns}):
+ with patch.object(socket, "gethostbyaddr", side_effect=reverse_resolv_mock):
+ fqdns = core.fqdns()
+ assert "fqdns" in fqdns
+ for alias in ["this.is.valid.alias", "alias.bluesniff.foo.bar"]:
@ -318,8 +274,8 @@ index ac03b57226..60914204b0 100644
+ assert alias not in fqdns["fqdns"]
def test_core_virtual(self):
'''
"""
--
2.16.4
2.29.2

View File

@ -1,4 +1,4 @@
From 65e33acaf10fdd838c0cdf34ec93df3a2ed1f0d2 Mon Sep 17 00:00:00 2001
From e53d50ce5fabf67eeb5344f7be9cccbb09d0179b Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Pablo=20Su=C3=A1rez=20Hern=C3=A1ndez?=
<psuarezhernandez@suse.com>
Date: Thu, 26 Sep 2019 10:41:06 +0100
@ -6,38 +6,39 @@ Subject: [PATCH] Improve batch_async to release consumed memory
(bsc#1140912)
---
salt/cli/batch_async.py | 73 ++++++++++++++++++++++++++++++-------------------
1 file changed, 45 insertions(+), 28 deletions(-)
salt/cli/batch_async.py | 89 ++++++++++++++++++++++++-----------------
1 file changed, 52 insertions(+), 37 deletions(-)
diff --git a/salt/cli/batch_async.py b/salt/cli/batch_async.py
index 8a67331102..2bb50459c8 100644
index 388b709416..0a0b8f5f83 100644
--- a/salt/cli/batch_async.py
+++ b/salt/cli/batch_async.py
@@ -5,6 +5,7 @@ Execute a job on the targeted minions by using a moving window of fixed size `ba
@@ -2,7 +2,7 @@
Execute a job on the targeted minions by using a moving window of fixed size `batch`.
"""
# Import python libs
from __future__ import absolute_import, print_function, unicode_literals
-import fnmatch
+import gc
import tornado
# Import salt libs
@@ -77,6 +78,7 @@ class BatchAsync(object):
# pylint: enable=import-error,no-name-in-module,redefined-builtin
import logging
@@ -78,6 +78,7 @@ class BatchAsync:
self.batch_jid = jid_gen()
self.find_job_jid = jid_gen()
self.find_job_returned = set()
+ self.ended = False
self.event = salt.utils.event.get_event(
'master',
self.opts['sock_dir'],
@@ -86,6 +88,7 @@ class BatchAsync(object):
io_loop=ioloop,
keep_loop=True)
"master",
self.opts["sock_dir"],
@@ -88,6 +89,7 @@ class BatchAsync:
keep_loop=True,
)
self.scheduled = False
+ self.patterns = {}
def __set_event_handler(self):
ping_return_pattern = 'salt/job/{0}/ret/*'.format(self.ping_jid)
@@ -116,7 +119,7 @@ class BatchAsync(object):
ping_return_pattern = "salt/job/{}/ret/*".format(self.ping_jid)
@@ -118,7 +120,7 @@ class BatchAsync:
if minion in self.active:
self.active.remove(minion)
self.done_minions.add(minion)
@ -45,29 +46,37 @@ index 8a67331102..2bb50459c8 100644
+ self.event.io_loop.spawn_callback(self.schedule_next)
def _get_next(self):
to_run = self.minions.difference(
@@ -129,23 +132,23 @@ class BatchAsync(object):
to_run = (
@@ -132,27 +134,27 @@ class BatchAsync:
)
return set(list(to_run)[:next_batch_size])
- @tornado.gen.coroutine
def check_find_job(self, batch_minions, jid):
- find_job_return_pattern = 'salt/job/{0}/ret/*'.format(jid)
- self.event.unsubscribe(find_job_return_pattern, match_type='glob')
- find_job_return_pattern = "salt/job/{}/ret/*".format(jid)
- self.event.unsubscribe(find_job_return_pattern, match_type="glob")
- self.patterns.remove((find_job_return_pattern, "find_job_return"))
+ if self.event:
+ find_job_return_pattern = 'salt/job/{0}/ret/*'.format(jid)
+ self.event.unsubscribe(find_job_return_pattern, match_type='glob')
+ find_job_return_pattern = "salt/job/{}/ret/*".format(jid)
+ self.event.unsubscribe(find_job_return_pattern, match_type="glob")
+ self.patterns.remove((find_job_return_pattern, "find_job_return"))
- timedout_minions = batch_minions.difference(self.find_job_returned).difference(self.done_minions)
- timedout_minions = batch_minions.difference(self.find_job_returned).difference(
- self.done_minions
- )
- self.timedout_minions = self.timedout_minions.union(timedout_minions)
- self.active = self.active.difference(self.timedout_minions)
- running = batch_minions.difference(self.done_minions).difference(self.timedout_minions)
+ timedout_minions = batch_minions.difference(self.find_job_returned).difference(self.done_minions)
- running = batch_minions.difference(self.done_minions).difference(
- self.timedout_minions
- )
+ timedout_minions = batch_minions.difference(
+ self.find_job_returned
+ ).difference(self.done_minions)
+ self.timedout_minions = self.timedout_minions.union(timedout_minions)
+ self.active = self.active.difference(self.timedout_minions)
+ running = batch_minions.difference(self.done_minions).difference(self.timedout_minions)
+ running = batch_minions.difference(self.done_minions).difference(
+ self.timedout_minions
+ )
- if timedout_minions:
- self.schedule_next()
@ -83,61 +92,65 @@ index 8a67331102..2bb50459c8 100644
@tornado.gen.coroutine
def find_job(self, minions):
@@ -165,8 +168,8 @@ class BatchAsync(object):
gather_job_timeout=self.opts['gather_job_timeout'],
@@ -175,18 +177,12 @@ class BatchAsync:
jid=jid,
**self.eauth)
**self.eauth
)
- self.event.io_loop.call_later(
- self.opts['gather_job_timeout'],
+ yield tornado.gen.sleep(self.opts['gather_job_timeout'])
+ self.event.io_loop.spawn_callback(
self.check_find_job,
not_done,
jid)
@@ -174,10 +177,6 @@ class BatchAsync(object):
- self.opts["gather_job_timeout"], self.check_find_job, not_done, jid
- )
+ yield tornado.gen.sleep(self.opts["gather_job_timeout"])
+ self.event.io_loop.spawn_callback(self.check_find_job, not_done, jid)
@tornado.gen.coroutine
def start(self):
self.__set_event_handler()
- #start batching even if not all minions respond to ping
- # start batching even if not all minions respond to ping
- self.event.io_loop.call_later(
- self.batch_presence_ping_timeout or self.opts['gather_job_timeout'],
- self.start_batch)
- self.batch_presence_ping_timeout or self.opts["gather_job_timeout"],
- self.start_batch,
- )
ping_return = yield self.local.run_job_async(
self.opts['tgt'],
'test.ping',
@@ -191,6 +190,10 @@ class BatchAsync(object):
metadata=self.metadata,
**self.eauth)
self.targeted_minions = set(ping_return['minions'])
+ #start batching even if not all minions respond to ping
+ yield tornado.gen.sleep(self.batch_presence_ping_timeout or self.opts['gather_job_timeout'])
self.opts["tgt"],
"test.ping",
@@ -198,6 +194,11 @@ class BatchAsync:
**self.eauth
)
self.targeted_minions = set(ping_return["minions"])
+ # start batching even if not all minions respond to ping
+ yield tornado.gen.sleep(
+ self.batch_presence_ping_timeout or self.opts["gather_job_timeout"]
+ )
+ self.event.io_loop.spawn_callback(self.start_batch)
+
@tornado.gen.coroutine
def start_batch(self):
@@ -202,12 +205,14 @@ class BatchAsync(object):
@@ -209,14 +210,18 @@ class BatchAsync:
"down_minions": self.targeted_minions.difference(self.minions),
"metadata": self.metadata
"metadata": self.metadata,
}
- self.event.fire_event(data, "salt/batch/{0}/start".format(self.batch_jid))
- self.event.fire_event(data, "salt/batch/{}/start".format(self.batch_jid))
- yield self.run_next()
+ ret = self.event.fire_event(data, "salt/batch/{0}/start".format(self.batch_jid))
+ ret = self.event.fire_event(
+ data, "salt/batch/{}/start".format(self.batch_jid)
+ )
+ self.event.io_loop.spawn_callback(self.run_next)
+ @tornado.gen.coroutine
def end_batch(self):
left = self.minions.symmetric_difference(self.done_minions.union(self.timedout_minions))
left = self.minions.symmetric_difference(
self.done_minions.union(self.timedout_minions)
)
- if not left:
+ if not left and not self.ended:
+ self.ended = True
data = {
"available_minions": self.minions,
"down_minions": self.targeted_minions.difference(self.minions),
@@ -220,20 +225,26 @@ class BatchAsync(object):
@@ -229,20 +234,26 @@ class BatchAsync:
for (pattern, label) in self.patterns:
if label in ["ping_return", "batch_run"]:
self.event.unsubscribe(pattern, match_type='glob')
self.event.unsubscribe(pattern, match_type="glob")
+ del self
+ gc.collect()
+ yield
@ -161,14 +174,16 @@ index 8a67331102..2bb50459c8 100644
- yield self.local.run_job_async(
+ ret = yield self.local.run_job_async(
next_batch,
self.opts['fun'],
self.opts['arg'],
@@ -244,11 +255,17 @@ class BatchAsync(object):
jid=self.batch_jid,
metadata=self.metadata)
self.opts["fun"],
self.opts["arg"],
@@ -254,13 +265,17 @@ class BatchAsync:
metadata=self.metadata,
)
- self.event.io_loop.call_later(self.opts['timeout'], self.find_job, set(next_batch))
+ yield tornado.gen.sleep(self.opts['timeout'])
- self.event.io_loop.call_later(
- self.opts["timeout"], self.find_job, set(next_batch)
- )
+ yield tornado.gen.sleep(self.opts["timeout"])
+ self.event.io_loop.spawn_callback(self.find_job, set(next_batch))
except Exception as ex:
log.error("Error in scheduling next batch: %s", ex)
@ -185,6 +200,6 @@ index 8a67331102..2bb50459c8 100644
+ self.ioloop = None
+ gc.collect()
--
2.16.4
2.29.2

View File

@ -1,4 +1,4 @@
From 512b189808ea0d7b333587689d7e7eb52d16b189 Mon Sep 17 00:00:00 2001
From 3c956a1cf1de17c5c49f0856051cabe2ffb4d0f2 Mon Sep 17 00:00:00 2001
From: Bo Maryniuk <bo@suse.de>
Date: Tue, 29 Jan 2019 11:11:38 +0100
Subject: [PATCH] Include aliases in the fqdns grains
@ -15,54 +15,116 @@ Add UT for fqdns aliases
Leverage cached interfaces, if any.
---
salt/grains/core.py | 14 ++++++--------
salt/utils/network.py | 12 ++++++++++++
tests/unit/grains/test_core.py | 28 +++++++++++++++++++++++++---
tests/unit/utils/test_network.py | 24 ++++++++++++++++++++++++
4 files changed, 67 insertions(+), 11 deletions(-)
salt/grains/core.py | 69 +++++++++++++++++++++-----------
salt/utils/network.py | 16 ++++++++
tests/unit/grains/test_core.py | 45 ++++++++++++++++++---
tests/unit/utils/test_network.py | 37 +++++++++++++++++
4 files changed, 138 insertions(+), 29 deletions(-)
diff --git a/salt/grains/core.py b/salt/grains/core.py
index 7b7e328520..309e4c9c4a 100644
index bc3cf129cd..006878f806 100644
--- a/salt/grains/core.py
+++ b/salt/grains/core.py
@@ -2275,14 +2275,13 @@ def fqdns():
grains = {}
fqdns = set()
@@ -1733,29 +1733,31 @@ def _parse_cpe_name(cpe):
- addresses = salt.utils.network.ip_addrs(include_loopback=False,
- interface_data=_INTERFACES)
- addresses.extend(salt.utils.network.ip_addrs6(include_loopback=False,
- interface_data=_INTERFACES))
- err_message = 'An exception occurred resolving address \'%s\': %s'
+ addresses = salt.utils.network.ip_addrs(include_loopback=False, interface_data=_get_interfaces())
+ addresses.extend(salt.utils.network.ip_addrs6(include_loopback=False, interface_data=_get_interfaces()))
+ err_message = 'Exception during resolving address: %s'
for ip in addresses:
try:
- fqdns.add(socket.getfqdn(socket.gethostbyaddr(ip)[0]))
def _parse_cpe_name(cpe):
- '''
+ """
Parse CPE_NAME data from the os-release
Info: https://csrc.nist.gov/projects/security-content-automation-protocol/scap-specifications/cpe
:param cpe:
:return:
- '''
+ """
part = {
- 'o': 'operating system',
- 'h': 'hardware',
- 'a': 'application',
+ "o": "operating system",
+ "h": "hardware",
+ "a": "application",
}
ret = {}
- cpe = (cpe or '').split(':')
- if len(cpe) > 4 and cpe[0] == 'cpe':
- if cpe[1].startswith('/'): # WFN to URI
- ret['vendor'], ret['product'], ret['version'] = cpe[2:5]
- ret['phase'] = cpe[5] if len(cpe) > 5 else None
- ret['part'] = part.get(cpe[1][1:])
- elif len(cpe) == 13 and cpe[1] == '2.3': # WFN to a string
- ret['vendor'], ret['product'], ret['version'], ret['phase'] = [x if x != '*' else None for x in cpe[3:7]]
- ret['part'] = part.get(cpe[2])
+ cpe = (cpe or "").split(":")
+ if len(cpe) > 4 and cpe[0] == "cpe":
+ if cpe[1].startswith("/"): # WFN to URI
+ ret["vendor"], ret["product"], ret["version"] = cpe[2:5]
+ ret["phase"] = cpe[5] if len(cpe) > 5 else None
+ ret["part"] = part.get(cpe[1][1:])
+ elif len(cpe) == 13 and cpe[1] == "2.3": # WFN to a string
+ ret["vendor"], ret["product"], ret["version"], ret["phase"] = [
+ x if x != "*" else None for x in cpe[3:7]
+ ]
+ ret["part"] = part.get(cpe[2])
return ret
@@ -2396,15 +2398,36 @@ def fqdns():
"""
# Provides:
# fqdns
- opt = {"fqdns": []}
- if __opts__.get(
- "enable_fqdns_grains",
- False
- if salt.utils.platform.is_windows() or salt.utils.platform.is_proxy()
- else True,
- ):
- opt = __salt__["network.fqdns"]()
- return opt
+
+ grains = {}
+ fqdns = set()
+
+ addresses = salt.utils.network.ip_addrs(
+ include_loopback=False, interface_data=_get_interfaces()
+ )
+ addresses.extend(
+ salt.utils.network.ip_addrs6(
+ include_loopback=False, interface_data=_get_interfaces()
+ )
+ )
+ err_message = "Exception during resolving address: %s"
+ for ip in addresses:
+ try:
+ name, aliaslist, addresslist = socket.gethostbyaddr(ip)
+ fqdns.update([socket.getfqdn(name)] + [als for als in aliaslist if salt.utils.network.is_fqdn(als)])
except socket.herror as err:
if err.errno in (0, HOST_NOT_FOUND, NO_DATA):
# No FQDN for this IP address, so we don't need to know this all the time.
@@ -2292,8 +2291,7 @@ def fqdns():
except (socket.error, socket.gaierror, socket.timeout) as err:
log.error(err_message, ip, err)
- grains['fqdns'] = sorted(list(fqdns))
- return grains
+ fqdns.update(
+ [socket.getfqdn(name)]
+ + [als for als in aliaslist if salt.utils.network.is_fqdn(als)]
+ )
+ except socket.herror as err:
+ if err.errno in (0, HOST_NOT_FOUND, NO_DATA):
+ # No FQDN for this IP address, so we don't need to know this all the time.
+ log.debug("Unable to resolve address %s: %s", ip, err)
+ else:
+ log.error(err_message, ip, err)
+ except (OSError, socket.gaierror, socket.timeout) as err:
+ log.error(err_message, ip, err)
+
+ return {"fqdns": sorted(list(fqdns))}
def ip_fqdn():
diff --git a/salt/utils/network.py b/salt/utils/network.py
index 906d1cb3bc..2ae2e213b7 100644
index b3e8db3886..dd7fceb91a 100644
--- a/salt/utils/network.py
+++ b/salt/utils/network.py
@@ -1958,3 +1958,15 @@ def parse_host_port(host_port):
raise ValueError('bad hostname: "{}"'.format(host))
return host, port
@@ -2208,3 +2208,19 @@ def filter_by_networks(values, networks):
raise ValueError("Do not know how to filter a {}".format(type(values)))
else:
return values
+
+
+def is_fqdn(hostname):
@ -74,42 +136,63 @@ index 906d1cb3bc..2ae2e213b7 100644
+ """
+
+ compliant = re.compile(r"(?!-)[A-Z\d\-\_]{1,63}(?<!-)$", re.IGNORECASE)
+ return "." in hostname and len(hostname) < 0xff and all(compliant.match(x) for x in hostname.rstrip(".").split("."))
+ return (
+ "." in hostname
+ and len(hostname) < 0xFF
+ and all(compliant.match(x) for x in hostname.rstrip(".").split("."))
+ )
diff --git a/tests/unit/grains/test_core.py b/tests/unit/grains/test_core.py
index c40595eb3f..ac03b57226 100644
index 7dbf34deac..d760e57a54 100644
--- a/tests/unit/grains/test_core.py
+++ b/tests/unit/grains/test_core.py
@@ -1046,9 +1046,9 @@ class CoreGrainsTestCase(TestCase, LoaderModuleMockMixin):
ret = {'fqdns': ['bluesniff.foo.bar', 'foo.bar.baz', 'rinzler.evil-corp.com']}
with patch.object(socket, 'gethostbyaddr', side_effect=reverse_resolv_mock):
fqdns = core.fqdns()
- self.assertIn('fqdns', fqdns)
- self.assertEqual(len(fqdns['fqdns']), len(ret['fqdns']))
- self.assertEqual(set(fqdns['fqdns']), set(ret['fqdns']))
@@ -1367,12 +1367,11 @@ class CoreGrainsTestCase(TestCase, LoaderModuleMockMixin):
("bluesniff.foo.bar", [], ["fe80::a8b2:93ff:dead:beef"]),
]
ret = {"fqdns": ["bluesniff.foo.bar", "foo.bar.baz", "rinzler.evil-corp.com"]}
- with patch.dict(core.__salt__, {"network.fqdns": salt.modules.network.fqdns}):
- with patch.object(socket, "gethostbyaddr", side_effect=reverse_resolv_mock):
- fqdns = core.fqdns()
- assert "fqdns" in fqdns
- assert len(fqdns["fqdns"]) == len(ret["fqdns"])
- assert set(fqdns["fqdns"]) == set(ret["fqdns"])
+ with patch.object(socket, "gethostbyaddr", side_effect=reverse_resolv_mock):
+ fqdns = core.fqdns()
+ assert "fqdns" in fqdns
+ assert len(fqdns['fqdns']) == len(ret['fqdns'])
+ assert set(fqdns['fqdns']) == set(ret['fqdns'])
+ assert len(fqdns["fqdns"]) == len(ret["fqdns"])
+ assert set(fqdns["fqdns"]) == set(ret["fqdns"])
@skipIf(not salt.utils.platform.is_linux(), 'System is not Linux')
@patch('salt.utils.network.ip_addrs', MagicMock(return_value=['1.2.3.4']))
@@ -1081,6 +1081,28 @@ class CoreGrainsTestCase(TestCase, LoaderModuleMockMixin):
mock_log.debug.assert_not_called()
mock_log.error.assert_called_once()
@skipIf(not salt.utils.platform.is_linux(), "System is not Linux")
@patch("salt.utils.network.ip_addrs", MagicMock(return_value=["1.2.3.4"]))
@@ -1413,6 +1412,40 @@ class CoreGrainsTestCase(TestCase, LoaderModuleMockMixin):
mock_log.debug.assert_called_once()
mock_log.error.assert_called()
+ @patch.object(salt.utils.platform, 'is_windows', MagicMock(return_value=False))
+ @patch('salt.utils.network.ip_addrs', MagicMock(return_value=['1.2.3.4', '5.6.7.8']))
+ @patch('salt.utils.network.ip_addrs6',
+ MagicMock(return_value=['fe80::a8b2:93ff:fe00:0', 'fe80::a8b2:93ff:dead:beef']))
+ @patch('salt.utils.network.socket.getfqdn', MagicMock(side_effect=lambda v: v)) # Just pass-through
+ @patch.object(salt.utils.platform, "is_windows", MagicMock(return_value=False))
+ @patch(
+ "salt.utils.network.ip_addrs", MagicMock(return_value=["1.2.3.4", "5.6.7.8"])
+ )
+ @patch(
+ "salt.utils.network.ip_addrs6",
+ MagicMock(return_value=["fe80::a8b2:93ff:fe00:0", "fe80::a8b2:93ff:dead:beef"]),
+ )
+ @patch(
+ "salt.utils.network.socket.getfqdn", MagicMock(side_effect=lambda v: v)
+ ) # Just pass-through
+ def test_fqdns_aliases(self):
+ '''
+ """
+ FQDNs aliases
+ '''
+ reverse_resolv_mock = [('foo.bar.baz', ["throwmeaway", "this.is.valid.alias"], ['1.2.3.4']),
+ ('rinzler.evil-corp.com', ["false-hostname", "badaliass"], ['5.6.7.8']),
+ ('foo.bar.baz', [], ['fe80::a8b2:93ff:fe00:0']),
+ ('bluesniff.foo.bar', ["alias.bluesniff.foo.bar"], ['fe80::a8b2:93ff:dead:beef'])]
+ with patch.object(socket, 'gethostbyaddr', side_effect=reverse_resolv_mock):
+ """
+ reverse_resolv_mock = [
+ ("foo.bar.baz", ["throwmeaway", "this.is.valid.alias"], ["1.2.3.4"]),
+ ("rinzler.evil-corp.com", ["false-hostname", "badaliass"], ["5.6.7.8"]),
+ ("foo.bar.baz", [], ["fe80::a8b2:93ff:fe00:0"]),
+ (
+ "bluesniff.foo.bar",
+ ["alias.bluesniff.foo.bar"],
+ ["fe80::a8b2:93ff:dead:beef"],
+ ),
+ ]
+ with patch.object(socket, "gethostbyaddr", side_effect=reverse_resolv_mock):
+ fqdns = core.fqdns()
+ assert "fqdns" in fqdns
+ for alias in ["this.is.valid.alias", "alias.bluesniff.foo.bar"]:
@ -119,21 +202,21 @@ index c40595eb3f..ac03b57226 100644
+ assert alias not in fqdns["fqdns"]
+
def test_core_virtual(self):
'''
"""
test virtual grain with cmd virt-what
diff --git a/tests/unit/utils/test_network.py b/tests/unit/utils/test_network.py
index 7dcca0166e..74479b0cae 100644
index 779fc0fc34..9a37a94d8f 100644
--- a/tests/unit/utils/test_network.py
+++ b/tests/unit/utils/test_network.py
@@ -701,3 +701,27 @@ class NetworkTestCase(TestCase):
# An exception is raised if unicode is passed to socket.getfqdn
minion_id = network.generate_minion_id()
assert minion_id != '', minion_id
@@ -1274,3 +1274,40 @@ class NetworkTestCase(TestCase):
),
):
self.assertEqual(network.get_fqhostname(), host)
+
+ def test_netlink_tool_remote_on(self):
+ with patch('subprocess.check_output', return_value=NETLINK_SS):
+ remotes = network._netlink_tool_remote_on('4505', 'remote')
+ self.assertEqual(remotes, set(['127.0.0.1', '::ffff:1.2.3.4']))
+ with patch("subprocess.check_output", return_value=NETLINK_SS):
+ remotes = network._netlink_tool_remote_on("4505", "remote")
+ self.assertEqual(remotes, {"127.0.0.1", "::ffff:1.2.3.4"})
+
+ def test_is_fqdn(self):
+ """
@ -141,8 +224,16 @@ index 7dcca0166e..74479b0cae 100644
+
+ :return: None
+ """
+ for fqdn in ["host.domain.com", "something.with.the.dots.still.ok", "UPPERCASE.ALSO.SHOULD.WORK",
+ "MiXeD.CaSe.AcCePtAbLe", "123.host.com", "host123.com", "some_underscore.com", "host-here.com"]:
+ for fqdn in [
+ "host.domain.com",
+ "something.with.the.dots.still.ok",
+ "UPPERCASE.ALSO.SHOULD.WORK",
+ "MiXeD.CaSe.AcCePtAbLe",
+ "123.host.com",
+ "host123.com",
+ "some_underscore.com",
+ "host-here.com",
+ ]:
+ assert network.is_fqdn(fqdn)
+
+ def test_is_not_fqdn(self):
@ -151,9 +242,14 @@ index 7dcca0166e..74479b0cae 100644
+
+ :return: None
+ """
+ for fqdn in ["hostname", "/some/path", "$variable.here", "verylonghostname.{}".format("domain" * 45)]:
+ for fqdn in [
+ "hostname",
+ "/some/path",
+ "$variable.here",
+ "verylonghostname.{}".format("domain" * 45),
+ ]:
+ assert not network.is_fqdn(fqdn)
--
2.16.4
2.29.2

View File

@ -1,4 +1,4 @@
From 8275c229fcca0e43513ea680e48cbf6263247b41 Mon Sep 17 00:00:00 2001
From 0ef6eed4f5e120a584843c33272066ba477feb3f Mon Sep 17 00:00:00 2001
From: Jochen Breuer <brejoc@gmail.com>
Date: Tue, 19 May 2020 10:34:35 +0200
Subject: [PATCH] info_installed works without status attr now
@ -8,39 +8,39 @@ detect if a package was installed or not. Now info_installed adds the
'status' for the 'lowpkg.info' request again.
---
salt/modules/aptpkg.py | 9 +++++++++
tests/unit/modules/test_aptpkg.py | 17 +++++++++++++++++
2 files changed, 26 insertions(+)
tests/unit/modules/test_aptpkg.py | 20 ++++++++++++++++++++
2 files changed, 29 insertions(+)
diff --git a/salt/modules/aptpkg.py b/salt/modules/aptpkg.py
index 2835d32263..765d69aff2 100644
index db0480b45d..e4a9872aad 100644
--- a/salt/modules/aptpkg.py
+++ b/salt/modules/aptpkg.py
@@ -2867,6 +2867,15 @@ def info_installed(*names, **kwargs):
failhard = kwargs.pop('failhard', True)
kwargs.pop('errors', None) # Only for compatibility with RPM
attr = kwargs.pop('attr', None) # Package attributes to return
@@ -2923,6 +2923,15 @@ def info_installed(*names, **kwargs):
failhard = kwargs.pop("failhard", True)
kwargs.pop("errors", None) # Only for compatibility with RPM
attr = kwargs.pop("attr", None) # Package attributes to return
+
+ # status is needed to see if a package is installed. So we have to add it,
+ # even if it's excluded via attr parameter. Otherwise all packages are
+ # returned.
+ if attr:
+ attr_list = set(attr.split(','))
+ attr_list.add('status')
+ attr = ','.join(attr_list)
+ attr_list = set(attr.split(","))
+ attr_list.add("status")
+ attr = ",".join(attr_list)
+
all_versions = kwargs.pop('all_versions', False) # This is for backward compatible structure only
if kwargs:
all_versions = kwargs.pop(
"all_versions", False
) # This is for backward compatible structure only
diff --git a/tests/unit/modules/test_aptpkg.py b/tests/unit/modules/test_aptpkg.py
index ba1d874e69..b0193aeaf7 100644
index 3c9744e224..51dfce29eb 100644
--- a/tests/unit/modules/test_aptpkg.py
+++ b/tests/unit/modules/test_aptpkg.py
@@ -257,6 +257,23 @@ class AptPkgTestCase(TestCase, LoaderModuleMockMixin):
self.assertEqual(aptpkg.info_installed('wget'), installed)
@@ -297,6 +297,26 @@ class AptPkgTestCase(TestCase, LoaderModuleMockMixin):
self.assertEqual(aptpkg.info_installed("wget"), installed)
self.assertEqual(len(aptpkg.info_installed()), 1)
+ def test_info_installed_attr_without_status(self):
+ '''
+ """
+ Test info_installed 'attr' for inclusion of 'status' attribute.
+
+ Since info_installed should only return installed packages, we need to
@ -49,17 +49,20 @@ index ba1d874e69..b0193aeaf7 100644
+ to check if the package is installed and would return everything.
+
+ :return:
+ '''
+ with patch('salt.modules.aptpkg.__salt__', {'lowpkg.info': MagicMock(return_value=LOWPKG_INFO)}) as wget_lowpkg:
+ ret = aptpkg.info_installed('wget', attr='version')
+ calls = wget_lowpkg['lowpkg.info'].call_args_list.pop()
+ self.assertIn('status', calls.kwargs['attr'])
+ self.assertIn('version', calls.kwargs['attr'])
+ """
+ with patch(
+ "salt.modules.aptpkg.__salt__",
+ {"lowpkg.info": MagicMock(return_value=LOWPKG_INFO)},
+ ) as wget_lowpkg:
+ ret = aptpkg.info_installed("wget", attr="version")
+ calls = wget_lowpkg["lowpkg.info"].call_args_list.pop()
+ self.assertIn("status", calls.kwargs["attr"])
+ self.assertIn("version", calls.kwargs["attr"])
+
@patch('salt.modules.aptpkg.__salt__', {'lowpkg.info': MagicMock(return_value=LOWPKG_INFO)})
def test_info_installed_attr(self):
'''
@patch(
"salt.modules.aptpkg.__salt__",
{"lowpkg.info": MagicMock(return_value=LOWPKG_INFO)},
--
2.27.0
2.29.2

View File

@ -1,18 +1,23 @@
From c750e854c637e405a788f91d5b9a7bd1a0a6edfd Mon Sep 17 00:00:00 2001
From bb2070d4f4e8fbb5a963c521d61feb7419abdec1 Mon Sep 17 00:00:00 2001
From: ed lane <ed.lane.0@gmail.com>
Date: Thu, 30 Aug 2018 06:07:08 -0600
Subject: [PATCH] Integration of MSI authentication with azurearm cloud
driver (#105)
---
salt/cloud/clouds/azurearm.py | 7 ++++++-
1 file changed, 6 insertions(+), 1 deletion(-)
salt/cloud/clouds/azurearm.py | 98 +++++++++++++++--------------------
1 file changed, 43 insertions(+), 55 deletions(-)
diff --git a/salt/cloud/clouds/azurearm.py b/salt/cloud/clouds/azurearm.py
index 047fdac0a9..2c1fa04ae8 100644
index 54fc7b497b..8b9254cecb 100644
--- a/salt/cloud/clouds/azurearm.py
+++ b/salt/cloud/clouds/azurearm.py
@@ -58,6 +58,9 @@ The Azure ARM cloud module is used to control access to Microsoft Azure Resource
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
"""
Azure ARM Cloud Module
======================
@@ -61,6 +60,9 @@ The Azure ARM cloud module is used to control access to Microsoft Azure Resource
virtual machine type will be "Windows". Only set this parameter on profiles which install Windows operating systems.
@ -22,25 +27,361 @@ index 047fdac0a9..2c1fa04ae8 100644
Example ``/etc/salt/cloud.providers`` or
``/etc/salt/cloud.providers.d/azure.conf`` configuration:
@@ -258,7 +261,8 @@ def get_configured_provider():
provider = __is_provider_configured(
__opts__,
__active_provider_name__ or __virtualname__,
- ('subscription_id', 'username', 'password')
+ required_keys=('subscription_id', 'username', 'password'),
+ log_message=False
@@ -91,7 +93,6 @@ Example ``/etc/salt/cloud.providers`` or
# pylint: disable=wrong-import-position,wrong-import-order
-from __future__ import absolute_import, print_function, unicode_literals
import importlib
import logging
@@ -121,7 +122,6 @@ from salt.exceptions import (
# Salt libs
from salt.ext import six
-# Import 3rd-party libs
HAS_LIBS = False
try:
import azure.mgmt.compute.models as compute_models
@@ -179,7 +179,7 @@ def get_api_versions(call=None, kwargs=None): # pylint: disable=unused-argument
)
return provider
@@ -301,6 +305,7 @@ def get_conn(client_type):
for resource in provider_query.resource_types:
- if six.text_type(resource.resource_type) == kwargs["resource_type"]:
+ if str(resource.resource_type) == kwargs["resource_type"]:
resource_dict = resource.as_dict()
api_versions = resource_dict["api_versions"]
except CloudError as exc:
@@ -263,6 +263,7 @@ def get_conn(client_type):
)
if tenant is not None:
+ # using Service Principle style authentication...
client_id = config.get_cloud_config_value(
'client_id',
get_configured_provider(), __opts__, search_global=False
"client_id", get_configured_provider(), __opts__, search_global=False
)
@@ -319,7 +320,7 @@ def avail_locations(call=None):
)
locations = []
for resource in provider_query.resource_types:
- if six.text_type(resource.resource_type) == "virtualMachines":
+ if str(resource.resource_type) == "virtualMachines":
resource_dict = resource.as_dict()
locations = resource_dict["locations"]
for location in locations:
@@ -399,7 +400,7 @@ def avail_images(call=None):
results = pool.map_async(_get_publisher_images, publishers)
results.wait()
- ret = {k: v for result in results.get() for k, v in six.iteritems(result)}
+ ret = {k: v for result in results.get() for k, v in result.items()}
return ret
@@ -529,7 +530,7 @@ def list_nodes_full(call=None):
results = pool.map_async(_get_node_info, nodes)
results.wait()
- group_ret = {k: v for result in results.get() for k, v in six.iteritems(result)}
+ group_ret = {k: v for result in results.get() for k, v in result.items()}
ret.update(group_ret)
return ret
@@ -707,7 +708,7 @@ def create_network_interface(call=None, kwargs=None):
)
if kwargs.get("iface_name") is None:
- kwargs["iface_name"] = "{0}-iface0".format(vm_["name"])
+ kwargs["iface_name"] = "{}-iface0".format(vm_["name"])
try:
subnet_obj = netconn.subnets.get(
@@ -717,7 +718,7 @@ def create_network_interface(call=None, kwargs=None):
)
except CloudError as exc:
raise SaltCloudSystemExit(
- '{0} (Resource Group: "{1}", VNET: "{2}", Subnet: "{3}")'.format(
+ '{} (Resource Group: "{}", VNET: "{}", Subnet: "{}")'.format(
exc.message,
kwargs["network_resource_group"],
kwargs["network"],
@@ -740,11 +741,11 @@ def create_network_interface(call=None, kwargs=None):
)
pool_ids.append({"id": lbbep_data.as_dict()["id"]})
except CloudError as exc:
- log.error("There was a cloud error: %s", six.text_type(exc))
+ log.error("There was a cloud error: %s", str(exc))
except KeyError as exc:
log.error(
"There was an error getting the Backend Pool ID: %s",
- six.text_type(exc),
+ str(exc),
)
ip_kwargs["load_balancer_backend_address_pools"] = pool_ids
@@ -755,7 +756,7 @@ def create_network_interface(call=None, kwargs=None):
ip_kwargs["private_ip_allocation_method"] = IPAllocationMethod.dynamic
if kwargs.get("allocate_public_ip") is True:
- pub_ip_name = "{0}-ip".format(kwargs["iface_name"])
+ pub_ip_name = "{}-ip".format(kwargs["iface_name"])
poller = netconn.public_ip_addresses.create_or_update(
resource_group_name=kwargs["resource_group"],
public_ip_address_name=pub_ip_name,
@@ -773,11 +774,11 @@ def create_network_interface(call=None, kwargs=None):
)
if pub_ip_data.ip_address: # pylint: disable=no-member
ip_kwargs["public_ip_address"] = PublicIPAddress(
- id=six.text_type(pub_ip_data.id), # pylint: disable=no-member
+ id=str(pub_ip_data.id), # pylint: disable=no-member
)
ip_configurations = [
NetworkInterfaceIPConfiguration(
- name="{0}-ip".format(kwargs["iface_name"]),
+ name="{}-ip".format(kwargs["iface_name"]),
subnet=subnet_obj,
**ip_kwargs
)
@@ -790,7 +791,7 @@ def create_network_interface(call=None, kwargs=None):
raise ValueError("Timed out waiting for public IP Address.")
time.sleep(5)
else:
- priv_ip_name = "{0}-ip".format(kwargs["iface_name"])
+ priv_ip_name = "{}-ip".format(kwargs["iface_name"])
ip_configurations = [
NetworkInterfaceIPConfiguration(
name=priv_ip_name, subnet=subnet_obj, **ip_kwargs
@@ -900,7 +901,7 @@ def request_instance(vm_):
)
vm_["iface_id"] = iface_data["id"]
- disk_name = "{0}-vol0".format(vm_["name"])
+ disk_name = "{}-vol0".format(vm_["name"])
vm_username = config.get_cloud_config_value(
"ssh_username",
@@ -922,8 +923,8 @@ def request_instance(vm_):
ssh_publickeyfile_contents = spkc_.read()
except Exception as exc: # pylint: disable=broad-except
raise SaltCloudConfigError(
- "Failed to read ssh publickey file '{0}': "
- "{1}".format(ssh_publickeyfile, exc.args[-1])
+ "Failed to read ssh publickey file '{}': "
+ "{}".format(ssh_publickeyfile, exc.args[-1])
)
disable_password_authentication = config.get_cloud_config_value(
@@ -941,7 +942,7 @@ def request_instance(vm_):
if not win_installer and ssh_publickeyfile_contents is not None:
sshpublickey = SshPublicKey(
key_data=ssh_publickeyfile_contents,
- path="/home/{0}/.ssh/authorized_keys".format(vm_username),
+ path="/home/{}/.ssh/authorized_keys".format(vm_username),
)
sshconfiguration = SshConfiguration(public_keys=[sshpublickey],)
linuxconfiguration = LinuxConfiguration(
@@ -991,9 +992,9 @@ def request_instance(vm_):
availability_set = config.get_cloud_config_value(
"availability_set", vm_, __opts__, search_global=False, default=None
)
- if availability_set is not None and isinstance(availability_set, six.string_types):
+ if availability_set is not None and isinstance(availability_set, str):
availability_set = {
- "id": "/subscriptions/{0}/resourceGroups/{1}/providers/Microsoft.Compute/availabilitySets/{2}".format(
+ "id": "/subscriptions/{}/resourceGroups/{}/providers/Microsoft.Compute/availabilitySets/{}".format(
subscription_id, vm_["resource_group"], availability_set
)
}
@@ -1004,7 +1005,7 @@ def request_instance(vm_):
storage_endpoint_suffix = cloud_env.suffixes.storage_endpoint
- if isinstance(vm_.get("volumes"), six.string_types):
+ if isinstance(vm_.get("volumes"), str):
volumes = salt.utils.yaml.safe_load(vm_["volumes"])
else:
volumes = vm_.get("volumes")
@@ -1018,16 +1019,14 @@ def request_instance(vm_):
lun = 0
luns = []
for volume in volumes:
- if isinstance(volume, six.string_types):
+ if isinstance(volume, str):
volume = {"name": volume}
volume.setdefault(
"name",
volume.get(
"name",
- volume.get(
- "name", "{0}-datadisk{1}".format(vm_["name"], six.text_type(lun))
- ),
+ volume.get("name", "{}-datadisk{}".format(vm_["name"], str(lun))),
),
)
@@ -1050,7 +1049,7 @@ def request_instance(vm_):
del volume["media_link"]
elif volume.get("vhd") == "unmanaged":
volume["vhd"] = VirtualHardDisk(
- uri="https://{0}.blob.{1}/vhds/{2}-datadisk{3}.vhd".format(
+ uri="https://{}.blob.{}/vhds/{}-datadisk{}.vhd".format(
vm_["storage_account"],
storage_endpoint_suffix,
vm_["name"],
@@ -1090,7 +1089,7 @@ def request_instance(vm_):
create_option=DiskCreateOptionTypes.from_image,
name=disk_name,
vhd=VirtualHardDisk(
- uri="https://{0}.blob.{1}/vhds/{2}.vhd".format(
+ uri="https://{}.blob.{}/vhds/{}.vhd".format(
vm_["storage_account"], storage_endpoint_suffix, disk_name,
),
),
@@ -1209,7 +1208,7 @@ def request_instance(vm_):
__utils__["cloud.fire_event"](
"event",
"requesting instance",
- "salt/cloud/{0}/requesting".format(vm_["name"]),
+ "salt/cloud/{}/requesting".format(vm_["name"]),
args=__utils__["cloud.filter_event"](
"requesting", vm_, ["name", "profile", "provider", "driver"]
),
@@ -1260,7 +1259,7 @@ def create(vm_):
__utils__["cloud.fire_event"](
"event",
"starting create",
- "salt/cloud/{0}/creating".format(vm_["name"]),
+ "salt/cloud/{}/creating".format(vm_["name"]),
args=__utils__["cloud.filter_event"](
"creating", vm_, ["name", "profile", "provider", "driver"]
),
@@ -1278,9 +1277,7 @@ def create(vm_):
vm_request = request_instance(vm_=vm_)
if not vm_request or "error" in vm_request:
- err_message = "Error creating VM {0}! ({1})".format(
- vm_["name"], six.text_type(vm_request)
- )
+ err_message = "Error creating VM {}! ({})".format(vm_["name"], str(vm_request))
log.error(err_message)
raise SaltCloudSystemExit(err_message)
@@ -1322,7 +1319,7 @@ def create(vm_):
try:
log.warning(exc)
finally:
- raise SaltCloudSystemExit(six.text_type(exc))
+ raise SaltCloudSystemExit(str(exc))
vm_["ssh_host"] = data
if not vm_.get("ssh_username"):
@@ -1341,7 +1338,7 @@ def create(vm_):
__utils__["cloud.fire_event"](
"event",
"created instance",
- "salt/cloud/{0}/created".format(vm_["name"]),
+ "salt/cloud/{}/created".format(vm_["name"]),
args=__utils__["cloud.filter_event"](
"created", vm_, ["name", "profile", "provider", "driver"]
),
@@ -1548,9 +1545,7 @@ def _get_cloud_environment():
cloud_env = getattr(cloud_env_module, cloud_environment or "AZURE_PUBLIC_CLOUD")
except (AttributeError, ImportError):
raise SaltCloudSystemExit(
- "The azure {0} cloud environment is not available.".format(
- cloud_environment
- )
+ "The azure {} cloud environment is not available.".format(cloud_environment)
)
return cloud_env
@@ -1585,7 +1580,7 @@ def _get_block_blob_service(kwargs=None):
resource_group, storage_account
)
storage_keys = {v.key_name: v.value for v in storage_keys.keys}
- storage_key = next(six.itervalues(storage_keys))
+ storage_key = next(iter(storage_keys.values()))
cloud_env = _get_cloud_environment()
@@ -1620,7 +1615,7 @@ def list_blobs(call=None, kwargs=None): # pylint: disable=unused-argument
"server_encrypted": blob.properties.server_encrypted,
}
except Exception as exc: # pylint: disable=broad-except
- log.warning(six.text_type(exc))
+ log.warning(str(exc))
return ret
@@ -1655,9 +1650,7 @@ def delete_managed_disk(call=None, kwargs=None): # pylint: disable=unused-argum
compconn.disks.delete(kwargs["resource_group"], kwargs["blob"])
except Exception as exc: # pylint: disable=broad-except
log.error(
- "Error deleting managed disk %s - %s",
- kwargs.get("blob"),
- six.text_type(exc),
+ "Error deleting managed disk %s - %s", kwargs.get("blob"), str(exc),
)
return False
@@ -1834,7 +1827,7 @@ def create_or_update_vmextension(
except CloudError as exc:
__utils__["azurearm.log_cloud_error"](
"compute",
- "Error attempting to create the VM extension: {0}".format(exc.message),
+ "Error attempting to create the VM extension: {}".format(exc.message),
)
ret = {"error": exc.message}
@@ -1881,11 +1874,9 @@ def stop(name, call=None):
ret = {"error": exc.message}
if not ret:
__utils__["azurearm.log_cloud_error"](
- "compute", "Unable to find virtual machine with name: {0}".format(name)
+ "compute", "Unable to find virtual machine with name: {}".format(name)
)
- ret = {
- "error": "Unable to find virtual machine with name: {0}".format(name)
- }
+ ret = {"error": "Unable to find virtual machine with name: {}".format(name)}
else:
try:
instance = compconn.virtual_machines.deallocate(
@@ -1896,7 +1887,7 @@ def stop(name, call=None):
ret = vm_result.as_dict()
except CloudError as exc:
__utils__["azurearm.log_cloud_error"](
- "compute", "Error attempting to stop {0}: {1}".format(name, exc.message)
+ "compute", "Error attempting to stop {}: {}".format(name, exc.message)
)
ret = {"error": exc.message}
@@ -1945,11 +1936,9 @@ def start(name, call=None):
ret = {"error": exc.message}
if not ret:
__utils__["azurearm.log_cloud_error"](
- "compute", "Unable to find virtual machine with name: {0}".format(name)
+ "compute", "Unable to find virtual machine with name: {}".format(name)
)
- ret = {
- "error": "Unable to find virtual machine with name: {0}".format(name)
- }
+ ret = {"error": "Unable to find virtual machine with name: {}".format(name)}
else:
try:
instance = compconn.virtual_machines.start(
@@ -1960,8 +1949,7 @@ def start(name, call=None):
ret = vm_result.as_dict()
except CloudError as exc:
__utils__["azurearm.log_cloud_error"](
- "compute",
- "Error attempting to start {0}: {1}".format(name, exc.message),
+ "compute", "Error attempting to start {}: {}".format(name, exc.message),
)
ret = {"error": exc.message}
--
2.16.4
2.29.2

View File

@ -1,4 +1,4 @@
From 1ca1bb7c01b1e589147c32b16eda719537ab5b62 Mon Sep 17 00:00:00 2001
From c9268ec731371cdd7b2fc129ad111d9f73800752 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Pablo=20Su=C3=A1rez=20Hern=C3=A1ndez?=
<psuarezhernandez@suse.com>
Date: Tue, 22 Sep 2020 15:15:51 +0100
@ -8,16 +8,16 @@ Subject: [PATCH] Invalidate file list cache when cache file modified
Add test_future_file_list_cache_file_ignored unit test
---
salt/fileserver/__init__.py | 2 +-
tests/unit/test_fileserver.py | 47 +++++++++++++++++++++++++++++++++--
2 files changed, 46 insertions(+), 3 deletions(-)
tests/unit/test_fileserver.py | 53 +++++++++++++++++++++++------------
2 files changed, 36 insertions(+), 19 deletions(-)
diff --git a/salt/fileserver/__init__.py b/salt/fileserver/__init__.py
index 919987e2fc..1b8de51bdc 100644
index c8c417168f..b9e345d8c3 100644
--- a/salt/fileserver/__init__.py
+++ b/salt/fileserver/__init__.py
@@ -142,7 +142,7 @@ def check_file_list_cache(opts, form, list_cache, w_lock):
'file=%s mtime=%s current_time=%s',
list_cache, current_time, file_mtime
@@ -132,7 +132,7 @@ def check_file_list_cache(opts, form, list_cache, w_lock):
current_time,
file_mtime,
)
- age = 0
+ age = -1
@ -25,46 +25,58 @@ index 919987e2fc..1b8de51bdc 100644
age = current_time - file_mtime
else:
diff --git a/tests/unit/test_fileserver.py b/tests/unit/test_fileserver.py
index d38e22c8e1..b92b32947b 100644
index 0bf30ee5cc..a1087bf4b0 100644
--- a/tests/unit/test_fileserver.py
+++ b/tests/unit/test_fileserver.py
@@ -6,11 +6,17 @@
# Import Python libs
from __future__ import absolute_import, print_function, unicode_literals
@@ -1,14 +1,15 @@
-# -*- coding: utf-8 -*-
"""
:codeauthor: Joao Mesquita <jmesquita@sangoma.com>
"""
-# Import Salt Testing libs
-from tests.support.unit import TestCase
-# Import Python libs
-from __future__ import absolute_import, print_function, unicode_literals
-from salt import fileserver
+import datetime
+import os
+import time
-# Import Salt Testing libs
+import salt.utils.files
from salt import fileserver
+# Import Salt Testing libs
+from salt import fileserver
+from tests.support.helpers import with_tempdir
+from tests.support.unit import TestCase
+
from tests.support.mixins import LoaderModuleMockMixin
from tests.support.unit import TestCase
class MapDiffTestCase(TestCase):
def test_diff_with_diffent_keys(self):
@@ -28,3 +34,40 @@ class MapDiffTestCase(TestCase):
map1 = {'file1': 12345}
map2 = {'file1': 1234}
@@ -31,22 +32,38 @@ class MapDiffTestCase(TestCase):
assert fileserver.diff_mtime_map(map1, map2) is True
+
+
-class VCSBackendWhitelistCase(TestCase, LoaderModuleMockMixin):
+class VCSBackendWhitelistCase(TestCase):
+ def setup_loader_modules(self):
+ return {fileserver: {}}
+
def setup_loader_modules(self):
return {fileserver: {}}
- def test_whitelist(self):
+ @with_tempdir()
+ def test_future_file_list_cache_file_ignored(self, cachedir):
+ opts = {
opts = {
- "fileserver_backend": ["roots", "git", "hgfs", "svn"],
+ "fileserver_backend": ["roots"],
+ "cachedir": cachedir,
+ "extension_modules": "",
+ }
"extension_modules": "",
}
- fs = fileserver.Fileserver(opts)
- assert fs.servers.whitelist == [
- "git",
- "gitfs",
- "hg",
- "hgfs",
- "svn",
- "svnfs",
- "roots",
- ], fs.servers.whitelist
+
+ back_cachedir = os.path.join(cachedir, "file_lists/roots")
+ os.makedirs(os.path.join(back_cachedir))
@ -90,6 +102,6 @@ index d38e22c8e1..b92b32947b 100644
+ ret[1] is True
+ ), "Cache file list cache file is not refreshed when future modification time"
--
2.28.0
2.29.2

Some files were not shown because too many files have changed in this diff Show More