Accepting request 693686 from systemsmanagement:saltstack

- No longer limiting Python3 version to <3.7

- Async batch implementation
- Added:
  * async-batch-implementation.patch

- Update to Salt 2019.2.0 release
  For further information see:
  https://docs.saltstack.com/en/latest/topics/releases/2019.2.0.html
- Added:
  * add-virt.all_capabilities.patch
  * add-virt.volume_infos-and-virt.volume_delete.patch
  * don-t-call-zypper-with-more-than-one-no-refresh.patch
  * include-aliases-in-the-fqdns-grains.patch
  * temporary-fix-extend-the-whitelist-of-allowed-comman.patch
- Removed:
  * accounting-for-when-files-in-an-archive-contain-non-.patch
  * add-engine-relaying-libvirt-events.patch
  * add-other-attribute-to-gecos-fields-to-avoid-inconsi.patch
  * add-support-for-python-3.7.patch
  * align-suse-salt-master.service-limitnofiles-limit-wi.patch
  * avoid-incomprehensive-message-if-crashes.patch
  * change-stringio-import-in-python2-to-import-the-clas.patch
  * decode-file-contents-for-python2-bsc-1102013.patch
  * do-not-override-jid-on-returners-only-sending-back-t.patch
  * don-t-error-on-retcode-0-in-libcrypto.openssl_init_c.patch
  * feat-add-grain-for-all-fqdns.patch
  * fix-async-call-to-process-manager.patch
  * fix-decrease-loglevel-when-unable-to-resolve-addr.patch
  * fix-deprecation-warning-bsc-1095507.patch

OBS-URL: https://build.opensuse.org/request/show/693686
OBS-URL: https://build.opensuse.org/package/show/openSUSE:Factory/salt?expand=0&rev=82
This commit is contained in:
Dominique Leuenberger 2019-04-15 09:51:52 +00:00 committed by Git OBS Bridge
commit e7cf0c1932
72 changed files with 4400 additions and 9293 deletions

View File

@@ -1 +1 @@
2460cb78e6bda580f2567781e060a3e6c6ba25de
65afa65b0d69f90c1cd716474cdddcdc98751274

View File

@@ -3,7 +3,7 @@
<param name="url">https://github.com/openSUSE/salt-packaging.git</param>
<param name="subdir">salt</param>
<param name="filename">package</param>
<param name="revision">2018.3.2</param>
<param name="revision">2019.2.0</param>
<param name="scm">git</param>
</service>
<service name="extract_file" mode="disabled">
@@ -12,8 +12,8 @@
</service>
<service name="download_url" mode="disabled">
<param name="host">codeload.github.com</param>
<param name="path">saltstack/salt/tar.gz/v2018.3.2</param>
<param name="filename">v2018.3.2.tar.gz</param>
<param name="path">openSUSE/salt/tar.gz/v2019.2.0-suse</param>
<param name="filename">v2019.2.0.tar.gz</param>
</service>
<service name="update_changelog" mode="disabled"></service>
</services>

View File

@@ -1,158 +0,0 @@
From 5305ee8bf07e40dc54aefcbb92016ff868135749 Mon Sep 17 00:00:00 2001
From: "Gareth J. Greenaway" <gareth@wiked.org>
Date: Wed, 9 May 2018 09:33:58 -0700
Subject: [PATCH] Accounting for when files in an archive contain
non-ascii characters
Updating integration/modules/test_archive to include filenames with unicode characters.
only convert to bytes when using Python2
Updating with requested changes.
Ensure member names are decoded before adding to various lists.
Adding a test to ensure archive.list returns the right results when a tar file contains a file with unicode in it's name.
---
salt/modules/archive.py | 13 +++---
salt/states/archive.py | 4 +-
tests/integration/modules/test_archive.py | 52 ++++++++++++++++++++++-
3 files changed, 59 insertions(+), 10 deletions(-)
diff --git a/salt/modules/archive.py b/salt/modules/archive.py
index 48f0efa18e..76cd3eeb97 100644
--- a/salt/modules/archive.py
+++ b/salt/modules/archive.py
@@ -186,12 +186,13 @@ def list_(name,
else {'fileobj': cached.stdout, 'mode': 'r|'}
with contextlib.closing(tarfile.open(**open_kwargs)) as tar_archive:
for member in tar_archive.getmembers():
+ _member = salt.utils.data.decode(member.name)
if member.issym():
- links.append(member.name)
+ links.append(_member)
elif member.isdir():
- dirs.append(member.name + '/')
+ dirs.append(_member + '/')
else:
- files.append(member.name)
+ files.append(_member)
return dirs, files, links
except tarfile.ReadError:
@@ -410,9 +411,9 @@ def list_(name,
item.sort()
if verbose:
- ret = {'dirs': sorted(dirs),
- 'files': sorted(files),
- 'links': sorted(links)}
+ ret = {'dirs': sorted(salt.utils.data.decode_list(dirs)),
+ 'files': sorted(salt.utils.data.decode_list(files)),
+ 'links': sorted(salt.utils.data.decode_list(links))}
ret['top_level_dirs'] = [x for x in ret['dirs']
if x.count('/') == 1]
ret['top_level_files'] = [x for x in ret['files']
diff --git a/salt/states/archive.py b/salt/states/archive.py
index 847c5e9914..6838b2202d 100644
--- a/salt/states/archive.py
+++ b/salt/states/archive.py
@@ -1090,7 +1090,7 @@ def extracted(name,
and not stat.S_ISDIR(x)),
(contents['links'], stat.S_ISLNK)):
for path in path_list:
- full_path = os.path.join(name, path)
+ full_path = salt.utils.path.join(name, path)
try:
path_mode = os.lstat(full_path.rstrip(os.sep)).st_mode
if not func(path_mode):
@@ -1259,7 +1259,7 @@ def extracted(name,
if options is None:
try:
with closing(tarfile.open(cached, 'r')) as tar:
- tar.extractall(name)
+ tar.extractall(salt.utils.stringutils.to_str(name))
files = tar.getnames()
if trim_output:
files = files[:trim_output]
diff --git a/tests/integration/modules/test_archive.py b/tests/integration/modules/test_archive.py
index 59fe2f5f61..4301b9e3b0 100644
--- a/tests/integration/modules/test_archive.py
+++ b/tests/integration/modules/test_archive.py
@@ -47,7 +47,7 @@ class ArchiveTest(ModuleCase):
self.arch = os.path.join(self.base_path, 'archive.{0}'.format(arch_fmt))
self.dst = os.path.join(self.base_path, '{0}_dst_dir'.format(arch_fmt))
- def _set_up(self, arch_fmt):
+ def _set_up(self, arch_fmt, unicode_filename=False):
'''
Create source file tree and destination directory
@@ -62,7 +62,11 @@ class ArchiveTest(ModuleCase):
# Create source
os.makedirs(self.src)
- with salt.utils.files.fopen(os.path.join(self.src, 'file'), 'w') as theorem:
+ if unicode_filename:
+ filename = 'file®'
+ else:
+ filename = 'file'
+ with salt.utils.files.fopen(os.path.join(self.src, filename), 'w') as theorem:
theorem.write(textwrap.dedent(salt.utils.stringutils.to_str(r'''\
Compression theorem of computational complexity theory:
@@ -150,6 +154,50 @@ class ArchiveTest(ModuleCase):
self._tear_down()
+ @skipIf(not salt.utils.path.which('tar'), 'Cannot find tar executable')
+ def test_tar_pack_unicode(self):
+ '''
+ Validate using the tar function to create archives
+ '''
+ self._set_up(arch_fmt='tar', unicode_filename=True)
+
+ # Test create archive
+ ret = self.run_function('archive.tar', ['-cvf', self.arch], sources=self.src)
+ self.assertTrue(isinstance(ret, list), six.text_type(ret))
+ self._assert_artifacts_in_ret(ret)
+
+ self._tear_down()
+
+ @skipIf(not salt.utils.path.which('tar'), 'Cannot find tar executable')
+ def test_tar_unpack_unicode(self):
+ '''
+ Validate using the tar function to extract archives
+ '''
+ self._set_up(arch_fmt='tar', unicode_filename=True)
+ self.run_function('archive.tar', ['-cvf', self.arch], sources=self.src)
+
+ # Test extract archive
+ ret = self.run_function('archive.tar', ['-xvf', self.arch], dest=self.dst)
+ self.assertTrue(isinstance(ret, list), six.text_type(ret))
+ self._assert_artifacts_in_ret(ret)
+
+ self._tear_down()
+
+ @skipIf(not salt.utils.path.which('tar'), 'Cannot find tar executable')
+ def test_tar_list_unicode(self):
+ '''
+ Validate using the tar function to extract archives
+ '''
+ self._set_up(arch_fmt='tar', unicode_filename=True)
+ self.run_function('archive.tar', ['-cvf', self.arch], sources=self.src)
+
+ # Test list archive
+ ret = self.run_function('archive.list', name=self.arch)
+ self.assertTrue(isinstance(ret, list), six.text_type(ret))
+ self._assert_artifacts_in_ret(ret)
+
+ self._tear_down()
+
@skipIf(not salt.utils.path.which('gzip'), 'Cannot find gzip executable')
def test_gzip(self):
'''
--
2.17.1

View File

@@ -1,4 +1,4 @@
From 9de54cf6f7d8d6da4212842fef8c4c658a2a9b9c Mon Sep 17 00:00:00 2001
From c059d617a77184c3bec8159d5197355f3cab8c4e Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Pablo=20Su=C3=A1rez=20Hern=C3=A1ndez?=
<psuarezhernandez@suse.com>
Date: Mon, 14 May 2018 11:33:13 +0100
@@ -19,259 +19,14 @@ Refactor: use dict.setdefault instead if-else statement
Allow removing only specific package versions with zypper and yum
---
salt/modules/rpm.py | 18 ++++++++---
salt/modules/yumpkg.py | 49 ++++++++++++++++++++++--------
salt/modules/zypper.py | 64 ++++++++++++++++++++++++++++++++-------
salt/states/pkg.py | 33 +++++++++++++++++++-
tests/unit/modules/test_yumpkg.py | 50 ++++++++++++++++++++++++++++++
tests/unit/modules/test_zypper.py | 50 ++++++++++++++++++++++++++++++
6 files changed, 236 insertions(+), 28 deletions(-)
salt/states/pkg.py | 21 +++++++++++++++++++++
1 file changed, 21 insertions(+)
diff --git a/salt/modules/rpm.py b/salt/modules/rpm.py
index d065f1e2d9..3683234f59 100644
--- a/salt/modules/rpm.py
+++ b/salt/modules/rpm.py
@@ -453,7 +453,7 @@ def diff(package, path):
return res
-def info(*packages, **attr):
+def info(*packages, **kwargs):
'''
Return a detailed package(s) summary information.
If no packages specified, all packages will be returned.
@@ -467,6 +467,9 @@ def info(*packages, **attr):
version, vendor, release, build_date, build_date_time_t, install_date, install_date_time_t,
build_host, group, source_rpm, arch, epoch, size, license, signature, packager, url, summary, description.
+ :param all_versions:
+ Return information for all installed versions of the packages
+
:return:
CLI example:
@@ -476,7 +479,9 @@ def info(*packages, **attr):
salt '*' lowpkg.info apache2 bash
salt '*' lowpkg.info apache2 bash attr=version
salt '*' lowpkg.info apache2 bash attr=version,build_date_iso,size
+ salt '*' lowpkg.info apache2 bash attr=version,build_date_iso,size all_versions=True
'''
+ all_versions = kwargs.get('all_versions', False)
# LONGSIZE is not a valid tag for all versions of rpm. If LONGSIZE isn't
# available, then we can just use SIZE for older versions. See Issue #31366.
rpm_tags = __salt__['cmd.run_stdout'](
@@ -516,7 +521,7 @@ def info(*packages, **attr):
"edition": "edition: %|EPOCH?{%{EPOCH}:}|%{VERSION}-%{RELEASE}\\n",
}
- attr = attr.get('attr', None) and attr['attr'].split(",") or None
+ attr = kwargs.get('attr', None) and kwargs['attr'].split(",") or None
query = list()
if attr:
for attr_k in attr:
@@ -610,8 +615,13 @@ def info(*packages, **attr):
if pkg_name.startswith('gpg-pubkey'):
continue
if pkg_name not in ret:
- ret[pkg_name] = pkg_data.copy()
- del ret[pkg_name]['edition']
+ if all_versions:
+ ret[pkg_name] = [pkg_data.copy()]
+ else:
+ ret[pkg_name] = pkg_data.copy()
+ del ret[pkg_name]['edition']
+ elif all_versions:
+ ret[pkg_name].append(pkg_data.copy())
return ret
diff --git a/salt/modules/yumpkg.py b/salt/modules/yumpkg.py
index 747142264d..9ce4926790 100644
--- a/salt/modules/yumpkg.py
+++ b/salt/modules/yumpkg.py
@@ -994,31 +994,39 @@ def list_downloaded():
return ret
-def info_installed(*names):
+def info_installed(*names, **kwargs):
'''
.. versionadded:: 2015.8.1
Return the information of the named package(s), installed on the system.
+ :param all_versions:
+ Include information for all versions of the packages installed on the minion.
+
CLI example:
.. code-block:: bash
salt '*' pkg.info_installed <package1>
salt '*' pkg.info_installed <package1> <package2> <package3> ...
+ salt '*' pkg.info_installed <package1> <package2> <package3> all_versions=True
'''
+ all_versions = kwargs.get('all_versions', False)
ret = dict()
- for pkg_name, pkg_nfo in __salt__['lowpkg.info'](*names).items():
- t_nfo = dict()
- # Translate dpkg-specific keys to a common structure
- for key, value in pkg_nfo.items():
- if key == 'source_rpm':
- t_nfo['source'] = value
+ for pkg_name, pkgs_nfo in __salt__['lowpkg.info'](*names, **kwargs).items():
+ pkg_nfo = pkgs_nfo if all_versions else [pkgs_nfo]
+ for _nfo in pkg_nfo:
+ t_nfo = dict()
+ # Translate dpkg-specific keys to a common structure
+ for key, value in _nfo.items():
+ if key == 'source_rpm':
+ t_nfo['source'] = value
+ else:
+ t_nfo[key] = value
+ if not all_versions:
+ ret[pkg_name] = t_nfo
else:
- t_nfo[key] = value
-
- ret[pkg_name] = t_nfo
-
+ ret.setdefault(pkg_name, []).append(t_nfo)
return ret
@@ -1919,7 +1927,24 @@ def remove(name=None, pkgs=None, **kwargs): # pylint: disable=W0613
raise CommandExecutionError(exc)
old = list_pkgs()
- targets = [x for x in pkg_params if x in old]
+ targets = []
+ for target in pkg_params:
+ # Check if package version set to be removed is actually installed:
+ # old[target] contains a comma-separated list of installed versions
+ if target in old and not pkg_params[target]:
+ targets.append(target)
+ elif target in old and pkg_params[target] in old[target].split(','):
+ arch = ''
+ pkgname = target
+ try:
+ namepart, archpart = target.rsplit('.', 1)
+ except ValueError:
+ pass
+ else:
+ if archpart in salt.utils.pkg.rpm.ARCHES:
+ arch = '.' + archpart
+ pkgname = namepart
+ targets.append('{0}-{1}{2}'.format(pkgname, pkg_params[target], arch))
if not targets:
return {}
diff --git a/salt/modules/zypper.py b/salt/modules/zypper.py
index 668143bdd9..06f8335c18 100644
--- a/salt/modules/zypper.py
+++ b/salt/modules/zypper.py
@@ -470,28 +470,37 @@ def info_installed(*names, **kwargs):
Valid attributes are:
ignore, report
+ :param all_versions:
+ Include information for all versions of the packages installed on the minion.
+
CLI example:
.. code-block:: bash
salt '*' pkg.info_installed <package1>
salt '*' pkg.info_installed <package1> <package2> <package3> ...
- salt '*' pkg.info_installed <package1> attr=version,vendor
+ salt '*' pkg.info_installed <package1> <package2> <package3> all_versions=True
+ salt '*' pkg.info_installed <package1> attr=version,vendor all_versions=True
salt '*' pkg.info_installed <package1> <package2> <package3> ... attr=version,vendor
salt '*' pkg.info_installed <package1> <package2> <package3> ... attr=version,vendor errors=ignore
salt '*' pkg.info_installed <package1> <package2> <package3> ... attr=version,vendor errors=report
'''
+ all_versions = kwargs.get('all_versions', False)
ret = dict()
- for pkg_name, pkg_nfo in __salt__['lowpkg.info'](*names, **kwargs).items():
- t_nfo = dict()
- # Translate dpkg-specific keys to a common structure
- for key, value in six.iteritems(pkg_nfo):
- if key == 'source_rpm':
- t_nfo['source'] = value
+ for pkg_name, pkgs_nfo in __salt__['lowpkg.info'](*names, **kwargs).items():
+ pkg_nfo = pkgs_nfo if all_versions else [pkgs_nfo]
+ for _nfo in pkg_nfo:
+ t_nfo = dict()
+ # Translate dpkg-specific keys to a common structure
+ for key, value in six.iteritems(_nfo):
+ if key == 'source_rpm':
+ t_nfo['source'] = value
+ else:
+ t_nfo[key] = value
+ if not all_versions:
+ ret[pkg_name] = t_nfo
else:
- t_nfo[key] = value
- ret[pkg_name] = t_nfo
-
+ ret.setdefault(pkg_name, []).append(t_nfo)
return ret
@@ -1494,7 +1503,14 @@ def _uninstall(name=None, pkgs=None):
raise CommandExecutionError(exc)
old = list_pkgs()
- targets = [target for target in pkg_params if target in old]
+ targets = []
+ for target in pkg_params:
+ # Check if package version set to be removed is actually installed:
+ # old[target] contains a comma-separated list of installed versions
+ if target in old and pkg_params[target] in old[target].split(','):
+ targets.append(target + "-" + pkg_params[target])
+ elif target in old and not pkg_params[target]:
+ targets.append(target)
if not targets:
return {}
@@ -1517,6 +1533,32 @@ def _uninstall(name=None, pkgs=None):
return ret
+def normalize_name(name):
+ '''
+ Strips the architecture from the specified package name, if necessary.
+ Circumstances where this would be done include:
+
+ * If the arch is 32 bit and the package name ends in a 32-bit arch.
+ * If the arch matches the OS arch, or is ``noarch``.
+
+ CLI Example:
+
+ .. code-block:: bash
+
+ salt '*' pkg.normalize_name zsh.x86_64
+ '''
+ try:
+ arch = name.rsplit('.', 1)[-1]
+ if arch not in salt.utils.pkg.rpm.ARCHES + ('noarch',):
+ return name
+ except ValueError:
+ return name
+ if arch in (__grains__['osarch'], 'noarch') \
+ or salt.utils.pkg.rpm.check_32(arch, osarch=__grains__['osarch']):
+ return name[:-(len(arch) + 1)]
+ return name
+
+
def remove(name=None, pkgs=None, **kwargs): # pylint: disable=unused-argument
'''
.. versionchanged:: 2015.8.12,2016.3.3,2016.11.0
diff --git a/salt/states/pkg.py b/salt/states/pkg.py
index 2682ee17f9..ed405cb6b5 100644
index 0aca1e0af8..2034262b23 100644
--- a/salt/states/pkg.py
+++ b/salt/states/pkg.py
@@ -415,6 +415,16 @@ def _find_remove_targets(name=None,
@@ -455,6 +455,16 @@ def _find_remove_targets(name=None,
if __grains__['os'] == 'FreeBSD' and origin:
cver = [k for k, v in six.iteritems(cur_pkgs) if v['origin'] == pkgname]
@@ -288,7 +43,7 @@ index 2682ee17f9..ed405cb6b5 100644
else:
cver = cur_pkgs.get(pkgname, [])
@@ -844,6 +854,17 @@ def _verify_install(desired, new_pkgs, ignore_epoch=False, new_caps=None):
@@ -861,6 +871,17 @@ def _verify_install(desired, new_pkgs, ignore_epoch=False, new_caps=None):
cver = new_pkgs.get(pkgname.split('%')[0])
elif __grains__['os_family'] == 'Debian':
cver = new_pkgs.get(pkgname.split('=')[0])
@@ -306,145 +61,7 @@ index 2682ee17f9..ed405cb6b5 100644
else:
cver = new_pkgs.get(pkgname)
if not cver and pkgname in new_caps:
@@ -2674,7 +2695,17 @@ def _uninstall(
changes = __salt__['pkg.{0}'.format(action)](name, pkgs=pkgs, version=version, **kwargs)
new = __salt__['pkg.list_pkgs'](versions_as_list=True, **kwargs)
- failed = [x for x in pkg_params if x in new]
+ failed = []
+ for x in pkg_params:
+ if __grains__['os_family'] in ['Suse', 'RedHat']:
+ # Check if the package version set to be removed is actually removed:
+ if x in new and not pkg_params[x]:
+ failed.append(x)
+ elif x in new and pkg_params[x] in new[x]:
+ failed.append(x + "-" + pkg_params[x])
+ elif x in new:
+ failed.append(x)
+
if action == 'purge':
new_removed = __salt__['pkg.list_pkgs'](versions_as_list=True,
removed=True,
diff --git a/tests/unit/modules/test_yumpkg.py b/tests/unit/modules/test_yumpkg.py
index 28b6e1294c..c73f2582b9 100644
--- a/tests/unit/modules/test_yumpkg.py
+++ b/tests/unit/modules/test_yumpkg.py
@@ -601,3 +601,53 @@ class YumTestCase(TestCase, LoaderModuleMockMixin):
'--branch=foo', '--exclude=kernel*', 'upgrade'],
output_loglevel='trace',
python_shell=False)
+
+ def test_info_installed_with_all_versions(self):
+ '''
+ Test the return information of all versions for the named package(s), installed on the system.
+
+ :return:
+ '''
+ run_out = {
+ 'virgo-dummy': [
+ {'build_date': '2015-07-09T10:55:19Z',
+ 'vendor': 'openSUSE Build Service',
+ 'description': 'This is the Virgo dummy package used for testing SUSE Manager',
+ 'license': 'GPL-2.0', 'build_host': 'sheep05', 'url': 'http://www.suse.com',
+ 'build_date_time_t': 1436432119, 'relocations': '(not relocatable)',
+ 'source_rpm': 'virgo-dummy-1.0-1.1.src.rpm', 'install_date': '2016-02-23T16:31:57Z',
+ 'install_date_time_t': 1456241517, 'summary': 'Virgo dummy package', 'version': '1.0',
+ 'signature': 'DSA/SHA1, Thu Jul 9 08:55:33 2015, Key ID 27fa41bd8a7c64f9',
+ 'release': '1.1', 'group': 'Applications/System', 'arch': 'i686', 'size': '17992'},
+ {'build_date': '2015-07-09T10:15:19Z',
+ 'vendor': 'openSUSE Build Service',
+ 'description': 'This is the Virgo dummy package used for testing SUSE Manager',
+ 'license': 'GPL-2.0', 'build_host': 'sheep05', 'url': 'http://www.suse.com',
+ 'build_date_time_t': 1436432119, 'relocations': '(not relocatable)',
+ 'source_rpm': 'virgo-dummy-1.0-1.1.src.rpm', 'install_date': '2016-02-23T16:31:57Z',
+ 'install_date_time_t': 14562415127, 'summary': 'Virgo dummy package', 'version': '1.0',
+ 'signature': 'DSA/SHA1, Thu Jul 9 08:55:33 2015, Key ID 27fa41bd8a7c64f9',
+ 'release': '1.1', 'group': 'Applications/System', 'arch': 'x86_64', 'size': '13124'}
+ ],
+ 'libopenssl1_0_0': [
+ {'build_date': '2015-11-04T23:20:34Z', 'vendor': 'SUSE LLC <https://www.suse.com/>',
+ 'description': 'The OpenSSL Project is a collaborative effort.',
+ 'license': 'OpenSSL', 'build_host': 'sheep11', 'url': 'https://www.openssl.org/',
+ 'build_date_time_t': 1446675634, 'relocations': '(not relocatable)',
+ 'source_rpm': 'openssl-1.0.1i-34.1.src.rpm', 'install_date': '2016-02-23T16:31:35Z',
+ 'install_date_time_t': 1456241495, 'summary': 'Secure Sockets and Transport Layer Security',
+ 'version': '1.0.1i', 'signature': 'RSA/SHA256, Wed Nov 4 22:21:34 2015, Key ID 70af9e8139db7c82',
+ 'release': '34.1', 'group': 'Productivity/Networking/Security', 'packager': 'https://www.suse.com/',
+ 'arch': 'x86_64', 'size': '2576912'}
+ ]
+ }
+ with patch.dict(yumpkg.__salt__, {'lowpkg.info': MagicMock(return_value=run_out)}):
+ installed = yumpkg.info_installed(all_versions=True)
+ # Test overall products length
+ self.assertEqual(len(installed), 2)
+
+ # Test multiple versions for the same package
+ for pkg_name, pkg_info_list in installed.items():
+ self.assertEqual(len(pkg_info_list), 2 if pkg_name == "virgo-dummy" else 1)
+ for info in pkg_info_list:
+ self.assertTrue(info['arch'] in ('x86_64', 'i686'))
diff --git a/tests/unit/modules/test_zypper.py b/tests/unit/modules/test_zypper.py
index 539a950252..6eccee568b 100644
--- a/tests/unit/modules/test_zypper.py
+++ b/tests/unit/modules/test_zypper.py
@@ -327,6 +327,56 @@ class ZypperTestCase(TestCase, LoaderModuleMockMixin):
installed = zypper.info_installed()
self.assertEqual(installed['vīrgô']['description'], 'vīrgô d€šçripţiǫñ')
+ def test_info_installed_with_all_versions(self):
+ '''
+ Test the return information of all versions for the named package(s), installed on the system.
+
+ :return:
+ '''
+ run_out = {
+ 'virgo-dummy': [
+ {'build_date': '2015-07-09T10:55:19Z',
+ 'vendor': 'openSUSE Build Service',
+ 'description': 'This is the Virgo dummy package used for testing SUSE Manager',
+ 'license': 'GPL-2.0', 'build_host': 'sheep05', 'url': 'http://www.suse.com',
+ 'build_date_time_t': 1436432119, 'relocations': '(not relocatable)',
+ 'source_rpm': 'virgo-dummy-1.0-1.1.src.rpm', 'install_date': '2016-02-23T16:31:57Z',
+ 'install_date_time_t': 1456241517, 'summary': 'Virgo dummy package', 'version': '1.0',
+ 'signature': 'DSA/SHA1, Thu Jul 9 08:55:33 2015, Key ID 27fa41bd8a7c64f9',
+ 'release': '1.1', 'group': 'Applications/System', 'arch': 'i686', 'size': '17992'},
+ {'build_date': '2015-07-09T10:15:19Z',
+ 'vendor': 'openSUSE Build Service',
+ 'description': 'This is the Virgo dummy package used for testing SUSE Manager',
+ 'license': 'GPL-2.0', 'build_host': 'sheep05', 'url': 'http://www.suse.com',
+ 'build_date_time_t': 1436432119, 'relocations': '(not relocatable)',
+ 'source_rpm': 'virgo-dummy-1.0-1.1.src.rpm', 'install_date': '2016-02-23T16:31:57Z',
+ 'install_date_time_t': 14562415127, 'summary': 'Virgo dummy package', 'version': '1.0',
+ 'signature': 'DSA/SHA1, Thu Jul 9 08:55:33 2015, Key ID 27fa41bd8a7c64f9',
+ 'release': '1.1', 'group': 'Applications/System', 'arch': 'x86_64', 'size': '13124'}
+ ],
+ 'libopenssl1_0_0': [
+ {'build_date': '2015-11-04T23:20:34Z', 'vendor': 'SUSE LLC <https://www.suse.com/>',
+ 'description': 'The OpenSSL Project is a collaborative effort.',
+ 'license': 'OpenSSL', 'build_host': 'sheep11', 'url': 'https://www.openssl.org/',
+ 'build_date_time_t': 1446675634, 'relocations': '(not relocatable)',
+ 'source_rpm': 'openssl-1.0.1i-34.1.src.rpm', 'install_date': '2016-02-23T16:31:35Z',
+ 'install_date_time_t': 1456241495, 'summary': 'Secure Sockets and Transport Layer Security',
+ 'version': '1.0.1i', 'signature': 'RSA/SHA256, Wed Nov 4 22:21:34 2015, Key ID 70af9e8139db7c82',
+ 'release': '34.1', 'group': 'Productivity/Networking/Security', 'packager': 'https://www.suse.com/',
+ 'arch': 'x86_64', 'size': '2576912'}
+ ]
+ }
+ with patch.dict(zypper.__salt__, {'lowpkg.info': MagicMock(return_value=run_out)}):
+ installed = zypper.info_installed(all_versions=True)
+ # Test overall products length
+ self.assertEqual(len(installed), 2)
+
+ # Test multiple versions for the same package
+ for pkg_name, pkg_info_list in installed.items():
+ self.assertEqual(len(pkg_info_list), 2 if pkg_name == "virgo-dummy" else 1)
+ for info in pkg_info_list:
+ self.assertTrue(info['arch'] in ('x86_64', 'i686'))
+
def test_info_available(self):
'''
Test return the information of the named package available for the system.
--
2.13.7
2.17.1

View File

@@ -1,4 +1,4 @@
From 3bad9e211c2e76ddac48f7c8ff1632e32e0a256e Mon Sep 17 00:00:00 2001
From c2c002a2b8f106388fda3c1abaf518f2d47ce1cf Mon Sep 17 00:00:00 2001
From: Bo Maryniuk <bo@suse.de>
Date: Tue, 9 Oct 2018 14:08:50 +0200
Subject: [PATCH] Add CPE_NAME for osversion* grain parsing (U#49946)
@@ -25,16 +25,15 @@ Expand unit test to verify part name
Fix proper part name in the string-bound CPE
---
salt/grains/core.py | 43 +++++++++++++++++++++---
tests/unit/grains/test_core.py | 60 +++++++++++++++++++++++++++++-----
2 files changed, 90 insertions(+), 13 deletions(-)
salt/grains/core.py | 28 ++++++++++++++++++++++++++++
1 file changed, 28 insertions(+)
diff --git a/salt/grains/core.py b/salt/grains/core.py
index 80eebd1c05..e41ab4e0ae 100644
index 29e8371c2b..d688b6c757 100644
--- a/salt/grains/core.py
+++ b/salt/grains/core.py
@@ -1355,6 +1355,34 @@ def _parse_os_release(os_release_files):
return data
@@ -1571,6 +1571,34 @@ def _parse_cpe_name(cpe):
return ret
+def _parse_cpe_name(cpe):
@@ -68,127 +67,7 @@ index 80eebd1c05..e41ab4e0ae 100644
def os_data():
'''
Return grains pertaining to the operating system
@@ -1554,13 +1582,20 @@ def os_data():
codename = codename_match.group(1)
grains['lsb_distrib_codename'] = codename
if 'CPE_NAME' in os_release:
- if ":suse:" in os_release['CPE_NAME'] or ":opensuse:" in os_release['CPE_NAME']:
+ cpe = _parse_cpe_name(os_release['CPE_NAME'])
+ if not cpe:
+ log.error('Broken CPE_NAME format in /etc/os-release!')
+ elif cpe.get('vendor', '').lower() in ['suse', 'opensuse']:
grains['os'] = "SUSE"
# openSUSE `osfullname` grain normalization
if os_release.get("NAME") == "openSUSE Leap":
grains['osfullname'] = "Leap"
elif os_release.get("VERSION") == "Tumbleweed":
grains['osfullname'] = os_release["VERSION"]
+ # Override VERSION_ID, if CPE_NAME around
+ if cpe.get('version') and cpe.get('vendor') == 'opensuse': # Keep VERSION_ID for SLES
+ grains['lsb_distrib_release'] = cpe['version']
+
elif os.path.isfile('/etc/SuSE-release'):
grains['lsb_distrib_id'] = 'SUSE'
version = ''
@@ -1666,8 +1701,7 @@ def os_data():
# Commit introducing this comment should be reverted after the upstream bug is released.
if 'CentOS Linux 7' in grains.get('lsb_distrib_codename', ''):
grains.pop('lsb_distrib_release', None)
- grains['osrelease'] = \
- grains.get('lsb_distrib_release', osrelease).strip()
+ grains['osrelease'] = grains.get('lsb_distrib_release', osrelease).strip()
grains['oscodename'] = grains.get('lsb_distrib_codename', '').strip() or oscodename
if 'Red Hat' in grains['oscodename']:
grains['oscodename'] = oscodename
@@ -1702,8 +1736,7 @@ def os_data():
r'((?:Open|Oracle )?Solaris|OpenIndiana|OmniOS) (Development)?'
r'\s*(\d+\.?\d*|v\d+)\s?[A-Z]*\s?(r\d+|\d+\/\d+|oi_\S+|snv_\S+)?'
)
- osname, development, osmajorrelease, osminorrelease = \
- release_re.search(rel_data).groups()
+ osname, development, osmajorrelease, osminorrelease = release_re.search(rel_data).groups()
except AttributeError:
# Set a blank osrelease grain and fallback to 'Solaris'
# as the 'os' grain.
diff --git a/tests/unit/grains/test_core.py b/tests/unit/grains/test_core.py
index e973428add..2ab32ef41b 100644
--- a/tests/unit/grains/test_core.py
+++ b/tests/unit/grains/test_core.py
@@ -62,10 +62,11 @@ class CoreGrainsTestCase(TestCase, LoaderModuleMockMixin):
def test_parse_etc_os_release(self, path_isfile_mock):
path_isfile_mock.side_effect = lambda x: x == "/usr/lib/os-release"
with salt.utils.files.fopen(os.path.join(OS_RELEASE_DIR, "ubuntu-17.10")) as os_release_file:
- os_release_content = os_release_file.readlines()
- with patch("salt.utils.files.fopen", mock_open()) as os_release_file:
- os_release_file.return_value.__iter__.return_value = os_release_content
- os_release = core._parse_os_release(["/etc/os-release", "/usr/lib/os-release"])
+ os_release_content = os_release_file.read()
+ with patch("salt.utils.files.fopen", mock_open(read_data=os_release_content)):
+ os_release = core._parse_os_release(
+ '/etc/os-release',
+ '/usr/lib/os-release')
self.assertEqual(os_release, {
"NAME": "Ubuntu",
"VERSION": "17.10 (Artful Aardvark)",
@@ -81,10 +82,53 @@ class CoreGrainsTestCase(TestCase, LoaderModuleMockMixin):
"UBUNTU_CODENAME": "artful",
})
- @patch("os.path.isfile")
- def test_missing_os_release(self, path_isfile_mock):
- path_isfile_mock.return_value = False
- os_release = core._parse_os_release(["/etc/os-release", "/usr/lib/os-release"])
+ def test_parse_cpe_name_wfn(self):
+ '''
+ Parse correct CPE_NAME data WFN formatted
+ :return:
+ '''
+ for cpe, cpe_ret in [('cpe:/o:opensuse:leap:15.0',
+ {'phase': None, 'version': '15.0', 'product': 'leap',
+ 'vendor': 'opensuse', 'part': 'operating system'}),
+ ('cpe:/o:vendor:product:42:beta',
+ {'phase': 'beta', 'version': '42', 'product': 'product',
+ 'vendor': 'vendor', 'part': 'operating system'})]:
+ ret = core._parse_cpe_name(cpe)
+ for key in cpe_ret:
+ assert key in ret
+ assert cpe_ret[key] == ret[key]
+
+ def test_parse_cpe_name_v23(self):
+ '''
+ Parse correct CPE_NAME data v2.3 formatted
+ :return:
+ '''
+ for cpe, cpe_ret in [('cpe:2.3:o:microsoft:windows_xp:5.1.601:beta:*:*:*:*:*:*',
+ {'phase': 'beta', 'version': '5.1.601', 'product': 'windows_xp',
+ 'vendor': 'microsoft', 'part': 'operating system'}),
+ ('cpe:2.3:h:corellian:millenium_falcon:1.0:*:*:*:*:*:*:*',
+ {'phase': None, 'version': '1.0', 'product': 'millenium_falcon',
+ 'vendor': 'corellian', 'part': 'hardware'}),
+ ('cpe:2.3:*:dark_empire:light_saber:3.0:beta:*:*:*:*:*:*',
+ {'phase': 'beta', 'version': '3.0', 'product': 'light_saber',
+ 'vendor': 'dark_empire', 'part': None})]:
+ ret = core._parse_cpe_name(cpe)
+ for key in cpe_ret:
+ assert key in ret
+ assert cpe_ret[key] == ret[key]
+
+ def test_parse_cpe_name_broken(self):
+ '''
+ Parse broken CPE_NAME data
+ :return:
+ '''
+ for cpe in ['cpe:broken', 'cpe:broken:in:all:ways:*:*:*:*',
+ 'cpe:x:still:broken:123', 'who:/knows:what:is:here']:
+ assert core._parse_cpe_name(cpe) == {}
+
+ def test_missing_os_release(self):
+ with patch('salt.utils.files.fopen', mock_open(read_data={})):
+ os_release = core._parse_os_release('/etc/os-release', '/usr/lib/os-release')
self.assertEqual(os_release, {})
@skipIf(not salt.utils.platform.is_linux(), 'System is not Linux')
--
2.19.0
2.17.1

View File

@@ -1,894 +0,0 @@
From 5c41a5b8c9925bf788946e334cb3912ca9b09190 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?C=C3=A9dric=20Bosdonnat?= <cbosdonnat@suse.com>
Date: Fri, 9 Mar 2018 15:46:12 +0100
Subject: [PATCH] Add engine relaying libvirt events
Libvirt API offers clients to register callbacks for various events.
libvirt_events engine will listen on a libvirt URI (local or remote)
for events and send them to the salt event bus.
Special thanks to @isbm for the code cleanup help
---
salt/engines/libvirt_events.py | 702 ++++++++++++++++++++++
tests/unit/engines/test_libvirt_events.py | 159 +++++
2 files changed, 861 insertions(+)
create mode 100644 salt/engines/libvirt_events.py
create mode 100644 tests/unit/engines/test_libvirt_events.py
diff --git a/salt/engines/libvirt_events.py b/salt/engines/libvirt_events.py
new file mode 100644
index 0000000000..a1c9d09067
--- /dev/null
+++ b/salt/engines/libvirt_events.py
@@ -0,0 +1,702 @@
+# -*- coding: utf-8 -*-
+
+'''
+An engine that listens for libvirt events and resends them to the salt event bus.
+
+The minimal configuration is the following and will listen to all events on the
+local hypervisor and send them with a tag starting with ``salt/engines/libvirt_events``:
+
+.. code-block:: yaml
+
+ engines:
+ - libvirt_events
+
+Note that the automatically-picked libvirt connection will depend on the value
+of ``uri_default`` in ``/etc/libvirt/libvirt.conf``. To force using another
+connection like the local LXC libvirt driver, set the ``uri`` property as in the
+following example configuration.
+
+.. code-block:: yaml
+
+ engines:
+ - libvirt_events:
+ uri: lxc:///
+ tag_prefix: libvirt
+ filters:
+ - domain/lifecycle
+ - domain/reboot
+ - pool
+
+Filters is a list of event types to relay to the event bus. Items in this list
+can be either one of the main types (``domain``, ``network``, ``pool``,
+``nodedev``, ``secret``), ``all`` or a more precise filter. These can be done
+with values like <main_type>/<subtype>. The possible values are in the
+CALLBACK_DEFS constant. If the filters list contains ``all``, all
+events will be relayed.
+
+Be aware that the list of events increases with libvirt versions, for example
+network events have been added in libvirt 1.2.1.
+
+Running the engine on non-root
+------------------------------
+
+Running this engine as non-root requires special attention, which is surely
+the case for the master running as user `salt`. The engine is likely to fail
+to connect to libvirt with an error like this one:
+
+ [ERROR ] authentication unavailable: no polkit agent available to authenticate action 'org.libvirt.unix.monitor'
+
+
+To fix this, the user running the engine, for example the salt-master, needs
+to have the rights to connect to libvirt in the machine polkit config.
+A polkit rule like the following one will allow `salt` user to connect to libvirt:
+
+.. code-block:: javascript
+
+ polkit.addRule(function(action, subject) {
+ if (action.id.indexOf("org.libvirt") == 0 &&
+ subject.user == "salt") {
+ return polkit.Result.YES;
+ }
+ });
+
+:depends: libvirt 1.0.0+ python binding
+
+.. versionadded:: Fluorine
+'''
+
+from __future__ import absolute_import, unicode_literals, print_function
+import logging
+
+# Import salt libs
+import salt.utils.event
+
+# pylint: disable=no-name-in-module,import-error
+from salt.ext.six.moves.urllib.parse import urlparse
+# pylint: enable=no-name-in-module,import-error
+
+log = logging.getLogger(__name__)
+
+
+try:
+ import libvirt
+except ImportError:
+ libvirt = None # pylint: disable=invalid-name
+
+
+def __virtual__():
+ '''
+ Only load if libvirt python binding is present
+ '''
+ if libvirt is None:
+ msg = 'libvirt module not found'
+ elif libvirt.getVersion() < 1000000:
+ msg = 'libvirt >= 1.0.0 required'
+ else:
+ msg = ''
+ return not bool(msg), msg
+
+
+REGISTER_FUNCTIONS = {
+ 'domain': 'domainEventRegisterAny',
+ 'network': 'networkEventRegisterAny',
+ 'pool': 'storagePoolEventRegisterAny',
+ 'nodedev': 'nodeDeviceEventRegisterAny',
+ 'secret': 'secretEventRegisterAny'
+}
+
+# Handle either BLOCK_JOB or BLOCK_JOB_2, but prefer the latter
+if hasattr(libvirt, 'VIR_DOMAIN_EVENT_ID_BLOCK_JOB_2'):
+ BLOCK_JOB_ID = 'VIR_DOMAIN_EVENT_ID_BLOCK_JOB_2'
+else:
+ BLOCK_JOB_ID = 'VIR_DOMAIN_EVENT_ID_BLOCK_JOB'
+
+CALLBACK_DEFS = {
+ 'domain': (('lifecycle', None),
+ ('reboot', None),
+ ('rtc_change', None),
+ ('watchdog', None),
+ ('graphics', None),
+ ('io_error', 'VIR_DOMAIN_EVENT_ID_IO_ERROR_REASON'),
+ ('control_error', None),
+ ('disk_change', None),
+ ('tray_change', None),
+ ('pmwakeup', None),
+ ('pmsuspend', None),
+ ('balloon_change', None),
+ ('pmsuspend_disk', None),
+ ('device_removed', None),
+ ('block_job', BLOCK_JOB_ID),
+ ('tunable', None),
+ ('agent_lifecycle', None),
+ ('device_added', None),
+ ('migration_iteration', None),
+ ('job_completed', None),
+ ('device_removal_failed', None),
+ ('metadata_change', None),
+ ('block_threshold', None)),
+ 'network': (('lifecycle', None),),
+ 'pool': (('lifecycle', None),
+ ('refresh', None)),
+ 'nodedev': (('lifecycle', None),
+ ('update', None)),
+ 'secret': (('lifecycle', None),
+ ('value_changed', None))
+}
+
+
+def _compute_subprefix(attr):
+ '''
+ Get the part before the first '_' or the end of attr including
+ the potential '_'
+ '''
+ return ''.join((attr.split('_')[0], '_' if len(attr.split('_')) > 1 else ''))
+
+
+def _get_libvirt_enum_string(prefix, value):
+ '''
+ Convert the libvirt enum integer value into a human readable string.
+
+ :param prefix: start of the libvirt attribute to look for.
+ :param value: integer to convert to string
+ '''
+ attributes = [attr[len(prefix):] for attr in libvirt.__dict__ if attr.startswith(prefix)]
+
+ # Filter out the values starting with a common base as they match another enum
+ prefixes = [_compute_subprefix(p) for p in attributes]
+ counts = {p: prefixes.count(p) for p in prefixes}
+ sub_prefixes = [p for p, count in counts.items() if count > 1]
+ filtered = [attr for attr in attributes if _compute_subprefix(attr) not in sub_prefixes]
+
+ for candidate in filtered:
+ if value == getattr(libvirt, ''.join((prefix, candidate))):
+ name = candidate.lower().replace('_', ' ')
+ return name
+ return 'unknown'
+
+
+def _get_domain_event_detail(event, detail):
+ '''
+ Convert event and detail numeric values into a tuple of human readable strings
+ '''
+ event_name = _get_libvirt_enum_string('VIR_DOMAIN_EVENT_', event)
+ if event_name == 'unknown':
+ return event_name, 'unknown'
+
+ prefix = 'VIR_DOMAIN_EVENT_{0}_'.format(event_name.upper())
+ detail_name = _get_libvirt_enum_string(prefix, detail)
+
+ return event_name, detail_name
+
+
+def _salt_send_event(opaque, conn, data):
+ '''
+ Convenience function adding common data to the event and sending it
+ on the salt event bus.
+
+ :param opaque: the opaque data that is passed to the callback.
+ This is a dict with 'prefix', 'object' and 'event' keys.
+ :param conn: libvirt connection
+ :param data: additional event data dict to send
+ '''
+ tag_prefix = opaque['prefix']
+ object_type = opaque['object']
+ event_type = opaque['event']
+
+ # Prepare the connection URI to fit in the tag
+ # qemu+ssh://user@host:1234/system -> qemu+ssh/user@host:1234/system
+ uri = urlparse(conn.getURI())
+ uri_tag = [uri.scheme]
+ if uri.netloc:
+ uri_tag.append(uri.netloc)
+ path = uri.path.strip('/')
+ if path:
+ uri_tag.append(path)
+ uri_str = "/".join(uri_tag)
+
+ # Append some common data
+ all_data = {
+ 'uri': conn.getURI()
+ }
+ all_data.update(data)
+
+ tag = '/'.join((tag_prefix, uri_str, object_type, event_type))
+
+ # Actually send the event in salt
+ if __opts__.get('__role') == 'master':
+ salt.utils.event.get_master_event(
+ __opts__,
+ __opts__['sock_dir']).fire_event(all_data, tag)
+ else:
+ __salt__['event.send'](tag, all_data)
+
+
+def _salt_send_domain_event(opaque, conn, domain, event, event_data):
+ '''
+ Helper function sending a salt event for a libvirt domain.
+
+ :param opaque: the opaque data that is passed to the callback.
+ This is a dict with 'prefix', 'object' and 'event' keys.
+ :param conn: libvirt connection
+ :param domain: name of the domain related to the event
+ :param event: name of the event
+ :param event_data: additional event data dict to send
+ '''
+ data = {
+ 'domain': {
+ 'name': domain.name(),
+ 'id': domain.ID(),
+ 'uuid': domain.UUIDString()
+ },
+ 'event': event
+ }
+ data.update(event_data)
+ _salt_send_event(opaque, conn, data)
+
+
+def _domain_event_lifecycle_cb(conn, domain, event, detail, opaque):
+ '''
+ Domain lifecycle events handler
+ '''
+ event_str, detail_str = _get_domain_event_detail(event, detail)
+
+ _salt_send_domain_event(opaque, conn, domain, opaque['event'], {
+ 'event': event_str,
+ 'detail': detail_str
+ })
+
+
+def _domain_event_reboot_cb(conn, domain, opaque):
+ '''
+ Domain reboot events handler
+ '''
+ _salt_send_domain_event(opaque, conn, domain, opaque['event'], {})
+
+
+def _domain_event_rtc_change_cb(conn, domain, utcoffset, opaque):
+ '''
+ Domain RTC change events handler
+ '''
+ _salt_send_domain_event(opaque, conn, domain, opaque['event'], {
+ 'utcoffset': utcoffset
+ })
+
+
+def _domain_event_watchdog_cb(conn, domain, action, opaque):
+ '''
+ Domain watchdog events handler
+ '''
+ _salt_send_domain_event(opaque, conn, domain, opaque['event'], {
+ 'action': _get_libvirt_enum_string('VIR_DOMAIN_EVENT_WATCHDOG_', action)
+ })
+
+
+def _domain_event_io_error_cb(conn, domain, srcpath, devalias, action, reason, opaque):
+ '''
+ Domain I/O Error events handler
+ '''
+ _salt_send_domain_event(opaque, conn, domain, opaque['event'], {
+ 'srcPath': srcpath,
+ 'dev': devalias,
+ 'action': _get_libvirt_enum_string('VIR_DOMAIN_EVENT_IO_ERROR_', action),
+ 'reason': reason
+ })
+
+
+def _domain_event_graphics_cb(conn, domain, phase, local, remote, auth, subject, opaque):
+ '''
+ Domain graphics events handler
+ '''
+ prefix = 'VIR_DOMAIN_EVENT_GRAPHICS_'
+
+ def get_address(addr):
+ '''
+ transform address structure into event data piece
+ '''
+ data = {'family': _get_libvirt_enum_string('{0}_ADDRESS_'.format(prefix), addr['family']),
+ 'node': addr['node'],
+ 'service': addr['service']}
+ return data
+
+ _salt_send_domain_event(opaque, conn, domain, opaque['event'], {
+ 'phase': _get_libvirt_enum_string(prefix, phase),
+ 'local': get_address(local),
+ 'remote': get_address(remote),
+ 'authScheme': auth,
+ 'subject': [{'type': item[0], 'name': item[1]} for item in subject]
+ })
+
+
+def _domain_event_control_error_cb(conn, domain, opaque):
+ '''
+ Domain control error events handler
+ '''
+ _salt_send_domain_event(opaque, conn, domain, opaque['event'], {})
+
+
+def _domain_event_disk_change_cb(conn, domain, old_src, new_src, dev, reason, opaque):
+ '''
+ Domain disk change events handler
+ '''
+ _salt_send_domain_event(opaque, conn, domain, opaque['event'], {
+ 'oldSrcPath': old_src,
+ 'newSrcPath': new_src,
+ 'dev': dev,
+ 'reason': _get_libvirt_enum_string('VIR_DOMAIN_EVENT_DISK_', reason)
+ })
+
+
+def _domain_event_tray_change_cb(conn, domain, dev, reason, opaque):
+ '''
+ Domain tray change events handler
+ '''
+ _salt_send_domain_event(opaque, conn, domain, opaque['event'], {
+ 'dev': dev,
+ 'reason': _get_libvirt_enum_string('VIR_DOMAIN_EVENT_TRAY_CHANGE_', reason)
+ })
+
+
+def _domain_event_pmwakeup_cb(conn, domain, reason, opaque):
+ '''
+ Domain wakeup events handler
+ '''
+ _salt_send_domain_event(opaque, conn, domain, opaque['event'], {
+ 'reason': 'unknown' # currently unused
+ })
+
+
+def _domain_event_pmsuspend_cb(conn, domain, reason, opaque):
+ '''
+ Domain suspend events handler
+ '''
+ _salt_send_domain_event(opaque, conn, domain, opaque['event'], {
+ 'reason': 'unknown' # currently unused
+ })
+
+
+def _domain_event_balloon_change_cb(conn, domain, actual, opaque):
+ '''
+ Domain balloon change events handler
+ '''
+ _salt_send_domain_event(opaque, conn, domain, opaque['event'], {
+ 'actual': actual
+ })
+
+
+def _domain_event_pmsuspend_disk_cb(conn, domain, reason, opaque):
+ '''
+ Domain disk suspend events handler
+ '''
+ _salt_send_domain_event(opaque, conn, domain, opaque['event'], {
+ 'reason': 'unknown' # currently unused
+ })
+
+
+def _domain_event_block_job_cb(conn, domain, disk, job_type, status, opaque):
+ '''
+ Domain block job events handler
+ '''
+ _salt_send_domain_event(opaque, conn, domain, opaque['event'], {
+ 'disk': disk,
+ 'type': _get_libvirt_enum_string('VIR_DOMAIN_BLOCK_JOB_TYPE_', job_type),
+ 'status': _get_libvirt_enum_string('VIR_DOMAIN_BLOCK_JOB_', status)
+ })
+
+
+def _domain_event_device_removed_cb(conn, domain, dev, opaque):
+ '''
+ Domain device removal events handler
+ '''
+ _salt_send_domain_event(opaque, conn, domain, opaque['event'], {
+ 'dev': dev
+ })
+
+
+def _domain_event_tunable_cb(conn, domain, params, opaque):
+ '''
+ Domain tunable events handler
+ '''
+ _salt_send_domain_event(opaque, conn, domain, opaque['event'], {
+ 'params': params
+ })
+
+
+# pylint: disable=invalid-name
+def _domain_event_agent_lifecycle_cb(conn, domain, state, reason, opaque):
+ '''
+ Domain agent lifecycle events handler
+ '''
+ _salt_send_domain_event(opaque, conn, domain, opaque['event'], {
+ 'state': _get_libvirt_enum_string('VIR_CONNECT_DOMAIN_EVENT_AGENT_LIFECYCLE_STATE_', state),
+ 'reason': _get_libvirt_enum_string('VIR_CONNECT_DOMAIN_EVENT_AGENT_LIFECYCLE_REASON_', reason)
+ })
+
+
+def _domain_event_device_added_cb(conn, domain, dev, opaque):
+ '''
+ Domain device addition events handler
+ '''
+ _salt_send_domain_event(opaque, conn, domain, opaque['event'], {
+ 'dev': dev
+ })
+
+
+# pylint: disable=invalid-name
+def _domain_event_migration_iteration_cb(conn, domain, iteration, opaque):
+ '''
+ Domain migration iteration events handler
+ '''
+ _salt_send_domain_event(opaque, conn, domain, opaque['event'], {
+ 'iteration': iteration
+ })
+
+
+def _domain_event_job_completed_cb(conn, domain, params, opaque):
+ '''
+ Domain job completion events handler
+ '''
+ _salt_send_domain_event(opaque, conn, domain, opaque['event'], {
+ 'params': params
+ })
+
+
+def _domain_event_device_removal_failed_cb(conn, domain, dev, opaque):
+ '''
+ Domain device removal failure events handler
+ '''
+ _salt_send_domain_event(opaque, conn, domain, opaque['event'], {
+ 'dev': dev
+ })
+
+
+def _domain_event_metadata_change_cb(conn, domain, mtype, nsuri, opaque):
+ '''
+ Domain metadata change events handler
+ '''
+ _salt_send_domain_event(opaque, conn, domain, opaque['event'], {
+ 'type': _get_libvirt_enum_string('VIR_DOMAIN_METADATA_', mtype),
+ 'nsuri': nsuri
+ })
+
+
+def _domain_event_block_threshold_cb(conn, domain, dev, path, threshold, excess, opaque):
+ '''
+ Domain block threshold events handler
+ '''
+ _salt_send_domain_event(opaque, conn, domain, opaque['event'], {
+ 'dev': dev,
+ 'path': path,
+ 'threshold': threshold,
+ 'excess': excess
+ })
+
+
+def _network_event_lifecycle_cb(conn, net, event, detail, opaque):
+ '''
+ Network lifecycle events handler
+ '''
+
+ _salt_send_event(opaque, conn, {
+ 'network': {
+ 'name': net.name(),
+ 'uuid': net.UUIDString()
+ },
+ 'event': _get_libvirt_enum_string('VIR_NETWORK_EVENT_', event),
+ 'detail': 'unknown' # currently unused
+ })
+
+
+def _pool_event_lifecycle_cb(conn, pool, event, detail, opaque):
+ '''
+ Storage pool lifecycle events handler
+ '''
+ _salt_send_event(opaque, conn, {
+ 'pool': {
+ 'name': pool.name(),
+ 'uuid': pool.UUIDString()
+ },
+ 'event': _get_libvirt_enum_string('VIR_STORAGE_POOL_EVENT_', event),
+ 'detail': 'unknown' # currently unused
+ })
+
+
+def _pool_event_refresh_cb(conn, pool, opaque):
+ '''
+ Storage pool refresh events handler
+ '''
+ _salt_send_event(opaque, conn, {
+ 'pool': {
+ 'name': pool.name(),
+ 'uuid': pool.UUIDString()
+ },
+ 'event': opaque['event']
+ })
+
+
+def _nodedev_event_lifecycle_cb(conn, dev, event, detail, opaque):
+ '''
+ Node device lifecycle events handler
+ '''
+ _salt_send_event(opaque, conn, {
+ 'nodedev': {
+ 'name': dev.name()
+ },
+ 'event': _get_libvirt_enum_string('VIR_NODE_DEVICE_EVENT_', event),
+ 'detail': 'unknown' # currently unused
+ })
+
+
+def _nodedev_event_update_cb(conn, dev, opaque):
+ '''
+ Node device update events handler
+ '''
+ _salt_send_event(opaque, conn, {
+ 'nodedev': {
+ 'name': dev.name()
+ },
+ 'event': opaque['event']
+ })
+
+
+def _secret_event_lifecycle_cb(conn, secret, event, detail, opaque):
+ '''
+ Secret lifecycle events handler
+ '''
+ _salt_send_event(opaque, conn, {
+ 'secret': {
+ 'uuid': secret.UUIDString()
+ },
+ 'event': _get_libvirt_enum_string('VIR_SECRET_EVENT_', event),
+ 'detail': 'unknown' # currently unused
+ })
+
+
+def _secret_event_value_changed_cb(conn, secret, opaque):
+ '''
+ Secret value change events handler
+ '''
+ _salt_send_event(opaque, conn, {
+ 'secret': {
+ 'uuid': secret.UUIDString()
+ },
+ 'event': opaque['event']
+ })
+
+
+def _cleanup(cnx):
+ '''
+ Close the libvirt connection
+
+ :param cnx: libvirt connection
+ '''
+ log.debug('Closing libvirt connection: %s', cnx.getURI())
+ cnx.close()
+
+
+def _callbacks_cleanup(cnx, callback_ids):
+ '''
+ Unregister all the registered callbacks
+
+ :param cnx: libvirt connection
+ :param callback_ids: dictionary mapping a libvirt object type to an ID list
+ of callbacks to deregister
+ '''
+ for obj, ids in callback_ids.items():
+ register_name = REGISTER_FUNCTIONS[obj]
+ deregister_name = register_name.replace('Reg', 'Dereg')
+ deregister = getattr(cnx, deregister_name)
+ for callback_id in ids:
+ deregister(callback_id)
+
+
+def _register_callback(cnx, tag_prefix, obj, event, real_id):
+ '''
+ Helper function registering a callback
+
+ :param cnx: libvirt connection
+ :param tag_prefix: salt event tag prefix to use
+ :param obj: the libvirt object name for the event. Needs to
+ be one of the REGISTER_FUNCTIONS keys.
+ :param event: the event type name.
+ :param real_id: the libvirt name of an alternative event id to use or None
+
+ :rtype integer value needed to deregister the callback
+ '''
+ libvirt_name = real_id
+ if real_id is None:
+ libvirt_name = 'VIR_{0}_EVENT_ID_{1}'.format(obj, event).upper()
+
+ if not hasattr(libvirt, libvirt_name):
+ log.warning('Skipping "%s/%s" events: libvirt too old', obj, event)
+ return None
+
+ libvirt_id = getattr(libvirt, libvirt_name)
+ callback_name = "_{0}_event_{1}_cb".format(obj, event)
+ callback = globals().get(callback_name, None)
+ if callback is None:
+ log.error('Missing function %s in engine', callback_name)
+ return None
+
+ register = getattr(cnx, REGISTER_FUNCTIONS[obj])
+ return register(None, libvirt_id, callback,
+ {'prefix': tag_prefix,
+ 'object': obj,
+ 'event': event})
+
+
+def _append_callback_id(ids, obj, callback_id):
+ '''
+ Helper function adding a callback ID to the IDs dict.
+ The callback ids dict maps an object to event callback ids.
+
+ :param ids: dict of callback IDs to update
+ :param obj: one of the keys of REGISTER_FUNCTIONS
+ :param callback_id: the result of _register_callback
+ '''
+ if obj not in ids:
+ ids[obj] = []
+ ids[obj].append(callback_id)
+
+
+def start(uri=None,
+ tag_prefix='salt/engines/libvirt_events',
+ filters=None):
+ '''
+ Listen to libvirt events and forward them to salt.
+
+ :param uri: libvirt URI to listen on.
+ Defaults to None to pick the first available local hypervisor
+ :param tag_prefix: the beginning of the salt event tag to use.
+ Defaults to 'salt/engines/libvirt_events'
+ :param filters: the list of events to listen on. Defaults to 'all'
+ '''
+ if filters is None:
+ filters = ['all']
+ try:
+ libvirt.virEventRegisterDefaultImpl()
+
+ cnx = libvirt.openReadOnly(uri)
+ log.debug('Opened libvirt uri: %s', cnx.getURI())
+
+ callback_ids = {}
+ all_filters = "all" in filters
+
+ for obj, event_defs in CALLBACK_DEFS.items():
+ for event, real_id in event_defs:
+ event_filter = "/".join((obj, event))
+ if event_filter not in filters and obj not in filters and not all_filters:
+ continue
+ registered_id = _register_callback(cnx, tag_prefix,
+ obj, event, real_id)
+ if registered_id:
+ _append_callback_id(callback_ids, obj, registered_id)
+
+ exit_loop = False
+ while not exit_loop:
+ exit_loop = libvirt.virEventRunDefaultImpl() < 0
+
+ except Exception as err: # pylint: disable=broad-except
+ log.exception(err)
+ finally:
+ _callbacks_cleanup(cnx, callback_ids)
+ _cleanup(cnx)
diff --git a/tests/unit/engines/test_libvirt_events.py b/tests/unit/engines/test_libvirt_events.py
new file mode 100644
index 0000000000..6608aaf648
--- /dev/null
+++ b/tests/unit/engines/test_libvirt_events.py
@@ -0,0 +1,159 @@
+# -*- coding: utf-8 -*-
+'''
+unit tests for the libvirt_events engine
+'''
+# Import Python libs
+from __future__ import absolute_import, print_function, unicode_literals
+
+# Import Salt Testing Libs
+from tests.support.mixins import LoaderModuleMockMixin
+from tests.support.unit import skipIf, TestCase
+from tests.support.mock import (
+ NO_MOCK,
+ NO_MOCK_REASON,
+ MagicMock,
+ patch)
+
+# Import Salt Libs
+import salt.engines.libvirt_events as libvirt_events
+
+
+# pylint: disable=protected-access,attribute-defined-outside-init,invalid-name,unused-argument,no-self-use
+
+
+@skipIf(NO_MOCK, NO_MOCK_REASON)
+class EngineLibvirtEventTestCase(TestCase, LoaderModuleMockMixin):
+ '''
+ Test cases for salt.engine.libvirt_events
+ '''
+
+ def setup_loader_modules(self):
+ patcher = patch('salt.engines.libvirt_events.libvirt')
+ self.mock_libvirt = patcher.start()
+ self.mock_libvirt.getVersion.return_value = 2000000
+ self.mock_libvirt.virEventRunDefaultImpl.return_value = -1 # Don't loop for ever
+ self.mock_libvirt.VIR_DOMAIN_EVENT_ID_LIFECYCLE = 0
+ self.mock_libvirt.VIR_DOMAIN_EVENT_ID_REBOOT = 1
+ self.addCleanup(patcher.stop)
+ self.addCleanup(delattr, self, 'mock_libvirt')
+ return {libvirt_events: {}}
+
+ @patch('salt.engines.libvirt_events.libvirt',
+ VIR_PREFIX_NONE=0,
+ VIR_PREFIX_ONE=1,
+ VIR_PREFIX_TWO=2,
+ VIR_PREFIX_SUB_FOO=0,
+ VIR_PREFIX_SUB_BAR=1,
+ VIR_PREFIX_SUB_FOOBAR=2)
+ def test_get_libvirt_enum_string_subprefix(self, libvirt_mock):
+ '''
+ Make sure the libvirt enum value to string works reliably with
+ elements with a sub prefix, eg VIR_PREFIX_SUB_* in this case.
+ '''
+ # Test case with a sub prefix
+
+ assert libvirt_events._get_libvirt_enum_string('VIR_PREFIX_', 2) == 'two'
+
+ @patch('salt.engines.libvirt_events.libvirt',
+ VIR_PREFIX_FOO=0,
+ VIR_PREFIX_FOO_BAR=1,
+ VIR_PREFIX_BAR_FOO=2)
+ def test_get_libvirt_enum_string_underscores(self, libvirt_mock):
+ '''
+ Make sure the libvirt enum value to string works reliably and items
+ with an underscore aren't confused with sub prefixes.
+ '''
+ assert libvirt_events._get_libvirt_enum_string('VIR_PREFIX_', 1) == 'foo bar'
+
+ @patch('salt.engines.libvirt_events.libvirt',
+ VIR_DOMAIN_EVENT_DEFINED=0,
+ VIR_DOMAIN_EVENT_UNDEFINED=1,
+ VIR_DOMAIN_EVENT_DEFINED_ADDED=0,
+ VIR_DOMAIN_EVENT_DEFINED_UPDATED=1)
+ def test_get_domain_event_detail(self, mock_libvirt):
+ '''
+ Test get_domain_event_detail function
+ '''
+ assert libvirt_events._get_domain_event_detail(1, 2) == ('undefined', 'unknown')
+ assert libvirt_events._get_domain_event_detail(0, 1) == ('defined', 'updated')
+ assert libvirt_events._get_domain_event_detail(4, 2) == ('unknown', 'unknown')
+
+ @patch('salt.engines.libvirt_events.libvirt', VIR_NETWORK_EVENT_ID_LIFECYCLE=1000)
+ def test_event_register(self, mock_libvirt):
+ '''
+ Test that the libvirt_events engine actually registers event callbacks and
+ cleans them up before leaving.
+ '''
+ mock_cnx = MagicMock()
+ mock_libvirt.openReadOnly.return_value = mock_cnx
+
+ mock_cnx.networkEventRegisterAny.return_value = 10000
+
+ libvirt_events.start('test:///', 'test/prefix')
+
+ # Check that the connection has been opened
+ mock_libvirt.openReadOnly.assert_called_once_with('test:///')
+
+ # Check that the connection has been closed
+ mock_cnx.close.assert_called_once()
+
+ # Check events registration and deregistration
+ mock_cnx.domainEventRegisterAny.assert_any_call(
+ None, mock_libvirt.VIR_DOMAIN_EVENT_ID_LIFECYCLE,
+ libvirt_events._domain_event_lifecycle_cb,
+ {'prefix': 'test/prefix', 'object': 'domain', 'event': 'lifecycle'})
+ mock_cnx.networkEventRegisterAny.assert_any_call(
+ None, mock_libvirt.VIR_NETWORK_EVENT_ID_LIFECYCLE,
+ libvirt_events._network_event_lifecycle_cb,
+ {'prefix': 'test/prefix', 'object': 'network', 'event': 'lifecycle'})
+
+ # Check that the deregister events are called with the result of register
+ mock_cnx.networkEventDeregisterAny.assert_called_with(
+ mock_cnx.networkEventRegisterAny.return_value)
+
+ # Check that the default 'all' filter actually worked
+ counts = {obj: len(callback_def) for obj, callback_def in libvirt_events.CALLBACK_DEFS.items()}
+ for obj, count in counts.items():
+ register = libvirt_events.REGISTER_FUNCTIONS[obj]
+ assert getattr(mock_cnx, register).call_count == count
+
+ def test_event_skipped(self):
+ '''
+ Test that events are skipped if their ID isn't defined in the libvirt
+ module (older libvirt)
+ '''
+ self.mock_libvirt.mock_add_spec([
+ 'openReadOnly',
+ 'virEventRegisterDefaultImpl',
+ 'virEventRunDefaultImpl',
+ 'VIR_DOMAIN_EVENT_ID_LIFECYCLE'], spec_set=True)
+
+ libvirt_events.start('test:///', 'test/prefix')
+
+ # Check events registration and deregistration
+ mock_cnx = self.mock_libvirt.openReadOnly.return_value
+
+ mock_cnx.domainEventRegisterAny.assert_any_call(
+ None, self.mock_libvirt.VIR_DOMAIN_EVENT_ID_LIFECYCLE,
+ libvirt_events._domain_event_lifecycle_cb,
+ {'prefix': 'test/prefix', 'object': 'domain', 'event': 'lifecycle'})
+
+ # Network events should have been skipped
+ mock_cnx.networkEventRegisterAny.assert_not_called()
+
+ def test_event_filtered(self):
+ '''
+ Test that events are filtered out when they don't match the
+ configured filters list
+ '''
+ libvirt_events.start('test', 'test/prefix', 'domain/lifecycle')
+
+ # Check events registration and deregistration
+ mock_cnx = self.mock_libvirt.openReadOnly.return_value
+
+ mock_cnx.domainEventRegisterAny.assert_any_call(
+ None, 0, libvirt_events._domain_event_lifecycle_cb,
+ {'prefix': 'test/prefix', 'object': 'domain', 'event': 'lifecycle'})
+
+ # Network events should have been filtered out
+ mock_cnx.networkEventRegisterAny.assert_not_called()
--
2.17.1

View File

@ -1,28 +1,18 @@
From 39d9d9fb26f9aff83fce4ce67d5b2a6bd4f60b95 Mon Sep 17 00:00:00 2001
From d9d459f62d53acddd67313d9d66e1fe8caf4fd45 Mon Sep 17 00:00:00 2001
From: Marcelo Chiaradia <mchiaradia@suse.com>
Date: Thu, 7 Jun 2018 10:29:41 +0200
Subject: [PATCH] Add environment variable to know if yum is invoked from
Salt(bsc#1057635)
---
salt/modules/yumpkg.py | 59 +++++++++++++++++++++++++++++++++-----------------
1 file changed, 39 insertions(+), 20 deletions(-)
salt/modules/yumpkg.py | 18 ++++++++++++------
1 file changed, 12 insertions(+), 6 deletions(-)
diff --git a/salt/modules/yumpkg.py b/salt/modules/yumpkg.py
index 9ce4926790..51832bf883 100644
index c250b94f0e..a56a2e8366 100644
--- a/salt/modules/yumpkg.py
+++ b/salt/modules/yumpkg.py
@@ -452,7 +452,8 @@ def latest_version(*names, **kwargs):
out = __salt__['cmd.run_all'](cmd,
output_loglevel='trace',
ignore_retcode=True,
- python_shell=False)
+ python_shell=False,
+ env={"SALT_RUNNING": '1'})
if out['retcode'] != 0:
if out['stderr']:
# Check first if this is just a matter of the packages being
@@ -850,7 +851,8 @@ def list_repo_pkgs(*args, **kwargs):
@@ -887,7 +887,8 @@ def list_repo_pkgs(*args, **kwargs):
yum_version = None if _yum() != 'yum' else _LooseVersion(
__salt__['cmd.run'](
['yum', '--version'],
@ -32,133 +22,7 @@ index 9ce4926790..51832bf883 100644
).splitlines()[0].strip()
)
# Really old version of yum; does not even have --showduplicates option
@@ -865,7 +867,8 @@ def list_repo_pkgs(*args, **kwargs):
cmd_prefix + [pkg_src],
output_loglevel='trace',
ignore_retcode=True,
- python_shell=False
+ python_shell=False,
+ env={"SALT_RUNNING": '1'}
)
if out['retcode'] == 0:
_parse_output(out['stdout'], strict=True)
@@ -882,7 +885,8 @@ def list_repo_pkgs(*args, **kwargs):
cmd_prefix + [pkg_src],
output_loglevel='trace',
ignore_retcode=True,
- python_shell=False
+ python_shell=False,
+ env={"SALT_RUNNING": '1'}
)
if out['retcode'] == 0:
_parse_output(out['stdout'], strict=True)
@@ -898,7 +902,8 @@ def list_repo_pkgs(*args, **kwargs):
out = __salt__['cmd.run_all'](cmd,
output_loglevel='trace',
ignore_retcode=True,
- python_shell=False)
+ python_shell=False,
+ env={"SALT_RUNNING": '1'})
if out['retcode'] != 0 and 'Error:' in out['stdout']:
continue
_parse_output(out['stdout'])
@@ -955,7 +960,8 @@ def list_upgrades(refresh=True, **kwargs):
out = __salt__['cmd.run_all'](cmd,
output_loglevel='trace',
ignore_retcode=True,
- python_shell=False)
+ python_shell=False,
+ env={"SALT_RUNNING": '1'})
if out['retcode'] != 0 and 'Error:' in out:
return {}
@@ -1090,12 +1096,13 @@ def refresh_db(**kwargs):
clean_cmd.extend(options)
update_cmd.extend(options)
- __salt__['cmd.run'](clean_cmd, python_shell=False)
+ __salt__['cmd.run'](clean_cmd, python_shell=False, env={"SALT_RUNNING": '1'})
if check_update_:
result = __salt__['cmd.retcode'](update_cmd,
output_loglevel='trace',
ignore_retcode=True,
- python_shell=False)
+ python_shell=False,
+ env={"SALT_RUNNING": '1'})
return retcodes.get(result, False)
return True
@@ -1634,7 +1641,8 @@ def install(name=None,
cmd,
output_loglevel='trace',
python_shell=False,
- redirect_stderr=True
+ redirect_stderr=True,
+ env={"SALT_RUNNING": '1'}
)
if out['retcode'] != 0:
errors.append(out['stdout'])
@@ -1654,7 +1662,8 @@ def install(name=None,
cmd,
output_loglevel='trace',
python_shell=False,
- redirect_stderr=True
+ redirect_stderr=True,
+ env={"SALT_RUNNING": '1'}
)
if out['retcode'] != 0:
errors.append(out['stdout'])
@@ -1674,7 +1683,8 @@ def install(name=None,
cmd,
output_loglevel='trace',
python_shell=False,
- redirect_stderr=True
+ redirect_stderr=True,
+ env={"SALT_RUNNING": '1'}
)
if out['retcode'] != 0:
errors.append(out['stdout'])
@@ -1866,7 +1876,8 @@ def upgrade(name=None,
result = __salt__['cmd.run_all'](cmd,
output_loglevel='trace',
- python_shell=False)
+ python_shell=False,
+ env={"SALT_RUNNING": '1'})
__context__.pop('pkg.list_pkgs', None)
new = list_pkgs()
ret = salt.utils.data.compare_dicts(old, new)
@@ -1957,7 +1968,8 @@ def remove(name=None, pkgs=None, **kwargs): # pylint: disable=W0613
out = __salt__['cmd.run_all'](
[_yum(), '-y', 'remove'] + targets,
output_loglevel='trace',
- python_shell=False
+ python_shell=False,
+ env={"SALT_RUNNING": '1'}
)
if out['retcode'] != 0 and out['stderr']:
@@ -2094,7 +2106,8 @@ def hold(name=None, pkgs=None, sources=None, normalize=True, **kwargs): # pylin
else:
out = __salt__['cmd.run_all'](
[_yum(), 'versionlock', target],
- python_shell=False
+ python_shell=False,
+ env={"SALT_RUNNING": '1'}
)
if out['retcode'] == 0:
@@ -2203,7 +2216,8 @@ def unhold(name=None, pkgs=None, sources=None, **kwargs): # pylint: disable=W06
else:
out = __salt__['cmd.run_all'](
[_yum(), 'versionlock', 'delete'] + search_locks,
- python_shell=False
+ python_shell=False,
+ env={"SALT_RUNNING": '1'}
)
if out['retcode'] == 0:
@@ -2254,7 +2268,8 @@ def list_holds(pattern=__HOLD_PATTERN, full=True):
@@ -2298,7 +2299,8 @@ def list_holds(pattern=__HOLD_PATTERN, full=True):
_check_versionlock()
out = __salt__['cmd.run']([_yum(), 'versionlock', 'list'],
@ -168,7 +32,7 @@ index 9ce4926790..51832bf883 100644
ret = []
for line in salt.utils.itertools.split(out, '\n'):
match = _get_hold(line, pattern=pattern, full=full)
@@ -2319,7 +2334,8 @@ def group_list():
@@ -2364,7 +2366,8 @@ def group_list():
out = __salt__['cmd.run_stdout'](
[_yum(), 'grouplist', 'hidden'],
output_loglevel='trace',
@ -178,7 +42,7 @@ index 9ce4926790..51832bf883 100644
)
key = None
for line in salt.utils.itertools.split(out, '\n'):
@@ -2386,7 +2402,8 @@ def group_info(name, expand=False):
@@ -2431,7 +2434,8 @@ def group_info(name, expand=False):
out = __salt__['cmd.run_stdout'](
cmd,
output_loglevel='trace',
@ -188,7 +52,7 @@ index 9ce4926790..51832bf883 100644
)
g_info = {}
@@ -3055,7 +3072,8 @@ def download(*packages):
@@ -3100,7 +3104,8 @@ def download(*packages):
__salt__['cmd.run'](
cmd,
output_loglevel='trace',
@ -198,7 +62,7 @@ index 9ce4926790..51832bf883 100644
)
ret = {}
for dld_result in os.listdir(CACHE_DIR):
@@ -3130,7 +3148,8 @@ def _get_patches(installed_only=False):
@@ -3175,7 +3180,8 @@ def _get_patches(installed_only=False):
cmd = [_yum(), '--quiet', 'updateinfo', 'list', 'all']
ret = __salt__['cmd.run_stdout'](
cmd,
@ -209,6 +73,6 @@ index 9ce4926790..51832bf883 100644
for line in salt.utils.itertools.split(ret, os.linesep):
inst, advisory_id, sev, pkg = re.match(r'([i|\s]) ([^\s]+) +([^\s]+) +([^\s]+)',
--
2.13.7
2.17.1

View File

@ -1,4 +1,4 @@
From ba5171ce35b733a1f7997b4ea038998802b67298 Mon Sep 17 00:00:00 2001
From 4219d3d69799bc20f88eed0a02ef15c932e6782e Mon Sep 17 00:00:00 2001
From: Bo Maryniuk <bo@suse.de>
Date: Thu, 6 Dec 2018 16:26:23 +0100
Subject: [PATCH] Add hold/unhold functions
@ -7,22 +7,22 @@ Add unhold function
Add warnings
---
salt/modules/zypper.py | 88 +++++++++++++++++++++++++++++++++++++++++-
salt/modules/zypperpkg.py | 88 ++++++++++++++++++++++++++++++++++++++-
1 file changed, 87 insertions(+), 1 deletion(-)
diff --git a/salt/modules/zypper.py b/salt/modules/zypper.py
index 6845e44ab6..773354b2f3 100644
--- a/salt/modules/zypper.py
+++ b/salt/modules/zypper.py
diff --git a/salt/modules/zypperpkg.py b/salt/modules/zypperpkg.py
index 001b852fc4..0c26e2214c 100644
--- a/salt/modules/zypperpkg.py
+++ b/salt/modules/zypperpkg.py
@@ -41,6 +41,7 @@ import salt.utils.pkg
import salt.utils.pkg.rpm
import salt.utils.stringutils
import salt.utils.systemd
+import salt.utils.versions
from salt.utils.versions import LooseVersion
import salt.utils.environment
from salt.exceptions import CommandExecutionError, MinionError, SaltInvocationError
@@ -1738,7 +1739,7 @@ def clean_locks():
@@ -1742,7 +1743,7 @@ def clean_locks():
return out
@ -31,7 +31,7 @@ index 6845e44ab6..773354b2f3 100644
'''
Remove specified package lock.
@@ -1750,7 +1751,47 @@ def remove_lock(packages, **kwargs): # pylint: disable=unused-argument
@@ -1754,7 +1755,47 @@ def remove_lock(packages, **kwargs): # pylint: disable=unused-argument
salt '*' pkg.remove_lock <package1>,<package2>,<package3>
salt '*' pkg.remove_lock pkgs='["foo", "bar"]'
'''
@ -79,7 +79,7 @@ index 6845e44ab6..773354b2f3 100644
locks = list_locks()
try:
packages = list(__salt__['pkg_resource.parse_targets'](packages)[0].keys())
@@ -1771,6 +1812,50 @@ def remove_lock(packages, **kwargs): # pylint: disable=unused-argument
@@ -1775,6 +1816,50 @@ def remove_lock(packages, **kwargs): # pylint: disable=unused-argument
return {'removed': len(removed), 'not_found': missing}
@ -130,7 +130,7 @@ index 6845e44ab6..773354b2f3 100644
def add_lock(packages, **kwargs): # pylint: disable=unused-argument
'''
Add a package lock. Specify packages to lock by exact name.
@@ -1783,6 +1868,7 @@ def add_lock(packages, **kwargs): # pylint: disable=unused-argument
@@ -1787,6 +1872,7 @@ def add_lock(packages, **kwargs): # pylint: disable=unused-argument
salt '*' pkg.add_lock <package1>,<package2>,<package3>
salt '*' pkg.add_lock pkgs='["foo", "bar"]'
'''

View File

@ -1,294 +0,0 @@
From cc8d6eaddf59973a94512779853558789b56ca3e Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Pablo=20Su=C3=A1rez=20Hern=C3=A1ndez?=
<psuarezhernandez@suse.com>
Date: Wed, 25 Apr 2018 12:55:36 +0100
Subject: [PATCH] Add 'other' attribute to GECOS fields to avoid
inconsistencies with chfn
Fix unsupported chars checking on GECOS fields
Add unit test for new method 'user.chother'
Do make comparisons in a single line
Add 'other' as valid kwargs for 'user.add' method
---
salt/modules/useradd.py | 41 ++++++++++++++++++++++++++++----------
salt/states/user.py | 28 ++++++++++++++++++--------
tests/unit/modules/test_useradd.py | 36 +++++++++++++++++++++++++++++++--
3 files changed, 84 insertions(+), 21 deletions(-)
diff --git a/salt/modules/useradd.py b/salt/modules/useradd.py
index a61ba0e960..fc3c82a8bc 100644
--- a/salt/modules/useradd.py
+++ b/salt/modules/useradd.py
@@ -60,17 +60,18 @@ def _get_gecos(name):
Retrieve GECOS field info and return it in dictionary form
'''
gecos_field = salt.utils.stringutils.to_unicode(
- pwd.getpwnam(_quote_username(name)).pw_gecos).split(',', 3)
+ pwd.getpwnam(_quote_username(name)).pw_gecos).split(',', 4)
if not gecos_field:
return {}
else:
# Assign empty strings for any unspecified trailing GECOS fields
- while len(gecos_field) < 4:
+ while len(gecos_field) < 5:
gecos_field.append('')
return {'fullname': salt.utils.locales.sdecode(gecos_field[0]),
'roomnumber': salt.utils.locales.sdecode(gecos_field[1]),
'workphone': salt.utils.locales.sdecode(gecos_field[2]),
- 'homephone': salt.utils.locales.sdecode(gecos_field[3])}
+ 'homephone': salt.utils.locales.sdecode(gecos_field[3]),
+ 'other': salt.utils.locales.sdecode(gecos_field[4])}
def _build_gecos(gecos_dict):
@@ -78,10 +79,11 @@ def _build_gecos(gecos_dict):
Accepts a dictionary entry containing GECOS field names and their values,
and returns a full GECOS comment string, to be used with usermod.
'''
- return '{0},{1},{2},{3}'.format(gecos_dict.get('fullname', ''),
- gecos_dict.get('roomnumber', ''),
- gecos_dict.get('workphone', ''),
- gecos_dict.get('homephone', '')).rstrip(',')
+ return '{0},{1},{2},{3},{4}'.format(gecos_dict.get('fullname', ''),
+ gecos_dict.get('roomnumber', ''),
+ gecos_dict.get('workphone', ''),
+ gecos_dict.get('homephone', ''),
+ gecos_dict.get('other', ''),).rstrip(',')
def _update_gecos(name, key, value, root=None):
@@ -124,6 +126,7 @@ def add(name,
roomnumber='',
workphone='',
homephone='',
+ other='',
createhome=True,
loginclass=None,
root=None,
@@ -237,6 +240,8 @@ def add(name,
chworkphone(name, workphone)
if homephone:
chhomephone(name, homephone)
+ if other:
+ chother(name, other)
return True
@@ -507,6 +512,19 @@ def chhomephone(name, homephone):
return _update_gecos(name, 'homephone', homephone)
+def chother(name, other):
+ '''
+ Change the user's other GECOS attribute
+
+ CLI Example:
+
+ .. code-block:: bash
+
+ salt '*' user.chother foobar
+ '''
+ return _update_gecos(name, 'other', other)
+
+
def chloginclass(name, loginclass, root=None):
'''
Change the default login class of the user
@@ -588,9 +606,9 @@ def _format_info(data):
Return user information in a pretty way
'''
# Put GECOS info into a list
- gecos_field = salt.utils.stringutils.to_unicode(data.pw_gecos).split(',', 3)
- # Make sure our list has at least four elements
- while len(gecos_field) < 4:
+ gecos_field = salt.utils.stringutils.to_unicode(data.pw_gecos).split(',', 4)
+ # Make sure our list has at least five elements
+ while len(gecos_field) < 5:
gecos_field.append('')
return {'gid': data.pw_gid,
@@ -603,7 +621,8 @@ def _format_info(data):
'fullname': gecos_field[0],
'roomnumber': gecos_field[1],
'workphone': gecos_field[2],
- 'homephone': gecos_field[3]}
+ 'homephone': gecos_field[3],
+ 'other': gecos_field[4]}
@salt.utils.decorators.path.which('id')
diff --git a/salt/states/user.py b/salt/states/user.py
index f4ae81dd31..34f5a9d541 100644
--- a/salt/states/user.py
+++ b/salt/states/user.py
@@ -68,6 +68,7 @@ def _changes(name,
roomnumber='',
workphone='',
homephone='',
+ other='',
loginclass=None,
date=None,
mindays=0,
@@ -170,24 +171,26 @@ def _changes(name,
# MacOS doesn't have full GECOS support, so check for the "ch" functions
# and ignore these parameters if these functions do not exist.
- if 'user.chroomnumber' in __salt__ \
- and roomnumber is not None:
+ if 'user.chroomnumber' in __salt__ and roomnumber is not None:
roomnumber = sdecode_if_string(roomnumber)
lusr['roomnumber'] = sdecode_if_string(lusr['roomnumber'])
if lusr['roomnumber'] != roomnumber:
change['roomnumber'] = roomnumber
- if 'user.chworkphone' in __salt__ \
- and workphone is not None:
+ if 'user.chworkphone' in __salt__ and workphone is not None:
workphone = sdecode_if_string(workphone)
lusr['workphone'] = sdecode_if_string(lusr['workphone'])
if lusr['workphone'] != workphone:
change['workphone'] = workphone
- if 'user.chhomephone' in __salt__ \
- and homephone is not None:
+ if 'user.chhomephone' in __salt__ and homephone is not None:
homephone = sdecode_if_string(homephone)
lusr['homephone'] = sdecode_if_string(lusr['homephone'])
if lusr['homephone'] != homephone:
change['homephone'] = homephone
+ if 'user.chother' in __salt__ and other is not None:
+ other = sdecode_if_string(other)
+ lusr['other'] = sdecode_if_string(lusr['other'])
+ if lusr['other'] != other:
+ change['other'] = other
# OpenBSD/FreeBSD login class
if __grains__['kernel'] in ('OpenBSD', 'FreeBSD'):
if loginclass:
@@ -236,6 +239,7 @@ def present(name,
roomnumber=None,
workphone=None,
homephone=None,
+ other=None,
loginclass=None,
date=None,
mindays=None,
@@ -377,7 +381,10 @@ def present(name,
homephone
The user's home phone number (not supported in MacOS)
- If GECOS field contains more than 3 commas, this field will have the rest of 'em
+
+ other
+ The user's other attribute (not supported in MacOS)
+ If GECOS field contains more than 4 commas, this field will have the rest of 'em
.. versionchanged:: 2014.7.0
Shadow attribute support added.
@@ -448,6 +455,8 @@ def present(name,
workphone = sdecode(workphone)
if homephone is not None:
homephone = sdecode(homephone)
+ if other is not None:
+ other = sdecode(other)
# createhome not supported on Windows or Mac
if __grains__['kernel'] in ('Darwin', 'Windows'):
@@ -460,7 +469,7 @@ def present(name,
# the comma is used to separate field in GECOS, thus resulting into
# salt adding the end of fullname each time this function is called
- for gecos_field in ['fullname', 'roomnumber', 'workphone']:
+ for gecos_field in [fullname, roomnumber, workphone]:
if isinstance(gecos_field, string_types) and ',' in gecos_field:
ret['comment'] = "Unsupported char ',' in {0}".format(gecos_field)
ret['result'] = False
@@ -519,6 +528,7 @@ def present(name,
roomnumber,
workphone,
homephone,
+ other,
loginclass,
date,
mindays,
@@ -654,6 +664,7 @@ def present(name,
roomnumber,
workphone,
homephone,
+ other,
loginclass,
date,
mindays,
@@ -705,6 +716,7 @@ def present(name,
'roomnumber': roomnumber,
'workphone': workphone,
'homephone': homephone,
+ 'other': other,
'createhome': createhome,
'nologinit': nologinit,
'loginclass': loginclass}
diff --git a/tests/unit/modules/test_useradd.py b/tests/unit/modules/test_useradd.py
index fa30a0df71..e79c78c663 100644
--- a/tests/unit/modules/test_useradd.py
+++ b/tests/unit/modules/test_useradd.py
@@ -46,7 +46,8 @@ class UserAddTestCase(TestCase, LoaderModuleMockMixin):
'fullname': 'root',
'roomnumber': '',
'workphone': '',
- 'homephone': ''}
+ 'homephone': '',
+ 'other': ''}
@classmethod
def tearDownClass(cls):
@@ -96,7 +97,8 @@ class UserAddTestCase(TestCase, LoaderModuleMockMixin):
'fullname': 'root',
'roomnumber': '',
'workphone': '',
- 'homephone': ''}]
+ 'homephone': '',
+ 'other': ''}]
with patch('salt.modules.useradd._format_info', MagicMock(return_value=self.mock_pwall)):
self.assertEqual(useradd.getent(), ret)
@@ -330,6 +332,36 @@ class UserAddTestCase(TestCase, LoaderModuleMockMixin):
with patch.object(useradd, 'info', mock):
self.assertFalse(useradd.chhomephone('salt', 1))
+ # 'chother' function tests: 1
+
+ def test_chother(self):
+ '''
+ Test if the user's other GECOS attribute is changed
+ '''
+ mock = MagicMock(return_value=False)
+ with patch.object(useradd, '_get_gecos', mock):
+ self.assertFalse(useradd.chother('salt', 1))
+
+ mock = MagicMock(return_value={'other': 'foobar'})
+ with patch.object(useradd, '_get_gecos', mock):
+ self.assertTrue(useradd.chother('salt', 'foobar'))
+
+ mock = MagicMock(return_value={'other': 'foobar2'})
+ with patch.object(useradd, '_get_gecos', mock):
+ mock = MagicMock(return_value=None)
+ with patch.dict(useradd.__salt__, {'cmd.run': mock}):
+ mock = MagicMock(return_value={'other': 'foobar3'})
+ with patch.object(useradd, 'info', mock):
+ self.assertFalse(useradd.chother('salt', 'foobar'))
+
+ mock = MagicMock(return_value={'other': 'foobar3'})
+ with patch.object(useradd, '_get_gecos', mock):
+ mock = MagicMock(return_value=None)
+ with patch.dict(useradd.__salt__, {'cmd.run': mock}):
+ mock = MagicMock(return_value={'other': 'foobar3'})
+ with patch.object(useradd, 'info', mock):
+ self.assertFalse(useradd.chother('salt', 'foobar'))
+
# 'info' function tests: 1
@skipIf(HAS_PWD is False, 'The pwd module is not available')
--
2.13.7

File diff suppressed because it is too large Load Diff

File diff suppressed because it is too large Load Diff

View File

@ -0,0 +1,214 @@
From 0fd1e40e7149dd1a33f9a4497fa4e31c78ddfba7 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?C=C3=A9dric=20Bosdonnat?= <cbosdonnat@suse.com>
Date: Thu, 18 Oct 2018 13:32:59 +0200
Subject: [PATCH] Add virt.all_capabilities
In order to get all possible capabilities from a host, the user has to
call virt.capabilities, and then loop over the guests and domains
before calling virt.domain_capabilities for each of them.
This commit embeds all this logic to get them all in a single
virt.all_capabilities call.
---
salt/modules/virt.py | 107 +++++++++++++++++++++++---------
tests/unit/modules/test_virt.py | 56 +++++++++++++++++
2 files changed, 134 insertions(+), 29 deletions(-)
diff --git a/salt/modules/virt.py b/salt/modules/virt.py
index b45c5f522d..0921122a8a 100644
--- a/salt/modules/virt.py
+++ b/salt/modules/virt.py
@@ -4094,37 +4094,10 @@ def _parse_caps_loader(node):
return result
-def domain_capabilities(emulator=None, arch=None, machine=None, domain=None, **kwargs):
+def _parse_domain_caps(caps):
'''
- Return the domain capabilities given an emulator, architecture, machine or virtualization type.
-
- .. versionadded:: 2019.2.0
-
- :param emulator: return the capabilities for the given emulator binary
- :param arch: return the capabilities for the given CPU architecture
- :param machine: return the capabilities for the given emulated machine type
- :param domain: return the capabilities for the given virtualization type.
- :param connection: libvirt connection URI, overriding defaults
- :param username: username to connect with, overriding defaults
- :param password: password to connect with, overriding defaults
-
- The list of the possible emulator, arch, machine and domain can be found in
- the host capabilities output.
-
- If none of the parameters is provided the libvirt default domain capabilities
- will be returned.
-
- CLI Example:
-
- .. code-block:: bash
-
- salt '*' virt.domain_capabilities arch='x86_64' domain='kvm'
-
+ Parse the XML document of domain capabilities into a structure.
'''
- conn = __get_conn(**kwargs)
- caps = ElementTree.fromstring(conn.getDomainCapabilities(emulator, arch, machine, domain, 0))
- conn.close()
-
result = {
'emulator': caps.find('path').text if caps.find('path') is not None else None,
'domain': caps.find('domain').text if caps.find('domain') is not None else None,
@@ -4164,6 +4137,82 @@ def domain_capabilities(emulator=None, arch=None, machine=None, domain=None, **k
return result
+def domain_capabilities(emulator=None, arch=None, machine=None, domain=None, **kwargs):
+ '''
+ Return the domain capabilities given an emulator, architecture, machine or virtualization type.
+
+ .. versionadded:: Fluorine
+
+ :param emulator: return the capabilities for the given emulator binary
+ :param arch: return the capabilities for the given CPU architecture
+ :param machine: return the capabilities for the given emulated machine type
+ :param domain: return the capabilities for the given virtualization type.
+ :param connection: libvirt connection URI, overriding defaults
+ :param username: username to connect with, overriding defaults
+ :param password: password to connect with, overriding defaults
+
+ The list of the possible emulator, arch, machine and domain can be found in
+ the host capabilities output.
+
+ If none of the parameters is provided, the libvirt default one is returned.
+
+ CLI Example:
+
+ .. code-block:: bash
+
+ salt '*' virt.domain_capabilities arch='x86_64' domain='kvm'
+
+ '''
+ conn = __get_conn(**kwargs)
+ result = []
+ try:
+ caps = ElementTree.fromstring(conn.getDomainCapabilities(emulator, arch, machine, domain, 0))
+ result = _parse_domain_caps(caps)
+ finally:
+ conn.close()
+
+ return result
+
+
+def all_capabilities(**kwargs):
+ '''
+ Return the host and domain capabilities in a single call.
+
+ .. versionadded:: Neon
+
+ :param connection: libvirt connection URI, overriding defaults
+ :param username: username to connect with, overriding defaults
+ :param password: password to connect with, overriding defaults
+
+ CLI Example:
+
+ .. code-block:: bash
+
+ salt '*' virt.all_capabilities
+
+ '''
+ conn = __get_conn(**kwargs)
+ result = {}
+ try:
+ host_caps = ElementTree.fromstring(conn.getCapabilities())
+ domains = [[(guest.get('arch', {}).get('name', None), key)
+ for key in guest.get('arch', {}).get('domains', {}).keys()]
+ for guest in [_parse_caps_guest(guest) for guest in host_caps.findall('guest')]]
+ flattened = [pair for item in (x for x in domains) for pair in item]
+ result = {
+ 'host': {
+ 'host': _parse_caps_host(host_caps.find('host')),
+ 'guests': [_parse_caps_guest(guest) for guest in host_caps.findall('guest')]
+ },
+ 'domains': [_parse_domain_caps(ElementTree.fromstring(
+ conn.getDomainCapabilities(None, arch, None, domain)))
+ for (arch, domain) in flattened]}
+ finally:
+ conn.close()
+
+ return result
+
+
def cpu_baseline(full=False, migratable=False, out='libvirt', **kwargs):
'''
Return the optimal 'custom' CPU baseline config for VM's on this minion
diff --git a/tests/unit/modules/test_virt.py b/tests/unit/modules/test_virt.py
index 3a69adece1..bd34962a6a 100644
--- a/tests/unit/modules/test_virt.py
+++ b/tests/unit/modules/test_virt.py
@@ -2204,6 +2204,62 @@ class VirtTestCase(TestCase, LoaderModuleMockMixin):
self.assertEqual(expected, caps)
+ def test_all_capabilities(self):
+ '''
+ Test the virt.domain_capabilities default output
+ '''
+ domainXml = '''
+<domainCapabilities>
+ <path>/usr/bin/qemu-system-x86_64</path>
+ <domain>kvm</domain>
+ <machine>virt-2.12</machine>
+ <arch>x86_64</arch>
+ <vcpu max='255'/>
+ <iothreads supported='yes'/>
+</domainCapabilities>
+ '''
+ hostXml = '''
+<capabilities>
+ <host>
+ <uuid>44454c4c-3400-105a-8033-b3c04f4b344a</uuid>
+ <cpu>
+ <arch>x86_64</arch>
+ <model>Nehalem</model>
+ <vendor>Intel</vendor>
+ <microcode version='25'/>
+ <topology sockets='1' cores='4' threads='2'/>
+ </cpu>
+ </host>
+ <guest>
+ <os_type>hvm</os_type>
+ <arch name='x86_64'>
+ <wordsize>64</wordsize>
+ <emulator>/usr/bin/qemu-system-x86_64</emulator>
+ <machine maxCpus='255'>pc-i440fx-2.6</machine>
+ <machine canonical='pc-i440fx-2.6' maxCpus='255'>pc</machine>
+ <machine maxCpus='255'>pc-0.12</machine>
+ <domain type='qemu'/>
+ <domain type='kvm'>
+ <emulator>/usr/bin/qemu-kvm</emulator>
+ <machine maxCpus='255'>pc-i440fx-2.6</machine>
+ <machine canonical='pc-i440fx-2.6' maxCpus='255'>pc</machine>
+ <machine maxCpus='255'>pc-0.12</machine>
+ </domain>
+ </arch>
+ </guest>
+</capabilities>
+ '''
+
+ # pylint: disable=no-member
+ self.mock_conn.getCapabilities.return_value = hostXml
+ self.mock_conn.getDomainCapabilities.side_effect = [
+ domainXml, domainXml.replace('<domain>kvm', '<domain>qemu')]
+ # pylint: enable=no-member
+
+ caps = virt.all_capabilities()
+ self.assertEqual('44454c4c-3400-105a-8033-b3c04f4b344a', caps['host']['host']['uuid'])
+ self.assertEqual(set(['qemu', 'kvm']), set([domainCaps['domain'] for domainCaps in caps['domains']]))
+
def test_network_tag(self):
'''
Test virt._get_net_xml() with VLAN tag
--
2.20.1

View File

@ -0,0 +1,334 @@
From 2536ee56bd0060c024994f97388f9975ccbe1ee1 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?C=C3=A9dric=20Bosdonnat?= <cbosdonnat@suse.com>
Date: Fri, 15 Feb 2019 17:28:00 +0100
Subject: [PATCH] Add virt.volume_infos() and virt.volume_delete()
Expose more functions to handle libvirt storage volumes.
virt.volume_infos() expose informations of the volumes, either for one or
all the volumes. Among the provided data, this function exposes the
names of the virtual machines using the volumes of file type.
virt.volume_delete() allows removing a given volume.
---
salt/modules/virt.py | 113 ++++++++++++++++++++
tests/unit/modules/test_virt.py | 184 ++++++++++++++++++++++++++++++++
2 files changed, 297 insertions(+)
diff --git a/salt/modules/virt.py b/salt/modules/virt.py
index 0921122a8a..4a301f289c 100644
--- a/salt/modules/virt.py
+++ b/salt/modules/virt.py
@@ -4988,3 +4988,116 @@ def pool_list_volumes(name, **kwargs):
return pool.listVolumes()
finally:
conn.close()
+
+
+def _get_storage_vol(conn, pool, vol):
+ '''
+ Helper function getting a storage volume. Will throw a libvirtError
+ if the pool or the volume couldn't be found.
+ '''
+ pool_obj = conn.storagePoolLookupByName(pool)
+ return pool_obj.storageVolLookupByName(vol)
+
+
+def _get_all_volumes_paths(conn):
+ '''
+ Extract the path and backing stores path of all volumes.
+
+ :param conn: libvirt connection to use
+ '''
+ volumes = [vol for l in [obj.listAllVolumes() for obj in conn.listAllStoragePools()] for vol in l]
+ return {vol.path(): [path.text for path in ElementTree.fromstring(vol.XMLDesc()).findall('.//backingStore/path')]
+ for vol in volumes}
+
+
+def volume_infos(pool, volume, **kwargs):
+ '''
+ Provide details on a storage volume. If no volume name is provided, the infos
+ all the volumes contained in the pool are provided. If no pool is provided,
+ the infos of the volumes of all pools are output.
+
+ :param pool: libvirt storage pool name
+ :param volume: name of the volume to get infos from
+ :param connection: libvirt connection URI, overriding defaults
+ :param username: username to connect with, overriding defaults
+ :param password: password to connect with, overriding defaults
+
+ .. versionadded:: Neon
+
+ CLI Example:
+
+ .. code-block:: bash
+
+ salt "*" virt.volume_infos <pool> <volume>
+ '''
+ result = {}
+ conn = __get_conn(**kwargs)
+ try:
+ backing_stores = _get_all_volumes_paths(conn)
+ disks = {domain.name():
+ {node.get('file') for node
+ in ElementTree.fromstring(domain.XMLDesc(0)).findall('.//disk/source/[@file]')}
+ for domain in _get_domain(conn)}
+
+ def _volume_extract_infos(vol):
+ '''
+ Format the volume info dictionary
+
+ :param vol: the libvirt storage volume object.
+ '''
+ types = ['file', 'block', 'dir', 'network', 'netdir', 'ploop']
+ infos = vol.info()
+
+ # If we have a path, check its use.
+ used_by = []
+ if vol.path():
+ as_backing_store = {path for (path, all_paths) in backing_stores.items() if vol.path() in all_paths}
+ used_by = [vm_name for (vm_name, vm_disks) in disks.items()
+ if vm_disks & as_backing_store or vol.path() in vm_disks]
+
+ return {
+ 'type': types[infos[0]] if infos[0] < len(types) else 'unknown',
+ 'key': vol.key(),
+ 'path': vol.path(),
+ 'capacity': infos[1],
+ 'allocation': infos[2],
+ 'used_by': used_by,
+ }
+
+ pools = [obj for obj in conn.listAllStoragePools() if pool is None or obj.name() == pool]
+ vols = {pool_obj.name(): {vol.name(): _volume_extract_infos(vol)
+ for vol in pool_obj.listAllVolumes()
+ if volume is None or vol.name() == volume}
+ for pool_obj in pools}
+ return {pool_name: volumes for (pool_name, volumes) in vols.items() if volumes}
+ except libvirt.libvirtError as err:
+ log.debug('Silenced libvirt error: %s', str(err))
+ finally:
+ conn.close()
+ return result
+
+
+def volume_delete(pool, volume, **kwargs):
+ '''
+ Delete a libvirt managed volume.
+
+ :param pool: libvirt storage pool name
+ :param volume: name of the volume to delete
+ :param connection: libvirt connection URI, overriding defaults
+ :param username: username to connect with, overriding defaults
+ :param password: password to connect with, overriding defaults
+
+ .. versionadded:: Neon
+
+ CLI Example:
+
+ .. code-block:: bash
+
+ salt "*" virt.volume_delete <pool> <volume>
+ '''
+ conn = __get_conn(**kwargs)
+ try:
+ vol = _get_storage_vol(conn, pool, volume)
+ return not bool(vol.delete())
+ finally:
+ conn.close()
diff --git a/tests/unit/modules/test_virt.py b/tests/unit/modules/test_virt.py
index bd34962a6a..55005f1d04 100644
--- a/tests/unit/modules/test_virt.py
+++ b/tests/unit/modules/test_virt.py
@@ -2698,3 +2698,187 @@ class VirtTestCase(TestCase, LoaderModuleMockMixin):
self.mock_conn.storagePoolLookupByName.return_value = mock_pool
# pylint: enable=no-member
self.assertEqual(names, virt.pool_list_volumes('default'))
+
+ def test_volume_infos(self):
+ '''
+ Test virt.volume_infos
+ '''
+ vms_disks = [
+ '''
+ <disk type='file' device='disk'>
+ <driver name='qemu' type='qcow2'/>
+ <source file='/path/to/vol0.qcow2'/>
+ <target dev='vda' bus='virtio'/>
+ </disk>
+ ''',
+ '''
+ <disk type='file' device='disk'>
+ <driver name='qemu' type='qcow2'/>
+ <source file='/path/to/vol3.qcow2'/>
+ <target dev='vda' bus='virtio'/>
+ </disk>
+ ''',
+ '''
+ <disk type='file' device='disk'>
+ <driver name='qemu' type='qcow2'/>
+ <source file='/path/to/vol2.qcow2'/>
+ <target dev='vda' bus='virtio'/>
+ </disk>
+ '''
+ ]
+ mock_vms = []
+ for idx, disk in enumerate(vms_disks):
+ vm = MagicMock()
+ # pylint: disable=no-member
+ vm.name.return_value = 'vm{0}'.format(idx)
+ vm.XMLDesc.return_value = '''
+ <domain type='kvm' id='1'>
+ <name>vm{0}</name>
+ <devices>{1}</devices>
+ </domain>
+ '''.format(idx, disk)
+ # pylint: enable=no-member
+ mock_vms.append(vm)
+
+ mock_pool_data = [
+ {
+ 'name': 'pool0',
+ 'volumes': [
+ {
+ 'key': '/key/of/vol0',
+ 'name': 'vol0',
+ 'path': '/path/to/vol0.qcow2',
+ 'info': [0, 123456789, 123456],
+ 'backingStore': None
+ }
+ ]
+ },
+ {
+ 'name': 'pool1',
+ 'volumes': [
+ {
+ 'key': '/key/of/vol1',
+ 'name': 'vol1',
+ 'path': '/path/to/vol1.qcow2',
+ 'info': [0, 12345, 1234],
+ 'backingStore': None
+ },
+ {
+ 'key': '/key/of/vol2',
+ 'name': 'vol2',
+ 'path': '/path/to/vol2.qcow2',
+ 'info': [0, 12345, 1234],
+ 'backingStore': '/path/to/vol0.qcow2'
+ },
+ ],
+ }
+ ]
+ mock_pools = []
+ for pool_data in mock_pool_data:
+ mock_pool = MagicMock()
+ mock_pool.name.return_value = pool_data['name'] # pylint: disable=no-member
+ mock_volumes = []
+ for vol_data in pool_data['volumes']:
+ mock_volume = MagicMock()
+ # pylint: disable=no-member
+ mock_volume.name.return_value = vol_data['name']
+ mock_volume.key.return_value = vol_data['key']
+ mock_volume.path.return_value = '/path/to/{0}.qcow2'.format(vol_data['name'])
+ mock_volume.info.return_value = vol_data['info']
+ backing_store = '''
+ <backingStore>
+ <format>qcow2</format>
+ <path>{0}</path>
+ </backingStore>
+ '''.format(vol_data['backingStore']) if vol_data['backingStore'] else '<backingStore/>'
+ mock_volume.XMLDesc.return_value = '''
+ <volume type='file'>
+ <name>{0}</name>
+ <target>
+ <format>qcow2</format>
+ <path>/path/to/{0}.qcow2</path>
+ </target>
+ {1}
+ </volume>
+ '''.format(vol_data['name'], backing_store)
+ mock_volumes.append(mock_volume)
+ # pylint: enable=no-member
+ mock_pool.listAllVolumes.return_value = mock_volumes # pylint: disable=no-member
+ mock_pools.append(mock_pool)
+
+ self.mock_conn.listAllStoragePools.return_value = mock_pools # pylint: disable=no-member
+
+ with patch('salt.modules.virt._get_domain', MagicMock(return_value=mock_vms)):
+ actual = virt.volume_infos('pool0', 'vol0')
+ self.assertEqual(1, len(actual.keys()))
+ self.assertEqual(1, len(actual['pool0'].keys()))
+ self.assertEqual(['vm0', 'vm2'], sorted(actual['pool0']['vol0']['used_by']))
+ self.assertEqual('/path/to/vol0.qcow2', actual['pool0']['vol0']['path'])
+ self.assertEqual('file', actual['pool0']['vol0']['type'])
+ self.assertEqual('/key/of/vol0', actual['pool0']['vol0']['key'])
+ self.assertEqual(123456789, actual['pool0']['vol0']['capacity'])
+ self.assertEqual(123456, actual['pool0']['vol0']['allocation'])
+
+ self.assertEqual(virt.volume_infos('pool1', None), {
+ 'pool1': {
+ 'vol1': {
+ 'type': 'file',
+ 'key': '/key/of/vol1',
+ 'path': '/path/to/vol1.qcow2',
+ 'capacity': 12345,
+ 'allocation': 1234,
+ 'used_by': [],
+ },
+ 'vol2': {
+ 'type': 'file',
+ 'key': '/key/of/vol2',
+ 'path': '/path/to/vol2.qcow2',
+ 'capacity': 12345,
+ 'allocation': 1234,
+ 'used_by': ['vm2'],
+ }
+ }
+ })
+
+ self.assertEqual(virt.volume_infos(None, 'vol2'), {
+ 'pool1': {
+ 'vol2': {
+ 'type': 'file',
+ 'key': '/key/of/vol2',
+ 'path': '/path/to/vol2.qcow2',
+ 'capacity': 12345,
+ 'allocation': 1234,
+ 'used_by': ['vm2'],
+ }
+ }
+ })
+
+ def test_volume_delete(self):
+ '''
+ Test virt.volume_delete
+ '''
+ mock_delete = MagicMock(side_effect=[0, 1])
+ mock_volume = MagicMock()
+ mock_volume.delete = mock_delete # pylint: disable=no-member
+ mock_pool = MagicMock()
+ # pylint: disable=no-member
+ mock_pool.storageVolLookupByName.side_effect = [
+ mock_volume,
+ mock_volume,
+ self.mock_libvirt.libvirtError("Missing volume"),
+ mock_volume,
+ ]
+ self.mock_conn.storagePoolLookupByName.side_effect = [
+ mock_pool,
+ mock_pool,
+ mock_pool,
+ self.mock_libvirt.libvirtError("Missing pool"),
+ ]
+
+ # pylint: enable=no-member
+ self.assertTrue(virt.volume_delete('default', 'test_volume'))
+ self.assertFalse(virt.volume_delete('default', 'test_volume'))
+ with self.assertRaises(self.mock_libvirt.libvirtError):
+ virt.volume_delete('default', 'missing')
+ virt.volume_delete('missing', 'test_volume')
+ self.assertEqual(mock_delete.call_count, 2)
--
2.20.1

View File

@ -1,28 +0,0 @@
From 816c7ec3b72510346deef17deb2990a09ddab03a Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Pablo=20Su=C3=A1rez=20Hern=C3=A1ndez?=
<psuarezhernandez@suse.com>
Date: Thu, 31 May 2018 10:58:16 +0100
Subject: [PATCH] Align SUSE salt-master.service 'LimitNOFILES' limit
with upstream Salt
---
pkg/suse/salt-master.service | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/pkg/suse/salt-master.service b/pkg/suse/salt-master.service
index c0ea4606d8..b31c1a1373 100644
--- a/pkg/suse/salt-master.service
+++ b/pkg/suse/salt-master.service
@@ -4,7 +4,7 @@ Documentation=man:salt-master(1) file:///usr/share/doc/salt/html/contents.html h
After=network.target
[Service]
-LimitNOFILE=16384
+LimitNOFILE=100000
Type=simple
ExecStart=/usr/bin/salt-master
TasksMax=infinity
--
2.13.7

View File

@ -0,0 +1,960 @@
From dfd16dd5968aae96e36e0dee412864fc765b62fb Mon Sep 17 00:00:00 2001
From: Mihai Dinca <mdinca@suse.de>
Date: Fri, 16 Nov 2018 17:05:29 +0100
Subject: [PATCH] Async batch implementation
Add find_job checks
Check if should close on all events
Make batch_delay a request parameter
Allow multiple event handlers
Use config value for gather_job_timeout when not in payload
Add async batch unittests
Allow metadata to pass
Pass metadata only to batch jobs
Add the metadata to the start/done events
Pass only metadata not all **kwargs
Add separate batch presence_ping timeout
---
salt/auth/__init__.py | 4 +-
salt/cli/batch.py | 91 ++++++--
salt/cli/batch_async.py | 227 +++++++++++++++++++
salt/client/__init__.py | 44 +---
salt/master.py | 25 ++
salt/netapi/__init__.py | 3 +-
salt/transport/ipc.py | 11 +-
salt/utils/event.py | 11 +-
tests/unit/cli/test_batch_async.py | 351 +++++++++++++++++++++++++++++
9 files changed, 707 insertions(+), 60 deletions(-)
create mode 100644 salt/cli/batch_async.py
create mode 100644 tests/unit/cli/test_batch_async.py
diff --git a/salt/auth/__init__.py b/salt/auth/__init__.py
index 61fbb018fd..a8aefa7091 100644
--- a/salt/auth/__init__.py
+++ b/salt/auth/__init__.py
@@ -51,7 +51,9 @@ AUTH_INTERNAL_KEYWORDS = frozenset([
'metadata',
'print_event',
'raw',
- 'yield_pub_data'
+ 'yield_pub_data',
+ 'batch',
+ 'batch_delay'
])
diff --git a/salt/cli/batch.py b/salt/cli/batch.py
index e3a7bf9bcf..4bd07f584a 100644
--- a/salt/cli/batch.py
+++ b/salt/cli/batch.py
@@ -26,6 +26,79 @@ import logging
log = logging.getLogger(__name__)
+def get_bnum(opts, minions, quiet):
+ '''
+ Return the active number of minions to maintain
+ '''
+ partition = lambda x: float(x) / 100.0 * len(minions)
+ try:
+ if '%' in opts['batch']:
+ res = partition(float(opts['batch'].strip('%')))
+ if res < 1:
+ return int(math.ceil(res))
+ else:
+ return int(res)
+ else:
+ return int(opts['batch'])
+ except ValueError:
+ if not quiet:
+ salt.utils.stringutils.print_cli('Invalid batch data sent: {0}\nData must be in the '
+ 'form of %10, 10% or 3'.format(opts['batch']))
+
+
+def batch_get_opts(
+ tgt,
+ fun,
+ batch,
+ parent_opts,
+ arg=(),
+ tgt_type='glob',
+ ret='',
+ kwarg=None,
+ **kwargs):
+ # We need to re-import salt.utils.args here
+ # even though it has already been imported.
+ # when cmd_batch is called via the NetAPI
+ # the module is unavailable.
+ import salt.utils.args
+
+ arg = salt.utils.args.condition_input(arg, kwarg)
+ opts = {'tgt': tgt,
+ 'fun': fun,
+ 'arg': arg,
+ 'tgt_type': tgt_type,
+ 'ret': ret,
+ 'batch': batch,
+ 'failhard': kwargs.get('failhard', False),
+ 'raw': kwargs.get('raw', False)}
+
+ if 'timeout' in kwargs:
+ opts['timeout'] = kwargs['timeout']
+ if 'gather_job_timeout' in kwargs:
+ opts['gather_job_timeout'] = kwargs['gather_job_timeout']
+ if 'batch_wait' in kwargs:
+ opts['batch_wait'] = int(kwargs['batch_wait'])
+
+ for key, val in six.iteritems(parent_opts):
+ if key not in opts:
+ opts[key] = val
+
+ return opts
+
+
+def batch_get_eauth(kwargs):
+ eauth = {}
+ if 'eauth' in kwargs:
+ eauth['eauth'] = kwargs.pop('eauth')
+ if 'username' in kwargs:
+ eauth['username'] = kwargs.pop('username')
+ if 'password' in kwargs:
+ eauth['password'] = kwargs.pop('password')
+ if 'token' in kwargs:
+ eauth['token'] = kwargs.pop('token')
+ return eauth
+
+
class Batch(object):
'''
Manage the execution of batch runs
@@ -80,23 +153,7 @@ class Batch(object):
return (list(fret), ping_gen, nret.difference(fret))
def get_bnum(self):
- '''
- Return the active number of minions to maintain
- '''
- partition = lambda x: float(x) / 100.0 * len(self.minions)
- try:
- if '%' in self.opts['batch']:
- res = partition(float(self.opts['batch'].strip('%')))
- if res < 1:
- return int(math.ceil(res))
- else:
- return int(res)
- else:
- return int(self.opts['batch'])
- except ValueError:
- if not self.quiet:
- salt.utils.stringutils.print_cli('Invalid batch data sent: {0}\nData must be in the '
- 'form of %10, 10% or 3'.format(self.opts['batch']))
+ return get_bnum(self.opts, self.minions, self.quiet)
def __update_wait(self, wait):
now = datetime.now()
diff --git a/salt/cli/batch_async.py b/salt/cli/batch_async.py
new file mode 100644
index 0000000000..3160d46d8b
--- /dev/null
+++ b/salt/cli/batch_async.py
@@ -0,0 +1,227 @@
+# -*- coding: utf-8 -*-
+'''
+Execute a job on the targeted minions by using a moving window of fixed size `batch`.
+'''
+
+# Import python libs
+from __future__ import absolute_import, print_function, unicode_literals
+import tornado
+
+# Import salt libs
+import salt.client
+
+# pylint: enable=import-error,no-name-in-module,redefined-builtin
+import logging
+import fnmatch
+
+log = logging.getLogger(__name__)
+
+from salt.cli.batch import get_bnum, batch_get_opts, batch_get_eauth
+
+
+class BatchAsync(object):
+ '''
+ Run a job on the targeted minions by using a moving window of fixed size `batch`.
+
+ ``BatchAsync`` is used to execute a job on the targeted minions by keeping
+ the number of concurrent running minions to the size of `batch` parameter.
+
+ The control parameters are:
+ - batch: number/percentage of concurrent running minions
+ - batch_delay: minimum wait time between batches
+ - batch_presence_ping_timeout: time to wait for presence pings before starting the batch
+ - gather_job_timeout: `find_job` timeout
+ - timeout: time to wait before firing a `find_job`
+
+ When the batch stars, a `start` event is fired:
+ - tag: salt/batch/<batch-jid>/start
+ - data: {
+ "available_minions": self.minions,
+ "down_minions": self.down_minions
+ }
+
+ When the batch ends, an `done` event is fired:
+ - tag: salt/batch/<batch-jid>/done
+ - data: {
+ "available_minions": self.minions,
+ "down_minions": self.down_minions,
+ "done_minions": self.done_minions,
+ "timedout_minions": self.timedout_minions
+ }
+ '''
+ def __init__(self, parent_opts, jid_gen, clear_load):
+ ioloop = tornado.ioloop.IOLoop.current()
+ self.local = salt.client.get_local_client(parent_opts['conf_file'])
+ if 'gather_job_timeout' in clear_load['kwargs']:
+ clear_load['gather_job_timeout'] = clear_load['kwargs'].pop('gather_job_timeout')
+ else:
+ clear_load['gather_job_timeout'] = self.local.opts['gather_job_timeout']
+ self.batch_presence_ping_timeout = clear_load['kwargs'].get('batch_presence_ping_timeout', None)
+ self.batch_delay = clear_load['kwargs'].get('batch_delay', 1)
+ self.opts = batch_get_opts(
+ clear_load.pop('tgt'),
+ clear_load.pop('fun'),
+ clear_load['kwargs'].pop('batch'),
+ self.local.opts,
+ **clear_load)
+ self.eauth = batch_get_eauth(clear_load['kwargs'])
+ self.metadata = clear_load['kwargs'].get('metadata', {})
+ self.minions = set()
+ self.down_minions = set()
+ self.timedout_minions = set()
+ self.done_minions = set()
+ self.active = set()
+ self.initialized = False
+ self.ping_jid = jid_gen()
+ self.batch_jid = jid_gen()
+ self.find_job_jid = jid_gen()
+ self.find_job_returned = set()
+ self.event = salt.utils.event.get_event(
+ 'master',
+ self.opts['sock_dir'],
+ self.opts['transport'],
+ opts=self.opts,
+ listen=True,
+ io_loop=ioloop,
+ keep_loop=True)
+
+ def __set_event_handler(self):
+ ping_return_pattern = 'salt/job/{0}/ret/*'.format(self.ping_jid)
+ batch_return_pattern = 'salt/job/{0}/ret/*'.format(self.batch_jid)
+ find_job_return_pattern = 'salt/job/{0}/ret/*'.format(self.find_job_jid)
+ self.event.subscribe(ping_return_pattern, match_type='glob')
+ self.event.subscribe(batch_return_pattern, match_type='glob')
+ self.event.subscribe(find_job_return_pattern, match_type='glob')
+ self.event.patterns = {
+ (ping_return_pattern, 'ping_return'),
+ (batch_return_pattern, 'batch_run'),
+ (find_job_return_pattern, 'find_job_return')
+ }
+ self.event.set_event_handler(self.__event_handler)
+
+ def __event_handler(self, raw):
+ if not self.event:
+ return
+ mtag, data = self.event.unpack(raw, self.event.serial)
+ for (pattern, op) in self.event.patterns:
+ if fnmatch.fnmatch(mtag, pattern):
+ minion = data['id']
+ if op == 'ping_return':
+ self.minions.add(minion)
+ self.down_minions.remove(minion)
+ if not self.down_minions:
+ self.event.io_loop.spawn_callback(self.start_batch)
+ elif op == 'find_job_return':
+ self.find_job_returned.add(minion)
+ elif op == 'batch_run':
+ if minion in self.active:
+ self.active.remove(minion)
+ self.done_minions.add(minion)
+ # call later so that we maybe gather more returns
+ self.event.io_loop.call_later(self.batch_delay, self.schedule_next)
+
+ if self.initialized and self.done_minions == self.minions.difference(self.timedout_minions):
+ self.end_batch()
+
+ def _get_next(self):
+ to_run = self.minions.difference(
+ self.done_minions).difference(
+ self.active).difference(
+ self.timedout_minions)
+ next_batch_size = min(
+ len(to_run), # partial batch (all left)
+ self.batch_size - len(self.active) # full batch or available slots
+ )
+ return set(list(to_run)[:next_batch_size])
+
+ @tornado.gen.coroutine
+ def check_find_job(self, minions):
+ did_not_return = minions.difference(self.find_job_returned)
+ if did_not_return:
+ for minion in did_not_return:
+ if minion in self.find_job_returned:
+ self.find_job_returned.remove(minion)
+ if minion in self.active:
+ self.active.remove(minion)
+ self.timedout_minions.add(minion)
+ running = minions.difference(did_not_return).difference(self.done_minions).difference(self.timedout_minions)
+ if running:
+ self.event.io_loop.add_callback(self.find_job, running)
+
+ @tornado.gen.coroutine
+ def find_job(self, minions):
+ not_done = minions.difference(self.done_minions)
+ ping_return = yield self.local.run_job_async(
+ not_done,
+ 'saltutil.find_job',
+ [self.batch_jid],
+ 'list',
+ gather_job_timeout=self.opts['gather_job_timeout'],
+ jid=self.find_job_jid,
+ **self.eauth)
+ self.event.io_loop.call_later(
+ self.opts['gather_job_timeout'],
+ self.check_find_job,
+ not_done)
+
+ @tornado.gen.coroutine
+ def start(self):
+ self.__set_event_handler()
+ #start batching even if not all minions respond to ping
+ self.event.io_loop.call_later(
+ self.batch_presence_ping_timeout or self.opts['gather_job_timeout'],
+ self.start_batch)
+ ping_return = yield self.local.run_job_async(
+ self.opts['tgt'],
+ 'test.ping',
+ [],
+ self.opts.get(
+ 'selected_target_option',
+ self.opts.get('tgt_type', 'glob')
+ ),
+ gather_job_timeout=self.opts['gather_job_timeout'],
+ jid=self.ping_jid,
+ metadata=self.metadata,
+ **self.eauth)
+ self.down_minions = set(ping_return['minions'])
+
+ @tornado.gen.coroutine
+ def start_batch(self):
+ if not self.initialized:
+ self.batch_size = get_bnum(self.opts, self.minions, True)
+ self.initialized = True
+ data = {
+ "available_minions": self.minions,
+ "down_minions": self.down_minions,
+ "metadata": self.metadata
+ }
+ self.event.fire_event(data, "salt/batch/{0}/start".format(self.batch_jid))
+ yield self.schedule_next()
+
+ def end_batch(self):
+ data = {
+ "available_minions": self.minions,
+ "down_minions": self.down_minions,
+ "done_minions": self.done_minions,
+ "timedout_minions": self.timedout_minions,
+ "metadata": self.metadata
+ }
+ self.event.fire_event(data, "salt/batch/{0}/done".format(self.batch_jid))
+ self.event.remove_event_handler(self.__event_handler)
+
+ @tornado.gen.coroutine
+ def schedule_next(self):
+ next_batch = self._get_next()
+ if next_batch:
+ yield self.local.run_job_async(
+ next_batch,
+ self.opts['fun'],
+ self.opts['arg'],
+ 'list',
+ raw=self.opts.get('raw', False),
+ ret=self.opts.get('return', ''),
+ gather_job_timeout=self.opts['gather_job_timeout'],
+ jid=self.batch_jid,
+ metadata=self.metadata)
+ self.event.io_loop.call_later(self.opts['timeout'], self.find_job, set(next_batch))
+ self.active = self.active.union(next_batch)
diff --git a/salt/client/__init__.py b/salt/client/__init__.py
index 9f0903c7f0..8b37422cbf 100644
--- a/salt/client/__init__.py
+++ b/salt/client/__init__.py
@@ -531,45 +531,14 @@ class LocalClient(object):
{'dave': {...}}
{'stewart': {...}}
'''
- # We need to re-import salt.utils.args here
- # even though it has already been imported.
- # when cmd_batch is called via the NetAPI
- # the module is unavailable.
- import salt.utils.args
-
# Late import - not used anywhere else in this file
import salt.cli.batch
+ opts = salt.cli.batch.batch_get_opts(
+ tgt, fun, batch, self.opts,
+ arg=arg, tgt_type=tgt_type, ret=ret, kwarg=kwarg, **kwargs)
+
+ eauth = salt.cli.batch.batch_get_eauth(kwargs)
- arg = salt.utils.args.condition_input(arg, kwarg)
- opts = {'tgt': tgt,
- 'fun': fun,
- 'arg': arg,
- 'tgt_type': tgt_type,
- 'ret': ret,
- 'batch': batch,
- 'failhard': kwargs.get('failhard', False),
- 'raw': kwargs.get('raw', False)}
-
- if 'timeout' in kwargs:
- opts['timeout'] = kwargs['timeout']
- if 'gather_job_timeout' in kwargs:
- opts['gather_job_timeout'] = kwargs['gather_job_timeout']
- if 'batch_wait' in kwargs:
- opts['batch_wait'] = int(kwargs['batch_wait'])
-
- eauth = {}
- if 'eauth' in kwargs:
- eauth['eauth'] = kwargs.pop('eauth')
- if 'username' in kwargs:
- eauth['username'] = kwargs.pop('username')
- if 'password' in kwargs:
- eauth['password'] = kwargs.pop('password')
- if 'token' in kwargs:
- eauth['token'] = kwargs.pop('token')
-
- for key, val in six.iteritems(self.opts):
- if key not in opts:
- opts[key] = val
batch = salt.cli.batch.Batch(opts, eauth=eauth, quiet=True)
for ret in batch.run():
yield ret
@@ -1732,7 +1701,8 @@ class LocalClient(object):
if listen and not self.event.connect_pub(timeout=timeout):
raise SaltReqTimeoutError()
payload = channel.send(payload_kwargs, timeout=timeout)
- except SaltReqTimeoutError:
+ except SaltReqTimeoutError as err:
+ log.error(err)
raise SaltReqTimeoutError(
'Salt request timed out. The master is not responding. You '
'may need to run your command with `--async` in order to '
diff --git a/salt/master.py b/salt/master.py
index 6881aae137..f08c126280 100644
--- a/salt/master.py
+++ b/salt/master.py
@@ -32,6 +32,7 @@ import tornado.gen # pylint: disable=F0401
# Import salt libs
import salt.crypt
+import salt.cli.batch_async
import salt.client
import salt.client.ssh.client
import salt.exceptions
@@ -2039,6 +2040,27 @@ class ClearFuncs(object):
return False
return self.loadauth.get_tok(clear_load['token'])
+ def publish_batch(self, clear_load, minions, missing):
+ batch_load = {}
+ batch_load.update(clear_load)
+ import salt.cli.batch_async
+ batch = salt.cli.batch_async.BatchAsync(
+ self.local.opts,
+ functools.partial(self._prep_jid, clear_load, {}),
+ batch_load
+ )
+ ioloop = tornado.ioloop.IOLoop.current()
+ ioloop.add_callback(batch.start)
+
+ return {
+ 'enc': 'clear',
+ 'load': {
+ 'jid': batch.batch_jid,
+ 'minions': minions,
+ 'missing': missing
+ }
+ }
+
def publish(self, clear_load):
'''
This method sends out publications to the minions, it can only be used
@@ -2130,6 +2152,9 @@ class ClearFuncs(object):
'error': 'Master could not resolve minions for target {0}'.format(clear_load['tgt'])
}
}
+ if extra.get('batch', None):
+ return self.publish_batch(clear_load, minions, missing)
+
jid = self._prep_jid(clear_load, extra)
if jid is None:
return {'enc': 'clear',
diff --git a/salt/netapi/__init__.py b/salt/netapi/__init__.py
index 95f6384889..43b6e943a7 100644
--- a/salt/netapi/__init__.py
+++ b/salt/netapi/__init__.py
@@ -88,7 +88,8 @@ class NetapiClient(object):
:return: job ID
'''
local = salt.client.get_local_client(mopts=self.opts)
- return local.run_job(*args, **kwargs)
+ ret = local.run_job(*args, **kwargs)
+ return ret
def local(self, *args, **kwargs):
'''
diff --git a/salt/transport/ipc.py b/salt/transport/ipc.py
index 40a172991d..8235f104ef 100644
--- a/salt/transport/ipc.py
+++ b/salt/transport/ipc.py
@@ -669,6 +669,8 @@ class IPCMessageSubscriber(IPCClient):
self._sync_ioloop_running = False
self.saved_data = []
self._sync_read_in_progress = Semaphore()
+ self.callbacks = set()
+ self.reading = False
@tornado.gen.coroutine
def _read_sync(self, timeout):
@@ -756,6 +758,7 @@ class IPCMessageSubscriber(IPCClient):
while not self.stream.closed():
try:
self._read_stream_future = self.stream.read_bytes(4096, partial=True)
+ self.reading = True
wire_bytes = yield self._read_stream_future
self._read_stream_future = None
self.unpacker.feed(wire_bytes)
@@ -768,8 +771,12 @@ class IPCMessageSubscriber(IPCClient):
except Exception as exc:
log.error('Exception occurred while Subscriber handling stream: %s', exc)
+ def __run_callbacks(self, raw):
+ for callback in self.callbacks:
+ self.io_loop.spawn_callback(callback, raw)
+
@tornado.gen.coroutine
- def read_async(self, callback):
+ def read_async(self):
'''
Asynchronously read messages and invoke a callback when they are ready.
@@ -784,7 +791,7 @@ class IPCMessageSubscriber(IPCClient):
except Exception as exc:
log.error('Exception occurred while Subscriber connecting: %s', exc)
yield tornado.gen.sleep(1)
- yield self._read_async(callback)
+ yield self._read_async(self.__run_callbacks)
def close(self):
'''
diff --git a/salt/utils/event.py b/salt/utils/event.py
index 296a296084..d2700bd2a0 100644
--- a/salt/utils/event.py
+++ b/salt/utils/event.py
@@ -863,6 +863,10 @@ class SaltEvent(object):
# Minion fired a bad retcode, fire an event
self._fire_ret_load_specific_fun(load)
+ def remove_event_handler(self, event_handler):
+ if event_handler in self.subscriber.callbacks:
+ self.subscriber.callbacks.remove(event_handler)
+
def set_event_handler(self, event_handler):
'''
Invoke the event_handler callback each time an event arrives.
@@ -871,8 +875,11 @@ class SaltEvent(object):
if not self.cpub:
self.connect_pub()
- # This will handle reconnects
- return self.subscriber.read_async(event_handler)
+
+ self.subscriber.callbacks.add(event_handler)
+ if not self.subscriber.reading:
+ # This will handle reconnects
+ self.subscriber.read_async()
def __del__(self):
# skip exceptions in destroy-- since destroy() doesn't cover interpreter
diff --git a/tests/unit/cli/test_batch_async.py b/tests/unit/cli/test_batch_async.py
new file mode 100644
index 0000000000..f65b6a06c3
--- /dev/null
+++ b/tests/unit/cli/test_batch_async.py
@@ -0,0 +1,351 @@
+# -*- coding: utf-8 -*-
+
+from __future__ import absolute_import
+
+# Import Salt Libs
+from salt.cli.batch_async import BatchAsync
+
+import tornado
+from tornado.testing import AsyncTestCase
+from tests.support.unit import skipIf, TestCase
+from tests.support.mock import MagicMock, patch, NO_MOCK, NO_MOCK_REASON
+
+
+@skipIf(NO_MOCK, NO_MOCK_REASON)
+class AsyncBatchTestCase(AsyncTestCase, TestCase):
+
+ def setUp(self):
+ self.io_loop = self.get_new_ioloop()
+ opts = {'batch': '1',
+ 'conf_file': {},
+ 'tgt': '*',
+ 'timeout': 5,
+ 'gather_job_timeout': 5,
+ 'batch_presence_ping_timeout': 1,
+ 'transport': None,
+ 'sock_dir': ''}
+
+ with patch('salt.client.get_local_client', MagicMock(return_value=MagicMock())):
+ with patch('salt.cli.batch_async.batch_get_opts',
+ MagicMock(return_value=opts)
+ ):
+ self.batch = BatchAsync(
+ opts,
+ MagicMock(side_effect=['1234', '1235', '1236']),
+ {
+ 'tgt': '',
+ 'fun': '',
+ 'kwargs': {
+ 'batch': '',
+ 'batch_presence_ping_timeout': 1
+ }
+ })
+
+ def test_ping_jid(self):
+ self.assertEqual(self.batch.ping_jid, '1234')
+
+ def test_batch_jid(self):
+ self.assertEqual(self.batch.batch_jid, '1235')
+
+ def test_find_job_jid(self):
+ self.assertEqual(self.batch.find_job_jid, '1236')
+
+ def test_batch_size(self):
+ '''
+ Tests passing batch value as a number
+ '''
+ self.batch.opts = {'batch': '2', 'timeout': 5}
+ self.batch.minions = set(['foo', 'bar'])
+ self.batch.start_batch()
+ self.assertEqual(self.batch.batch_size, 2)
+
+ @tornado.testing.gen_test
+ def test_batch_start_on_batch_presence_ping_timeout(self):
+ self.batch.event = MagicMock()
+ future = tornado.gen.Future()
+ future.set_result({'minions': ['foo', 'bar']})
+ self.batch.local.run_job_async.return_value = future
+ ret = self.batch.start()
+ # assert start_batch is called later with batch_presence_ping_timeout as param
+ self.assertEqual(
+ self.batch.event.io_loop.call_later.call_args[0],
+ (self.batch.batch_presence_ping_timeout, self.batch.start_batch))
+ # assert test.ping called
+ self.assertEqual(
+ self.batch.local.run_job_async.call_args[0],
+ ('*', 'test.ping', [], 'glob')
+ )
+ # assert down_minions == all minions matched by tgt
+ self.assertEqual(self.batch.down_minions, set(['foo', 'bar']))
+
+ @tornado.testing.gen_test
+ def test_batch_start_on_gather_job_timeout(self):
+ self.batch.event = MagicMock()
+ future = tornado.gen.Future()
+ future.set_result({'minions': ['foo', 'bar']})
+ self.batch.local.run_job_async.return_value = future
+ self.batch.batch_presence_ping_timeout = None
+ ret = self.batch.start()
+ # assert start_batch is called later with gather_job_timeout as param
+ self.assertEqual(
+ self.batch.event.io_loop.call_later.call_args[0],
+ (self.batch.opts['gather_job_timeout'], self.batch.start_batch))
+
+ def test_batch_fire_start_event(self):
+ self.batch.minions = set(['foo', 'bar'])
+ self.batch.opts = {'batch': '2', 'timeout': 5}
+ self.batch.event = MagicMock()
+ self.batch.metadata = {'mykey': 'myvalue'}
+ self.batch.start_batch()
+ self.assertEqual(
+ self.batch.event.fire_event.call_args[0],
+ (
+ {
+ 'available_minions': set(['foo', 'bar']),
+ 'down_minions': set(),
+ 'metadata': self.batch.metadata
+ },
+ "salt/batch/1235/start"
+ )
+ )
+
+ @tornado.testing.gen_test
+ def test_start_batch_calls_next(self):
+ self.batch.schedule_next = MagicMock(return_value=MagicMock())
+ self.batch.event = MagicMock()
+ future = tornado.gen.Future()
+ future.set_result(None)
+ self.batch.schedule_next = MagicMock(return_value=future)
+ self.batch.start_batch()
+ self.assertEqual(self.batch.initialized, True)
+ self.assertEqual(len(self.batch.schedule_next.mock_calls), 1)
+
+ def test_batch_fire_done_event(self):
+ self.batch.minions = set(['foo', 'bar'])
+ self.batch.event = MagicMock()
+ self.batch.metadata = {'mykey': 'myvalue'}
+ self.batch.end_batch()
+ self.assertEqual(
+ self.batch.event.fire_event.call_args[0],
+ (
+ {
+ 'available_minions': set(['foo', 'bar']),
+ 'done_minions': set(),
+ 'down_minions': set(),
+ 'timedout_minions': set(),
+ 'metadata': self.batch.metadata
+ },
+ "salt/batch/1235/done"
+ )
+ )
+ self.assertEqual(
+ len(self.batch.event.remove_event_handler.mock_calls), 1)
+
+ @tornado.testing.gen_test
+ def test_batch_next(self):
+ self.batch.event = MagicMock()
+ self.batch.opts['fun'] = 'my.fun'
+ self.batch.opts['arg'] = []
+ self.batch._get_next = MagicMock(return_value={'foo', 'bar'})
+ self.batch.batch_size = 2
+ future = tornado.gen.Future()
+ future.set_result({'minions': ['foo', 'bar']})
+ self.batch.local.run_job_async.return_value = future
+ ret = self.batch.schedule_next().result()
+ self.assertEqual(
+ self.batch.local.run_job_async.call_args[0],
+ ({'foo', 'bar'}, 'my.fun', [], 'list')
+ )
+ self.assertEqual(
+ self.batch.event.io_loop.call_later.call_args[0],
+ (self.batch.opts['timeout'], self.batch.find_job, {'foo', 'bar'})
+ )
+ self.assertEqual(self.batch.active, {'bar', 'foo'})
+
+ def test_next_batch(self):
+ self.batch.minions = {'foo', 'bar'}
+ self.batch.batch_size = 2
+ self.assertEqual(self.batch._get_next(), {'foo', 'bar'})
+
+ def test_next_batch_one_done(self):
+ self.batch.minions = {'foo', 'bar'}
+ self.batch.done_minions = {'bar'}
+ self.batch.batch_size = 2
+ self.assertEqual(self.batch._get_next(), {'foo'})
+
+ def test_next_batch_one_done_one_active(self):
+ self.batch.minions = {'foo', 'bar', 'baz'}
+ self.batch.done_minions = {'bar'}
+ self.batch.active = {'baz'}
+ self.batch.batch_size = 2
+ self.assertEqual(self.batch._get_next(), {'foo'})
+
+ def test_next_batch_one_done_one_active_one_timedout(self):
+ self.batch.minions = {'foo', 'bar', 'baz', 'faz'}
+ self.batch.done_minions = {'bar'}
+ self.batch.active = {'baz'}
+ self.batch.timedout_minions = {'faz'}
+ self.batch.batch_size = 2
+ self.assertEqual(self.batch._get_next(), {'foo'})
+
+ def test_next_batch_bigger_size(self):
+ self.batch.minions = {'foo', 'bar'}
+ self.batch.batch_size = 3
+ self.assertEqual(self.batch._get_next(), {'foo', 'bar'})
+
+ def test_next_batch_all_done(self):
+ self.batch.minions = {'foo', 'bar'}
+ self.batch.done_minions = {'foo', 'bar'}
+ self.batch.batch_size = 2
+ self.assertEqual(self.batch._get_next(), set())
+
+ def test_next_batch_all_active(self):
+ self.batch.minions = {'foo', 'bar'}
+ self.batch.active = {'foo', 'bar'}
+ self.batch.batch_size = 2
+ self.assertEqual(self.batch._get_next(), set())
+
+ def test_next_batch_all_timedout(self):
+ self.batch.minions = {'foo', 'bar'}
+ self.batch.timedout_minions = {'foo', 'bar'}
+ self.batch.batch_size = 2
+ self.assertEqual(self.batch._get_next(), set())
+
+ def test_batch__event_handler_ping_return(self):
+ self.batch.down_minions = {'foo'}
+ self.batch.event = MagicMock(
+ unpack=MagicMock(return_value=('salt/job/1234/ret/foo', {'id': 'foo'})))
+ self.batch.start()
+ self.assertEqual(self.batch.minions, set())
+ self.batch._BatchAsync__event_handler(MagicMock())
+ self.assertEqual(self.batch.minions, {'foo'})
+ self.assertEqual(self.batch.done_minions, set())
+
+ def test_batch__event_handler_call_start_batch_when_all_pings_return(self):
+ self.batch.down_minions = {'foo'}
+ self.batch.event = MagicMock(
+ unpack=MagicMock(return_value=('salt/job/1234/ret/foo', {'id': 'foo'})))
+ self.batch.start()
+ self.batch._BatchAsync__event_handler(MagicMock())
+ self.assertEqual(
+ self.batch.event.io_loop.spawn_callback.call_args[0],
+ (self.batch.start_batch,))
+
+ def test_batch__event_handler_not_call_start_batch_when_not_all_pings_return(self):
+ self.batch.down_minions = {'foo', 'bar'}
+ self.batch.event = MagicMock(
+ unpack=MagicMock(return_value=('salt/job/1234/ret/foo', {'id': 'foo'})))
+ self.batch.start()
+ self.batch._BatchAsync__event_handler(MagicMock())
+ self.assertEqual(
+ len(self.batch.event.io_loop.spawn_callback.mock_calls), 0)
+
+ def test_batch__event_handler_batch_run_return(self):
+ self.batch.event = MagicMock(
+ unpack=MagicMock(return_value=('salt/job/1235/ret/foo', {'id': 'foo'})))
+ self.batch.start()
+ self.batch.active = {'foo'}
+ self.batch._BatchAsync__event_handler(MagicMock())
+ self.assertEqual(self.batch.active, set())
+ self.assertEqual(self.batch.done_minions, {'foo'})
+ self.assertEqual(
+ self.batch.event.io_loop.call_later.call_args[0],
+ (self.batch.batch_delay, self.batch.schedule_next))
+
+ def test_batch__event_handler_find_job_return(self):
+ self.batch.event = MagicMock(
+ unpack=MagicMock(return_value=('salt/job/1236/ret/foo', {'id': 'foo'})))
+ self.batch.start()
+ self.batch._BatchAsync__event_handler(MagicMock())
+ self.assertEqual(self.batch.find_job_returned, {'foo'})
+
+ @tornado.testing.gen_test
+ def test_batch__event_handler_end_batch(self):
+ self.batch.event = MagicMock(
+ unpack=MagicMock(return_value=('salt/job/not-my-jid/ret/foo', {'id': 'foo'})))
+ future = tornado.gen.Future()
+ future.set_result({'minions': ['foo', 'bar', 'baz']})
+ self.batch.local.run_job_async.return_value = future
+ self.batch.start()
+ self.batch.initialized = True
+ self.assertEqual(self.batch.down_minions, {'foo', 'bar', 'baz'})
+ self.batch.end_batch = MagicMock()
+ self.batch.minions = {'foo', 'bar', 'baz'}
+ self.batch.done_minions = {'foo', 'bar'}
+ self.batch.timedout_minions = {'baz'}
+ self.batch._BatchAsync__event_handler(MagicMock())
+ self.assertEqual(len(self.batch.end_batch.mock_calls), 1)
+
+ @tornado.testing.gen_test
+ def test_batch_find_job(self):
+ self.batch.event = MagicMock()
+ future = tornado.gen.Future()
+ future.set_result({})
+ self.batch.local.run_job_async.return_value = future
+ self.batch.find_job({'foo', 'bar'})
+ self.assertEqual(
+ self.batch.event.io_loop.call_later.call_args[0],
+ (self.batch.opts['gather_job_timeout'], self.batch.check_find_job, {'foo', 'bar'})
+ )
+
+ @tornado.testing.gen_test
+ def test_batch_find_job_with_done_minions(self):
+ self.batch.done_minions = {'bar'}
+ self.batch.event = MagicMock()
+ future = tornado.gen.Future()
+ future.set_result({})
+ self.batch.local.run_job_async.return_value = future
+ self.batch.find_job({'foo', 'bar'})
+ self.assertEqual(
+ self.batch.event.io_loop.call_later.call_args[0],
+ (self.batch.opts['gather_job_timeout'], self.batch.check_find_job, {'foo'})
+ )
+
+ def test_batch_check_find_job_did_not_return(self):
+ self.batch.event = MagicMock()
+ self.batch.active = {'foo'}
+ self.batch.find_job_returned = set()
+ self.batch.check_find_job({'foo'})
+ self.assertEqual(self.batch.find_job_returned, set())
+ self.assertEqual(self.batch.active, set())
+ self.assertEqual(len(self.batch.event.io_loop.add_callback.mock_calls), 0)
+
+ def test_batch_check_find_job_did_return(self):
+ self.batch.event = MagicMock()
+ self.batch.find_job_returned = {'foo'}
+ self.batch.check_find_job({'foo'})
+ self.assertEqual(
+ self.batch.event.io_loop.add_callback.call_args[0],
+ (self.batch.find_job, {'foo'})
+ )
+
+ def test_batch_check_find_job_multiple_states(self):
+ self.batch.event = MagicMock()
+ # currently running minions
+ self.batch.active = {'foo', 'bar'}
+
+ # minion is running and find_job returns
+ self.batch.find_job_returned = {'foo'}
+
+ # minion started running but find_job did not return
+ self.batch.timedout_minions = {'faz'}
+
+ # minion finished
+ self.batch.done_minions = {'baz'}
+
+ # both not yet done but only 'foo' responded to find_job
+ not_done = {'foo', 'bar'}
+
+ self.batch.check_find_job(not_done)
+
+ # assert 'bar' removed from active
+ self.assertEqual(self.batch.active, {'foo'})
+
+ # assert 'bar' added to timedout_minions
+ self.assertEqual(self.batch.timedout_minions, {'bar', 'faz'})
+
+ # assert 'find_job' schedueled again only for 'foo'
+ self.assertEqual(
+ self.batch.event.io_loop.add_callback.call_args[0],
+ (self.batch.find_job, {'foo'})
+ )
--
2.20.1

View File

@ -1,57 +0,0 @@
From c4d9227b6da4407348e181f092445f17e3c14b51 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Pablo=20Su=C3=A1rez=20Hern=C3=A1ndez?=
<psuarezhernandez@suse.com>
Date: Thu, 26 Jul 2018 16:42:10 +0100
Subject: [PATCH] Avoid incomprehensive message if crashes
Check dmidecoder executable on each call to avoid crashing
Fix pylint issues
---
salt/modules/smbios.py | 11 +++++++++++
1 file changed, 11 insertions(+)
diff --git a/salt/modules/smbios.py b/salt/modules/smbios.py
index c8a0e54a5c..c0b94c2a65 100644
--- a/salt/modules/smbios.py
+++ b/salt/modules/smbios.py
@@ -19,6 +19,7 @@ import re
# Import salt libs
import salt.utils.path
+from salt.exceptions import CommandExecutionError
# Solve the Chicken and egg problem where grains need to run before any
# of the modules are loaded and are generally available for any usage.
@@ -32,10 +33,16 @@ log = logging.getLogger(__name__)
DMIDECODER = salt.utils.path.which_bin(['dmidecode', 'smbios'])
+def _refresh_dmidecoder():
+ global DMIDECODER
+ DMIDECODER = salt.utils.path.which_bin(['dmidecode', 'smbios'])
+
+
def __virtual__():
'''
Only work when dmidecode is installed.
'''
+ _refresh_dmidecoder()
if DMIDECODER is None:
log.debug('SMBIOS: neither dmidecode nor smbios found!')
return (False, 'The smbios execution module failed to load: neither dmidecode nor smbios in the path.')
@@ -327,6 +334,10 @@ def _dmidecoder(args=None):
'''
Call DMIdecode
'''
+ _refresh_dmidecoder()
+ if DMIDECODER is None:
+ raise CommandExecutionError('SMBIOS: neither dmidecode nor smbios found!')
+
if args is None:
return salt.modules.cmdmod._run_quiet(DMIDECODER)
else:
--
2.17.1

View File

@ -1,4 +1,4 @@
From 326e649ef1f14b609916f0e9ce75e29a5e7f4d05 Mon Sep 17 00:00:00 2001
From d914a1e952e393f3e72aee2cb8d9056533f490cc Mon Sep 17 00:00:00 2001
From: Robert Munteanu <rombert@apache.org>
Date: Mon, 19 Nov 2018 17:52:34 +0100
Subject: [PATCH] azurefs: gracefully handle AttributeError
@ -13,7 +13,7 @@ Problem was encountered on openSUSE Tumbleweed.
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/salt/fileserver/azurefs.py b/salt/fileserver/azurefs.py
index c266323fbe..a42c10c594 100644
index 547a681016..032739d160 100644
--- a/salt/fileserver/azurefs.py
+++ b/salt/fileserver/azurefs.py
@@ -68,7 +68,7 @@ try:
@ -26,6 +26,6 @@ index c266323fbe..a42c10c594 100644
# Import third party libs
--
2.20.1
2.17.1

View File

@ -1,4 +1,4 @@
From e82dc4c556497b612d31b65e60b34c979c957424 Mon Sep 17 00:00:00 2001
From 8fc3419db49497ca33f99d7bbc3a251d7b07ff09 Mon Sep 17 00:00:00 2001
From: Bo Maryniuk <bo@suse.de>
Date: Fri, 5 Oct 2018 12:02:08 +0200
Subject: [PATCH] Bugfix: any unicode string of length 16 will raise
@ -9,11 +9,11 @@ Subject: [PATCH] Bugfix: any unicode string of length 16 will raise
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/salt/_compat.py b/salt/_compat.py
index 0576210afc..71963a4ead 100644
index 8628833dcf..98931c6cce 100644
--- a/salt/_compat.py
+++ b/salt/_compat.py
@@ -192,7 +192,7 @@ class IPv6AddressScoped(ipaddress.IPv6Address):
if len(data) == 16 and ':' not in data:
@@ -191,7 +191,7 @@ class IPv6AddressScoped(ipaddress.IPv6Address):
if isinstance(data, bytes) and len(data) == 16 and b':' not in data:
try:
packed = bool(int(str(bytearray(data)).encode('hex'), 16))
- except ValueError:
@ -22,6 +22,6 @@ index 0576210afc..71963a4ead 100644
return packed
--
2.17.1
2.20.1

View File

@ -1,57 +0,0 @@
From a0d5af98c8d2a22c5eb56943ff320ca287fa79ea Mon Sep 17 00:00:00 2001
From: Florian Bergmann <bergmannf@users.noreply.github.com>
Date: Tue, 11 Sep 2018 14:03:33 +0200
Subject: [PATCH] Change StringIO import in python2 to import the class.
(#107)
Instead of using StringIO in python3, use the correct BytesIO class instead.
---
salt/modules/hashutil.py | 11 ++++++-----
1 file changed, 6 insertions(+), 5 deletions(-)
diff --git a/salt/modules/hashutil.py b/salt/modules/hashutil.py
index 721957973d..5123cc7cd7 100644
--- a/salt/modules/hashutil.py
+++ b/salt/modules/hashutil.py
@@ -17,9 +17,10 @@ import salt.utils.hashutils
import salt.utils.stringutils
if six.PY2:
- import StringIO
+ from StringIO import StringIO
+ BytesIO = StringIO
elif six.PY3:
- from io import StringIO
+ from io import BytesIO, StringIO
def digest(instr, checksum='md5'):
@@ -155,13 +156,13 @@ def base64_encodefile(fname):
salt '*' hashutil.base64_encodefile /path/to/binary_file
'''
- encoded_f = StringIO.StringIO()
+ encoded_f = BytesIO()
with salt.utils.files.fopen(fname, 'rb') as f:
base64.encode(f, encoded_f)
encoded_f.seek(0)
- return encoded_f.read()
+ return salt.utils.stringutils.to_str(encoded_f.read())
def base64_decodestring(instr):
@@ -192,7 +193,7 @@ def base64_decodefile(instr, outfile):
salt '*' hashutil.base64_decodefile instr='Z2V0IHNhbHRlZAo=' outfile='/path/to/binary_file'
'''
- encoded_f = StringIO.StringIO(instr)
+ encoded_f = StringIO(instr)
with salt.utils.files.fopen(outfile, 'wb') as f:
base64.decode(encoded_f, f)
--
2.19.0

View File

@ -1,4 +1,4 @@
From 9b2473001dcf25c53dff469d3ffb38113e0402eb Mon Sep 17 00:00:00 2001
From afdfd35222223d81c304854b5ae7af60f3820ed3 Mon Sep 17 00:00:00 2001
From: Bo Maryniuk <bo@suse.de>
Date: Tue, 20 Nov 2018 16:06:31 +0100
Subject: [PATCH] Debian info_installed compatibility (#50453)
@ -49,17 +49,17 @@ Adjust architecture getter according to the lowpkg info
Fix wrong Git merge: missing function signature
---
salt/modules/aptpkg.py | 20 +++-
salt/modules/dpkg.py | 93 +++++++++++++--
tests/unit/modules/test_aptpkg.py | 189 +++++++++++++++++-------------
tests/unit/modules/test_dpkg.py | 69 +++++++++++
4 files changed, 274 insertions(+), 97 deletions(-)
salt/modules/aptpkg.py | 20 +++-
salt/modules/dpkg_lowpkg.py | 93 +++++++++++++--
tests/unit/modules/test_aptpkg.py | 151 ++++++++++++++++---------
tests/unit/modules/test_dpkg_lowpkg.py | 69 +++++++++++
4 files changed, 267 insertions(+), 66 deletions(-)
diff --git a/salt/modules/aptpkg.py b/salt/modules/aptpkg.py
index 90b99c44b9..dc27903230 100644
index 6b3a921a82..64620647c2 100644
--- a/salt/modules/aptpkg.py
+++ b/salt/modules/aptpkg.py
@@ -2800,6 +2800,15 @@ def info_installed(*names, **kwargs):
@@ -2776,6 +2776,15 @@ def info_installed(*names, **kwargs):
.. versionadded:: 2016.11.3
@ -75,7 +75,7 @@ index 90b99c44b9..dc27903230 100644
CLI example:
.. code-block:: bash
@@ -2810,11 +2819,15 @@ def info_installed(*names, **kwargs):
@@ -2786,11 +2795,15 @@ def info_installed(*names, **kwargs):
'''
kwargs = salt.utils.args.clean_kwargs(**kwargs)
failhard = kwargs.pop('failhard', True)
@ -92,7 +92,7 @@ index 90b99c44b9..dc27903230 100644
t_nfo = dict()
# Translate dpkg-specific keys to a common structure
for key, value in pkg_nfo.items():
@@ -2831,7 +2844,10 @@ def info_installed(*names, **kwargs):
@@ -2807,7 +2820,10 @@ def info_installed(*names, **kwargs):
else:
t_nfo[key] = value
@ -104,10 +104,10 @@ index 90b99c44b9..dc27903230 100644
return ret
diff --git a/salt/modules/dpkg.py b/salt/modules/dpkg.py
diff --git a/salt/modules/dpkg_lowpkg.py b/salt/modules/dpkg_lowpkg.py
index 03be5f821a..26ca5dcf5a 100644
--- a/salt/modules/dpkg.py
+++ b/salt/modules/dpkg.py
--- a/salt/modules/dpkg_lowpkg.py
+++ b/salt/modules/dpkg_lowpkg.py
@@ -252,6 +252,38 @@ def file_dict(*packages):
return {'errors': errors, 'packages': ret}
@ -257,26 +257,19 @@ index 03be5f821a..26ca5dcf5a 100644
return ret
diff --git a/tests/unit/modules/test_aptpkg.py b/tests/unit/modules/test_aptpkg.py
index c0e26cfcd4..5352e39982 100644
index 1e963ee5db..580b840197 100644
--- a/tests/unit/modules/test_aptpkg.py
+++ b/tests/unit/modules/test_aptpkg.py
@@ -13,12 +13,14 @@ import copy
# Import Salt Testing Libs
from tests.support.mixins import LoaderModuleMockMixin
from tests.support.unit import TestCase, skipIf
-from tests.support.mock import MagicMock, patch, NO_MOCK, NO_MOCK_REASON
+from tests.support.mock import Mock, MagicMock, patch, NO_MOCK, NO_MOCK_REASON
# Import Salt Libs
@@ -20,6 +20,8 @@ from tests.support.mock import Mock, MagicMock, patch, NO_MOCK, NO_MOCK_REASON
from salt.ext import six
from salt.exceptions import CommandExecutionError, SaltInvocationError
import salt.modules.aptpkg as aptpkg
+import pytest
+import textwrap
APT_KEY_LIST = r'''
@@ -142,51 +144,39 @@ class AptPkgTestCase(TestCase, LoaderModuleMockMixin):
try:
import pytest
@@ -148,51 +150,39 @@ class AptPkgTestCase(TestCase, LoaderModuleMockMixin):
def setup_loader_modules(self):
return {aptpkg: {}}
@ -341,7 +334,7 @@ index c0e26cfcd4..5352e39982 100644
def test_get_repo_keys(self):
'''
@@ -199,35 +189,31 @@ class AptPkgTestCase(TestCase, LoaderModuleMockMixin):
@@ -205,35 +195,31 @@ class AptPkgTestCase(TestCase, LoaderModuleMockMixin):
with patch.dict(aptpkg.__salt__, {'cmd.run_all': mock}):
self.assertEqual(aptpkg.get_repo_keys(), REPO_KEYS)
@ -388,7 +381,7 @@ index c0e26cfcd4..5352e39982 100644
def test_info_installed(self):
'''
Test - Return the information of the named package(s) installed on the system.
@@ -243,19 +229,72 @@ class AptPkgTestCase(TestCase, LoaderModuleMockMixin):
@@ -249,19 +235,72 @@ class AptPkgTestCase(TestCase, LoaderModuleMockMixin):
if installed['wget'].get(names[name], False):
installed['wget'][name] = installed['wget'].pop(names[name])
@ -435,7 +428,7 @@ index c0e26cfcd4..5352e39982 100644
+ assert isinstance(ret, dict)
+ assert 'wget' in ret
+ assert isinstance(ret['wget'], list)
+
+ pkgs = ret['wget']
+
+ assert len(pkgs) == 1
@ -449,7 +442,7 @@ index c0e26cfcd4..5352e39982 100644
+ for k in wget_pkg:
+ assert k in expected_pkg
+ assert wget_pkg[k] == expected_pkg[k]
+
+ @patch('salt.modules.aptpkg.__salt__', {'cmd.run_stdout': MagicMock(return_value='wget: /usr/bin/wget')})
def test_owner(self):
'''
@ -468,18 +461,9 @@ index c0e26cfcd4..5352e39982 100644
def test_refresh_db(self):
'''
Test - Updates the APT database to latest packages based upon repositories.
@@ -267,26 +306,20 @@ class AptPkgTestCase(TestCase, LoaderModuleMockMixin):
'http://security.ubuntu.com trusty-security/main amd64 Packages': True,
'http://security.ubuntu.com trusty-security/main i386 Packages': True
}
- mock = MagicMock(return_value={
- 'retcode': 0,
- 'stdout': APT_Q_UPDATE
- })
- with patch('salt.utils.pkg.clear_rtag', MagicMock()):
- with patch.dict(aptpkg.__salt__, {'cmd.run_all': mock}):
- self.assertEqual(aptpkg.refresh_db(), refresh_db)
+ assert aptpkg.refresh_db() == refresh_db
@@ -281,6 +320,10 @@ class AptPkgTestCase(TestCase, LoaderModuleMockMixin):
with patch.dict(aptpkg.__salt__, {'cmd.run_all': mock, 'config.get': MagicMock(return_value=False)}):
self.assertEqual(aptpkg.refresh_db(), refresh_db)
+ @patch('salt.utils.pkg.clear_rtag', MagicMock())
+ @patch('salt.modules.aptpkg.__salt__', {'cmd.run_all': MagicMock(return_value={'retcode': 0,
@ -488,25 +472,9 @@ index c0e26cfcd4..5352e39982 100644
def test_refresh_db_failed(self):
'''
Test - Update the APT database using unreachable repositories.
'''
- kwargs = {'failhard': True}
- mock = MagicMock(return_value={
- 'retcode': 0,
- 'stdout': APT_Q_UPDATE_ERROR
- })
- with patch('salt.utils.pkg.clear_rtag', MagicMock()):
- with patch.dict(aptpkg.__salt__, {'cmd.run_all': mock}):
- self.assertRaises(CommandExecutionError, aptpkg.refresh_db, **kwargs)
+ with pytest.raises(CommandExecutionError) as err:
+ aptpkg.refresh_db(failhard=True)
+ assert 'Error getting repos' in str(err)
+ assert 'http://security.ubuntu.com trusty InRelease, http://security.ubuntu.com trusty Release.gpg' in str(err)
def test_autoremove(self):
'''
@@ -306,38 +339,26 @@ class AptPkgTestCase(TestCase, LoaderModuleMockMixin):
self.assertEqual(aptpkg.autoremove(list_only=True), list())
self.assertEqual(aptpkg.autoremove(list_only=True, purge=True), list())
@@ -312,22 +355,24 @@ class AptPkgTestCase(TestCase, LoaderModuleMockMixin):
assert aptpkg.autoremove(list_only=True) == []
assert aptpkg.autoremove(list_only=True, purge=True) == []
+ @patch('salt.modules.aptpkg._uninstall', MagicMock(return_value=UNINSTALL))
def test_remove(self):
@ -535,27 +503,10 @@ index c0e26cfcd4..5352e39982 100644
def test_upgrade(self):
'''
Test - Upgrades all packages.
'''
- with patch('salt.utils.pkg.clear_rtag', MagicMock()):
- with patch('salt.modules.aptpkg.list_pkgs',
- MagicMock(return_value=UNINSTALL)):
- mock_cmd = MagicMock(return_value={
- 'retcode': 0,
- 'stdout': UPGRADE
- })
- patch_kwargs = {
- '__salt__': {
- 'config.get': MagicMock(return_value=True),
- 'cmd.run_all': mock_cmd
- }
- }
- with patch.multiple(aptpkg, **patch_kwargs):
- self.assertEqual(aptpkg.upgrade(), dict())
+ assert aptpkg.upgrade() == {}
diff --git a/tests/unit/modules/test_dpkg.py b/tests/unit/modules/test_dpkg.py
index fcfa7caf77..1acfd89ccf 100644
--- a/tests/unit/modules/test_dpkg.py
+++ b/tests/unit/modules/test_dpkg.py
diff --git a/tests/unit/modules/test_dpkg_lowpkg.py b/tests/unit/modules/test_dpkg_lowpkg.py
index bdcb7eec89..d16ce3cc1a 100644
--- a/tests/unit/modules/test_dpkg_lowpkg.py
+++ b/tests/unit/modules/test_dpkg_lowpkg.py
@@ -25,6 +25,30 @@ class DpkgTestCase(TestCase, LoaderModuleMockMixin):
'''
Test cases for salt.modules.dpkg
@ -637,6 +588,6 @@ index fcfa7caf77..1acfd89ccf 100644
+ assert ret['emacs']['license'] == 'BSD v3'
+ assert ret['emacs']['version'] == '46.1'
--
2.19.1
2.20.1

View File

@ -1,27 +0,0 @@
From 58913b6801b92bd59374cd53fa48fa74171abb73 Mon Sep 17 00:00:00 2001
From: Abid Mehmood <amehmood@suse.de>
Date: Wed, 1 Aug 2018 17:19:11 +0200
Subject: [PATCH] Decode file contents for python2(bsc#1102013)
---
salt/states/file.py | 3 +--
1 file changed, 1 insertion(+), 2 deletions(-)
diff --git a/salt/states/file.py b/salt/states/file.py
index e1d247ae4f..db82098a33 100644
--- a/salt/states/file.py
+++ b/salt/states/file.py
@@ -1105,8 +1105,7 @@ def _get_template_texts(source_list=None,
tmplines = None
with salt.utils.files.fopen(rndrd_templ_fn, 'rb') as fp_:
tmplines = fp_.read()
- if six.PY3:
- tmplines = tmplines.decode(__salt_system_encoding__)
+ tmplines = tmplines.decode(__salt_system_encoding__)
tmplines = tmplines.splitlines(True)
if not tmplines:
msg = 'Failed to read rendered template file {0} ({1})'
--
2.17.1

View File

@ -1,4 +1,4 @@
From 7727ab13e3492b722b316469cc912d9dd64f063e Mon Sep 17 00:00:00 2001
From ab7d69b3438c719f7ad6b4b346e56556e8a7bd10 Mon Sep 17 00:00:00 2001
From: Bo Maryniuk <bo@suse.de>
Date: Fri, 21 Sep 2018 17:31:39 +0200
Subject: [PATCH] Do not load pip state if there is no 3rd party
@ -6,12 +6,11 @@ Subject: [PATCH] Do not load pip state if there is no 3rd party
Safe import 3rd party dependency
---
salt/modules/pip.py | 12 ++++++++++--
salt/states/pip_state.py | 9 +++++----
2 files changed, 15 insertions(+), 6 deletions(-)
salt/modules/pip.py | 12 ++++++++++--
1 file changed, 10 insertions(+), 2 deletions(-)
diff --git a/salt/modules/pip.py b/salt/modules/pip.py
index f1a2e42433..85844f098b 100644
index eac40c719c..988ae695a7 100644
--- a/salt/modules/pip.py
+++ b/salt/modules/pip.py
@@ -79,7 +79,10 @@ from __future__ import absolute_import, print_function, unicode_literals
@ -40,34 +39,7 @@ index f1a2e42433..85844f098b 100644
def _clear_context(bin_env=None):
diff --git a/salt/states/pip_state.py b/salt/states/pip_state.py
index ab58fbd5fc..afe41d7fc8 100644
--- a/salt/states/pip_state.py
+++ b/salt/states/pip_state.py
@@ -23,7 +23,10 @@ requisite to a pkg.installed state for the package which provides pip
from __future__ import absolute_import, print_function, unicode_literals
import re
import logging
-import pkg_resources
+try:
+ import pkg_resources
+except ImportError:
+ pkg_resources = None
# Import salt libs
import salt.utils.versions
@@ -71,9 +74,7 @@ def __virtual__():
'''
Only load if the pip module is available in __salt__
'''
- if 'pip.list' in __salt__:
- return __virtualname__
- return False
+ return 'pip.list' in __salt__ and __virtualname__ or False
def _find_key(prefix, pip_list):
--
2.19.0
2.17.1

View File

@ -1,38 +0,0 @@
From 911d61d1479d89ed31b23b038874505b731c6d86 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Pablo=20Su=C3=A1rez=20Hern=C3=A1ndez?=
<psuarezhernandez@suse.com>
Date: Fri, 4 May 2018 09:34:13 +0100
Subject: [PATCH] Do not override jid on returners, only sending back to
master (bsc#1092373)
---
salt/utils/schedule.py | 12 +++++++-----
1 file changed, 7 insertions(+), 5 deletions(-)
diff --git a/salt/utils/schedule.py b/salt/utils/schedule.py
index 65c2e3fbda..32fdae9786 100644
--- a/salt/utils/schedule.py
+++ b/salt/utils/schedule.py
@@ -755,11 +755,13 @@ class Schedule(object):
else:
# Send back to master so the job is included in the job list
mret = ret.copy()
- mret['jid'] = 'req'
- if data.get('return_job') == 'nocache':
- # overwrite 'req' to signal to master that
- # this job shouldn't be stored
- mret['jid'] = 'nocache'
+ # No returners defined, so we're only sending back to the master
+ if not data_returner and not self.schedule_returner:
+ mret['jid'] = 'req'
+ if data.get('return_job') == 'nocache':
+ # overwrite 'req' to signal to master that
+ # this job shouldn't be stored
+ mret['jid'] = 'nocache'
load = {'cmd': '_return', 'id': self.opts['id']}
for key, value in six.iteritems(mret):
load[key] = value
--
2.13.7

View File

@ -0,0 +1,42 @@
From 1c3f8f32d475701e8b7fab64b8cb9dcd44b587d4 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?C=C3=A9dric=20Bosdonnat?= <cbosdonnat@suse.com>
Date: Tue, 29 Jan 2019 09:44:03 +0100
Subject: [PATCH] Don't call zypper with more than one --no-refresh
Now zypper started being picky and errors out when --no-refresh is
passed twice. Make sure we won't hit this.
---
salt/modules/zypperpkg.py | 2 +-
tests/unit/modules/test_zypperpkg.py | 2 +-
2 files changed, 2 insertions(+), 2 deletions(-)
diff --git a/salt/modules/zypperpkg.py b/salt/modules/zypperpkg.py
index c442337c58..bab9e22dec 100644
--- a/salt/modules/zypperpkg.py
+++ b/salt/modules/zypperpkg.py
@@ -291,7 +291,7 @@ class _Zypper(object):
self.__called = True
if self.__xml:
self.__cmd.append('--xmlout')
- if not self.__refresh:
+ if not self.__refresh and '--no-refresh' not in args:
self.__cmd.append('--no-refresh')
if self.__root:
self.__cmd.extend(['--root', self.__root])
diff --git a/tests/unit/modules/test_zypperpkg.py b/tests/unit/modules/test_zypperpkg.py
index e7474ff777..9d109a431d 100644
--- a/tests/unit/modules/test_zypperpkg.py
+++ b/tests/unit/modules/test_zypperpkg.py
@@ -141,7 +141,7 @@ class ZypperTestCase(TestCase, LoaderModuleMockMixin):
self.assertEqual(zypper.__zypper__.call('foo'), stdout_xml_snippet)
self.assertEqual(len(sniffer.calls), 1)
- zypper.__zypper__.call('bar')
+ zypper.__zypper__.call('--no-refresh', 'bar')
self.assertEqual(len(sniffer.calls), 2)
self.assertEqual(sniffer.calls[0]['args'][0], ['zypper', '--non-interactive', '--no-refresh', 'foo'])
self.assertEqual(sniffer.calls[1]['args'][0], ['zypper', '--non-interactive', '--no-refresh', 'bar'])
--
2.20.1

View File

@ -1,33 +0,0 @@
From 34089db15e7d3a1e361789f04613d0a13138dea0 Mon Sep 17 00:00:00 2001
From: rallytime <nicole@saltstack.com>
Date: Fri, 13 Jul 2018 12:42:46 -0400
Subject: [PATCH] Don't error on retcode 0 in
libcrypto.OPENSSL_init_crypto call
Fixes #46884
---
salt/utils/rsax931.py | 7 +++----
1 file changed, 3 insertions(+), 4 deletions(-)
diff --git a/salt/utils/rsax931.py b/salt/utils/rsax931.py
index 168c02734b..6bfef41bd3 100644
--- a/salt/utils/rsax931.py
+++ b/salt/utils/rsax931.py
@@ -71,10 +71,9 @@ def _init_libcrypto():
libcrypto.RSA_public_decrypt.argtypes = (c_int, c_char_p, c_char_p, c_void_p, c_int)
try:
- if libcrypto.OPENSSL_init_crypto(OPENSSL_INIT_NO_LOAD_CONFIG |
- OPENSSL_INIT_ADD_ALL_CIPHERS |
- OPENSSL_INIT_ADD_ALL_DIGESTS, None) != 1:
- raise OSError("Failed to initialize OpenSSL library (OPENSSL_init_crypto failed)")
+ libcrypto.OPENSSL_init_crypto(OPENSSL_INIT_NO_LOAD_CONFIG |
+ OPENSSL_INIT_ADD_ALL_CIPHERS |
+ OPENSSL_INIT_ADD_ALL_DIGESTS, None)
except AttributeError:
# Support for OpenSSL < 1.1 (OPENSSL_API_COMPAT < 0x10100000L)
libcrypto.OPENSSL_no_config()
--
2.19.2

View File

@ -1,4 +1,4 @@
From 9e0c0bbc1b48fa7065a9d0f50bd7111789712e2d Mon Sep 17 00:00:00 2001
From d3b2f157643845d2659a226ba72ce24ce1d2a73d Mon Sep 17 00:00:00 2001
From: Maximilian Meister <mmeister@suse.de>
Date: Thu, 5 Apr 2018 13:23:23 +0200
Subject: [PATCH] fall back to PyMySQL
@ -7,311 +7,32 @@ same is already done in modules (see #26803)
Signed-off-by: Maximilian Meister <mmeister@suse.de>
---
salt/auth/mysql.py | 25 ++++++++++++++++++++++---
salt/cache/mysql_cache.py | 28 +++++++++++++++++++---------
salt/modules/mysql.py | 22 ++++++++++------------
salt/pillar/mysql.py | 21 ++++++++++++++++-----
salt/returners/mysql.py | 29 +++++++++++++++++++++--------
tests/unit/pillar/test_mysql.py | 2 +-
6 files changed, 89 insertions(+), 38 deletions(-)
salt/modules/mysql.py | 4 ++--
1 file changed, 2 insertions(+), 2 deletions(-)
diff --git a/salt/auth/mysql.py b/salt/auth/mysql.py
index 8bc18a4101..86d00a4373 100644
--- a/salt/auth/mysql.py
+++ b/salt/auth/mysql.py
@@ -55,10 +55,29 @@ import logging
log = logging.getLogger(__name__)
try:
+ # Trying to import MySQLdb
import MySQLdb
- HAS_MYSQL = True
+ import MySQLdb.cursors
+ import MySQLdb.converters
+ from MySQLdb.connections import OperationalError
except ImportError:
- HAS_MYSQL = False
+ try:
+ # MySQLdb import failed, try to import PyMySQL
+ import pymysql
+ pymysql.install_as_MySQLdb()
+ import MySQLdb
+ import MySQLdb.cursors
+ import MySQLdb.converters
+ from MySQLdb.err import OperationalError
+ except ImportError:
+ MySQLdb = None
+
+
+def __virtual__():
+ '''
+ Confirm that a python mysql client is installed.
+ '''
+ return bool(MySQLdb), 'No python mysql client installed.' if MySQLdb is None else ''
def __get_connection_info():
@@ -95,7 +114,7 @@ def auth(username, password):
_info['username'],
_info['password'],
_info['database'])
- except MySQLdb.OperationalError as e:
+ except OperationalError as e:
log.error(e)
return False
diff --git a/salt/cache/mysql_cache.py b/salt/cache/mysql_cache.py
index 9d6aa17987..8b0a942310 100644
--- a/salt/cache/mysql_cache.py
+++ b/salt/cache/mysql_cache.py
@@ -46,11 +46,24 @@ value to ``mysql``:
from __future__ import absolute_import, print_function, unicode_literals
from time import sleep
import logging
+
try:
+ # Trying to import MySQLdb
import MySQLdb
- HAS_MYSQL = True
+ import MySQLdb.cursors
+ import MySQLdb.converters
+ from MySQLdb.connections import OperationalError
except ImportError:
- HAS_MYSQL = False
+ try:
+ # MySQLdb import failed, try to import PyMySQL
+ import pymysql
+ pymysql.install_as_MySQLdb()
+ import MySQLdb
+ import MySQLdb.cursors
+ import MySQLdb.converters
+ from MySQLdb.err import OperationalError
+ except ImportError:
+ MySQLdb = None
from salt.exceptions import SaltCacheError
@@ -71,12 +84,9 @@ __func_alias__ = {'ls': 'list'}
def __virtual__():
'''
- Confirm that python-mysql package is installed.
+ Confirm that a python mysql client is installed.
'''
- if not HAS_MYSQL:
- return (False, "Please install python-mysql package to use mysql data "
- "cache driver")
- return __virtualname__
+ return bool(MySQLdb), 'No python mysql client installed.' if MySQLdb is None else ''
def run_query(conn, query, retries=3):
@@ -84,13 +94,13 @@ def run_query(conn, query, retries=3):
Get a cursor and run a query. Reconnect up to `retries` times if
needed.
Returns: cursor, affected rows counter
- Raises: SaltCacheError, AttributeError, MySQLdb.OperationalError
+ Raises: SaltCacheError, AttributeError, OperationalError
'''
try:
cur = conn.cursor()
out = cur.execute(query)
return cur, out
- except (AttributeError, MySQLdb.OperationalError) as e:
+ except (AttributeError, OperationalError) as e:
if retries == 0:
raise
# reconnect creating new client
diff --git a/salt/modules/mysql.py b/salt/modules/mysql.py
index 833a766a97..a5965f3a25 100644
index de8916f4f2..64c773f40a 100644
--- a/salt/modules/mysql.py
+++ b/salt/modules/mysql.py
@@ -51,13 +51,14 @@ import salt.utils.stringutils
from salt.ext import six
# pylint: disable=import-error
from salt.ext.six.moves import range, zip # pylint: disable=no-name-in-module,redefined-builtin
+
try:
- # Try to import MySQLdb
+ # Trying to import MySQLdb
import MySQLdb
@@ -58,7 +58,7 @@ try:
import MySQLdb.cursors
import MySQLdb.converters
from MySQLdb.constants import FIELD_TYPE, FLAG
- HAS_MYSQLDB = True
- from MySQLdb import OperationalError
+ from MySQLdb.connections import OperationalError
except ImportError:
try:
# MySQLdb import failed, try to import PyMySQL
@@ -67,10 +68,9 @@ except ImportError:
@@ -68,7 +68,7 @@ except ImportError:
import MySQLdb.cursors
import MySQLdb.converters
from MySQLdb.constants import FIELD_TYPE, FLAG
- HAS_MYSQLDB = True
- from MySQLdb import OperationalError
+ from MySQLdb.err import OperationalError
except ImportError:
- # No MySQL Connector installed, return False
- HAS_MYSQLDB = False
+ MySQLdb = None
log = logging.getLogger(__name__)
@@ -195,11 +195,9 @@ And theses could be mixed, in a like query value with args: 'f\_o\%%o`b\'a"r'
def __virtual__():
'''
- Only load this module if the mysql libraries exist
+ Confirm that a python mysql client is installed.
'''
- if HAS_MYSQLDB:
- return True
- return (False, 'The mysql execution module cannot be loaded: neither MySQLdb nor PyMySQL is available.')
+ return bool(MySQLdb), 'No python mysql client installed.' if MySQLdb is None else ''
def __check_table(name, table, **connection_args):
@@ -331,7 +329,7 @@ def _connect(**kwargs):
connargs.pop('passwd')
try:
dbc = MySQLdb.connect(**connargs)
- except MySQLdb.OperationalError as exc:
+ except OperationalError as exc:
err = 'MySQL Error {0}: {1}'.format(*exc)
__context__['mysql.error'] = err
log.error(err)
@@ -647,7 +645,7 @@ def query(database, query, **connection_args):
log.debug('Using db: %s to run query %s', database, query)
try:
affected = _execute(cur, query)
- except MySQLdb.OperationalError as exc:
+ except OperationalError as exc:
err = 'MySQL Error {0}: {1}'.format(*exc)
__context__['mysql.error'] = err
log.error(err)
@@ -772,7 +770,7 @@ def status(**connection_args):
qry = 'SHOW STATUS'
try:
_execute(cur, qry)
- except MySQLdb.OperationalError as exc:
+ except OperationalError as exc:
err = 'MySQL Error {0}: {1}'.format(*exc)
__context__['mysql.error'] = err
log.error(err)
diff --git a/salt/pillar/mysql.py b/salt/pillar/mysql.py
index 8029e5c197..d3f9619ad5 100644
--- a/salt/pillar/mysql.py
+++ b/salt/pillar/mysql.py
@@ -59,16 +59,27 @@ log = logging.getLogger(__name__)
# Import third party libs
try:
+ # Trying to import MySQLdb
import MySQLdb
- HAS_MYSQL = True
+ import MySQLdb.cursors
+ import MySQLdb.converters
except ImportError:
- HAS_MYSQL = False
+ try:
+ # MySQLdb import failed, try to import PyMySQL
+ import pymysql
+ pymysql.install_as_MySQLdb()
+ import MySQLdb
+ import MySQLdb.cursors
+ import MySQLdb.converters
+ except ImportError:
+ MySQLdb = None
def __virtual__():
- if not HAS_MYSQL:
- return False
- return True
+ '''
+ Confirm that a python mysql client is installed.
+ '''
+ return bool(MySQLdb), 'No python mysql client installed.' if MySQLdb is None else ''
class MySQLExtPillar(SqlBaseExtPillar):
diff --git a/salt/returners/mysql.py b/salt/returners/mysql.py
index af6698142b..85892cb06c 100644
--- a/salt/returners/mysql.py
+++ b/salt/returners/mysql.py
@@ -155,11 +155,24 @@ import salt.exceptions
# Import 3rd-party libs
from salt.ext import six
+
try:
+ # Trying to import MySQLdb
import MySQLdb
- HAS_MYSQL = True
+ import MySQLdb.cursors
+ import MySQLdb.converters
+ from MySQLdb.connections import OperationalError
except ImportError:
- HAS_MYSQL = False
+ try:
+ # MySQLdb import failed, try to import PyMySQL
+ import pymysql
+ pymysql.install_as_MySQLdb()
+ import MySQLdb
+ import MySQLdb.cursors
+ import MySQLdb.converters
+ from MySQLdb.err import OperationalError
+ except ImportError:
+ MySQLdb = None
log = logging.getLogger(__name__)
@@ -168,10 +181,10 @@ __virtualname__ = 'mysql'
def __virtual__():
- if not HAS_MYSQL:
- return False, 'Could not import mysql returner; ' \
- 'mysql python client is not installed.'
- return True
+ '''
+ Confirm that a python mysql client is installed.
+ '''
+ return bool(MySQLdb), 'No python mysql client installed.' if MySQLdb is None else ''
def _get_options(ret=None):
@@ -228,7 +241,7 @@ def _get_serv(ret=None, commit=False):
conn = __context__['mysql_returner_conn']
conn.ping()
connect = False
- except MySQLdb.connections.OperationalError as exc:
+ except OperationalError as exc:
log.debug('OperationalError on ping: %s', exc)
if connect:
@@ -254,7 +267,7 @@ def _get_serv(ret=None, commit=False):
__context__['mysql_returner_conn'] = conn
except TypeError:
pass
- except MySQLdb.connections.OperationalError as exc:
+ except OperationalError as exc:
raise salt.exceptions.SaltMasterError('MySQL returner could not connect to database: {exc}'.format(exc=exc))
cursor = conn.cursor()
diff --git a/tests/unit/pillar/test_mysql.py b/tests/unit/pillar/test_mysql.py
index a242eac1a1..f6a2d0a44b 100644
--- a/tests/unit/pillar/test_mysql.py
+++ b/tests/unit/pillar/test_mysql.py
@@ -12,7 +12,7 @@ import salt.pillar.mysql as mysql
@skipIf(NO_MOCK, NO_MOCK_REASON)
-@skipIf(not mysql.HAS_MYSQL, 'MySQL-python module not installed')
+@skipIf(mysql.MySQLdb is None, 'MySQL-python module not installed')
class MysqlPillarTestCase(TestCase):
maxDiff = None
MySQLdb = None
--
2.13.7
2.17.1

View File

@ -1,88 +0,0 @@
From 6e5f0fbbe3c232c7d5212d4fddfe52b5a5a71597 Mon Sep 17 00:00:00 2001
From: Michele Bologna <michele.bologna@suse.com>
Date: Thu, 14 Dec 2017 18:20:02 +0100
Subject: [PATCH] Feat: add grain for all FQDNs
This PR adds a grain named fqdns to the grains.
fqdns represents all the FQDNs known for the system on all available interfaces (excluding lo).
Note: hostname != FQDN
hostname is the UNIX name of the machine. A machine can have one and only one hostname.
FQDN is host.domain that resolves to an IP address that the machines is answering to.
A machine can have 1+ FQDNs.
Upstream PR:
https://github.com/saltstack/salt/pull/45060
---
salt/grains/core.py | 27 +++++++++++++++++++++++++++
tests/integration/modules/test_grains.py | 1 +
tests/unit/grains/test_core.py | 1 +
3 files changed, 29 insertions(+)
diff --git a/salt/grains/core.py b/salt/grains/core.py
index 8545d4368c..24de3cff6b 100644
--- a/salt/grains/core.py
+++ b/salt/grains/core.py
@@ -1886,6 +1886,33 @@ def append_domain():
return grain
+def fqdns():
+ '''
+ Return all known FQDNs for the system by enumerating all interfaces and
+ then trying to reverse resolve them (excluding 'lo' interface).
+ '''
+ # Provides:
+ # fqdns
+
+ grains = {}
+ fqdns = set()
+
+ addresses = salt.utils.network.ip_addrs(include_loopback=False,
+ interface_data=_INTERFACES)
+ addresses.extend(salt.utils.network.ip_addrs6(include_loopback=False,
+ interface_data=_INTERFACES))
+
+ for ip in addresses:
+ try:
+ fqdns.add(socket.gethostbyaddr(ip)[0])
+ except (socket.error, socket.herror,
+ socket.gaierror, socket.timeout) as e:
+ log.error("Exception during resolving address: " + str(e))
+
+ grains['fqdns'] = list(fqdns)
+ return grains
+
+
def ip_fqdn():
'''
Return ip address and FQDN grains
diff --git a/tests/integration/modules/test_grains.py b/tests/integration/modules/test_grains.py
index 616e07d455..dfa70afa03 100644
--- a/tests/integration/modules/test_grains.py
+++ b/tests/integration/modules/test_grains.py
@@ -51,6 +51,7 @@ class TestModulesGrains(ModuleCase):
'cpuarch',
'domain',
'fqdn',
+ 'fqdns',
'gid',
'groupname',
'host',
diff --git a/tests/unit/grains/test_core.py b/tests/unit/grains/test_core.py
index 54c8293dcf..616c62e658 100644
--- a/tests/unit/grains/test_core.py
+++ b/tests/unit/grains/test_core.py
@@ -7,6 +7,7 @@
from __future__ import absolute_import, print_function, unicode_literals
import logging
import os
+import socket
# Import Salt Testing Libs
try:
--
2.13.7

View File

@ -1,35 +0,0 @@
From b276ee7373e88d05c01912a9d9d3a44a5d17bab6 Mon Sep 17 00:00:00 2001
From: Daniel Wallace <danielwallace@gtmanfred.com>
Date: Mon, 13 Aug 2018 13:55:37 -0500
Subject: [PATCH] fix async call to process manager
---
salt/minion.py | 4 ++--
1 file changed, 2 insertions(+), 2 deletions(-)
diff --git a/salt/minion.py b/salt/minion.py
index 9c05a646ea..8b8fd797d1 100644
--- a/salt/minion.py
+++ b/salt/minion.py
@@ -923,7 +923,7 @@ class MinionManager(MinionBase):
install_zmq()
self.io_loop = ZMQDefaultLoop.current()
self.process_manager = ProcessManager(name='MultiMinionProcessManager')
- self.io_loop.spawn_callback(self.process_manager.run, **{'async': True}) # Tornado backward compat
+ self.io_loop.spawn_callback(self.process_manager.run, **{'asynchronous': True}) # Tornado backward compat
def __del__(self):
self.destroy()
@@ -1120,7 +1120,7 @@ class Minion(MinionBase):
time.sleep(sleep_time)
self.process_manager = ProcessManager(name='MinionProcessManager')
- self.io_loop.spawn_callback(self.process_manager.run, **{'async': True})
+ self.io_loop.spawn_callback(self.process_manager.run, **{'asynchronous': True})
# We don't have the proxy setup yet, so we can't start engines
# Engines need to be able to access __proxy__
if not salt.utils.platform.is_proxy():
--
2.17.1

View File

@ -1,72 +0,0 @@
From 5d12b612b1f7b05a13e7b8da02e50ec471a72187 Mon Sep 17 00:00:00 2001
From: Michele Bologna <michele.bologna@suse.com>
Date: Tue, 20 Mar 2018 19:27:36 +0100
Subject: [PATCH] Fix: decrease loglevel when unable to resolve addr
Upstream PR: https://github.com/saltstack/salt/pull/46575
It occurs that when the machine has multiple interfaces without an associated FQDN, Salt logs are polluted by this error.
Some examples:
```
caasp-admin:~ # uptime
09:08am up 0:13, 2 users, load average: 1.30, 1.37, 0.98
caasp-admin:~ # docker logs $(docker ps | grep salt-master | awk '{print $1}') 2>&1 | grep "Exception during resolving address" | wc -l
528
```
```
caasp-admin:~ # docker exec -it $(docker ps | grep salt-master | awk '{print $1}') salt '*' cmd.run uptime
b24f41eb4cc94624862ca0c9e8afcd15:
09:08am up 0:11, 0 users, load average: 1.26, 0.83, 0.40
admin:
09:08am up 0:13, 2 users, load average: 1.33, 1.37, 0.99
ba8c76af029043a39ba917f7ab2af796:
09:08am up 0:12, 0 users, load average: 0.84, 0.63, 0.32
7b7aa52158524556a0c46ae57569ce93:
09:08am up 0:11, 1 user, load average: 1.05, 0.77, 0.38
5ab0e18cbd084e9088a928a17edb86cb:
09:08am up 0:10, 0 users, load average: 0.12, 0.25, 0.20
1756c9cd9a9a402b91d8636400d1e512:
09:08am up 0:09, 0 users, load average: 0.12, 0.23, 0.14
ca:
09:08am up 0:13, 0 users, load average: 1.33, 1.37, 0.99
caasp-admin:~ # docker exec -it $(docker ps | grep salt-master | awk '{print $1}') salt '*' cmd.run "bash -c 'cat /var/log/salt/minion | grep \"Exception during resolving address\" | wc -l'"
admin:
63
ba8c76af029043a39ba917f7ab2af796:
47
5ab0e18cbd084e9088a928a17edb86cb:
55
7b7aa52158524556a0c46ae57569ce93:
59
b24f41eb4cc94624862ca0c9e8afcd15:
47
1756c9cd9a9a402b91d8636400d1e512:
59
ca:
25
```
This patch changes the log level of the exception to INFO, since the resolve-unable problem is not blocking.
---
salt/grains/core.py | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/salt/grains/core.py b/salt/grains/core.py
index 24de3cff6b..c166a43d7c 100644
--- a/salt/grains/core.py
+++ b/salt/grains/core.py
@@ -1907,7 +1907,7 @@ def fqdns():
fqdns.add(socket.gethostbyaddr(ip)[0])
except (socket.error, socket.herror,
socket.gaierror, socket.timeout) as e:
- log.error("Exception during resolving address: " + str(e))
+ log.info("Exception during resolving address: " + str(e))
grains['fqdns'] = list(fqdns)
return grains
--
2.13.7

View File

@ -1,26 +0,0 @@
From 9289e1607ebf6f397c027d4a6edcf35c59bd600c Mon Sep 17 00:00:00 2001
From: Mihai Dinca <mdinca@suse.de>
Date: Wed, 6 Jun 2018 15:47:45 +0200
Subject: [PATCH] Fix deprecation warning (bsc#1095507)
---
salt/utils/thin.py | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/salt/utils/thin.py b/salt/utils/thin.py
index e4b878eb19..b99e407583 100644
--- a/salt/utils/thin.py
+++ b/salt/utils/thin.py
@@ -546,7 +546,7 @@ def thin_sum(cachedir, form='sha1'):
thintar = gen_thin(cachedir)
code_checksum_path = os.path.join(cachedir, 'thin', 'code-checksum')
if os.path.isfile(code_checksum_path):
- with salt.utils.fopen(code_checksum_path, 'r') as fh:
+ with salt.utils.files.fopen(code_checksum_path, 'r') as fh:
code_checksum = "'{0}'".format(fh.read().strip())
else:
code_checksum = "'0'"
--
2.13.7

View File

@ -1,27 +0,0 @@
From 7bda1dcd4f14da55abe38b1739b1e46ad0f5213c Mon Sep 17 00:00:00 2001
From: Erik Johnson <palehose@gmail.com>
Date: Fri, 13 Apr 2018 11:25:24 -0500
Subject: [PATCH] Fix diffing binary files in file.get_diff (bsc#1098394)
---
salt/modules/file.py | 3 +--
1 file changed, 1 insertion(+), 2 deletions(-)
diff --git a/salt/modules/file.py b/salt/modules/file.py
index 1b4b7e0e46..95bca7fb1b 100644
--- a/salt/modules/file.py
+++ b/salt/modules/file.py
@@ -5008,8 +5008,7 @@ def get_diff(file1,
*salt.utils.data.decode(args)
)
)
- return ret
- return ''
+ return ret
def manage_file(name,
--
2.13.7

View File

@ -1,66 +0,0 @@
From 88a99b5beeaa51eaf646eb92d8f546f65f654008 Mon Sep 17 00:00:00 2001
From: Daniel Wallace <gtmanfred@users.noreply.github.com>
Date: Wed, 25 Apr 2018 11:13:15 -0500
Subject: [PATCH] Fix for EC2 Rate Limit Failures
Fix for ec2 rate limit failures described here: https://bugzilla.suse.com/show_bug.cgi?id=1088888
---
salt/utils/aws.py | 22 ++++++++++++++++------
1 file changed, 16 insertions(+), 6 deletions(-)
diff --git a/salt/utils/aws.py b/salt/utils/aws.py
index 059450e7ca..912f1466ba 100644
--- a/salt/utils/aws.py
+++ b/salt/utils/aws.py
@@ -20,6 +20,7 @@ import hmac
import logging
import salt.config
import re
+import random
from salt.ext import six
# Import Salt libs
@@ -442,8 +443,9 @@ def query(params=None, setname=None, requesturl=None, location=None,
)
headers = {}
- attempts = 5
- while attempts > 0:
+ MAX_RETRIES = 6
+ attempts = 0
+ while attempts < MAX_RETRIES:
log.debug('AWS Request: %s', requesturl)
log.trace('AWS Request Parameters: %s', params_with_headers)
try:
@@ -461,15 +463,23 @@ def query(params=None, setname=None, requesturl=None, location=None,
# check to see if we should retry the query
err_code = data.get('Errors', {}).get('Error', {}).get('Code', '')
- if attempts > 0 and err_code and err_code in AWS_RETRY_CODES:
- attempts -= 1
+ if attempts < MAX_RETRIES and err_code and err_code in AWS_RETRY_CODES:
+ attempts += 1
log.error(
'AWS Response Status Code and Error: [%s %s] %s; '
'Attempts remaining: %s',
exc.response.status_code, exc, data, attempts
)
- # Wait a bit before continuing to prevent throttling
- time.sleep(2)
+ # backoff an exponential amount of time to throttle requests
+ # during "API Rate Exceeded" failures as suggested by the AWS documentation here:
+ # https://docs.aws.amazon.com/AWSEC2/latest/APIReference/query-api-troubleshooting.html
+ # and also here:
+ # https://docs.aws.amazon.com/general/latest/gr/api-retries.html
+ # Failure to implement this approach results in a failure rate of >30% when using salt-cloud with
+ # "--parallel" when creating 50 or more instances with a fixed delay of 2 seconds.
+ # A failure rate of >10% is observed when using the salt-api with an asyncronous client
+ # specified (runner_async).
+ time.sleep(random.uniform(1, 2**attempts))
continue
log.error(
--
2.13.7

View File

@ -1,81 +0,0 @@
From 826194be2a036fee80d3ca546822023416ac3a7d Mon Sep 17 00:00:00 2001
From: Bo Maryniuk <bo@suse.de>
Date: Wed, 21 Mar 2018 11:10:23 +0100
Subject: [PATCH] Fix for [Errno 0] Resolver Error 0 (no error)
(bsc#1087581)
* Lintfix: PEP8 ident
* Use proper levels of the error handling, use proper log formatting.
* Fix unit test for reversed fqdns return data
---
salt/grains/core.py | 19 ++++++++++++-------
tests/unit/grains/test_core.py | 22 ++++++++++++++++++++++
2 files changed, 34 insertions(+), 7 deletions(-)
diff --git a/salt/grains/core.py b/salt/grains/core.py
index c166a43d7c..dc472a6c0a 100644
--- a/salt/grains/core.py
+++ b/salt/grains/core.py
@@ -1898,16 +1898,21 @@ def fqdns():
fqdns = set()
addresses = salt.utils.network.ip_addrs(include_loopback=False,
- interface_data=_INTERFACES)
+ interface_data=_INTERFACES)
addresses.extend(salt.utils.network.ip_addrs6(include_loopback=False,
- interface_data=_INTERFACES))
-
+ interface_data=_INTERFACES))
+ err_message = 'Exception during resolving address: %s'
for ip in addresses:
try:
- fqdns.add(socket.gethostbyaddr(ip)[0])
- except (socket.error, socket.herror,
- socket.gaierror, socket.timeout) as e:
- log.info("Exception during resolving address: " + str(e))
+ fqdns.add(socket.getfqdn(socket.gethostbyaddr(ip)[0]))
+ except socket.herror as err:
+ if err.errno == 0:
+ # No FQDN for this IP address, so we don't need to know this all the time.
+ log.debug("Unable to resolve address %s: %s", ip, err)
+ else:
+ log.error(err_message, err)
+ except (socket.error, socket.gaierror, socket.timeout) as err:
+ log.error(err_message, err)
grains['fqdns'] = list(fqdns)
return grains
diff --git a/tests/unit/grains/test_core.py b/tests/unit/grains/test_core.py
index 616c62e658..dd7d5b06f8 100644
--- a/tests/unit/grains/test_core.py
+++ b/tests/unit/grains/test_core.py
@@ -877,3 +877,25 @@ SwapTotal: 4789244 kB'''
osdata = {'kernel': 'test', }
ret = core._virtual(osdata)
self.assertEqual(ret['virtual'], virt)
+
+ @skipIf(not salt.utils.platform.is_linux(), 'System is not Linux')
+ @patch.object(salt.utils, 'is_windows', MagicMock(return_value=False))
+ @patch('salt.utils.network.ip_addrs', MagicMock(return_value=['1.2.3.4', '5.6.7.8']))
+ @patch('salt.utils.network.ip_addrs6',
+ MagicMock(return_value=['fe80::a8b2:93ff:fe00:0', 'fe80::a8b2:93ff:dead:beef']))
+ @patch('salt.utils.network.socket.getfqdn', MagicMock(side_effect=lambda v: v)) # Just pass-through
+ def test_fqdns_return(self):
+ '''
+ test the return for a dns grain. test for issue:
+ https://github.com/saltstack/salt/issues/41230
+ '''
+ reverse_resolv_mock = [('foo.bar.baz', [], ['1.2.3.4']),
+ ('rinzler.evil-corp.com', [], ['5.6.7.8']),
+ ('foo.bar.baz', [], ['fe80::a8b2:93ff:fe00:0']),
+ ('bluesniff.foo.bar', [], ['fe80::a8b2:93ff:dead:beef'])]
+ ret = {'fqdns': ['bluesniff.foo.bar', 'foo.bar.baz', 'rinzler.evil-corp.com']}
+ with patch.object(socket, 'gethostbyaddr', side_effect=reverse_resolv_mock):
+ fqdns = core.fqdns()
+ self.assertIn('fqdns', fqdns)
+ self.assertEqual(len(fqdns['fqdns']), len(ret['fqdns']))
+ self.assertEqual(set(fqdns['fqdns']), set(ret['fqdns']))
--
2.13.7

View File

@ -1,35 +0,0 @@
From f0f63dc8dd5979b51db71cf759d4350da1078383 Mon Sep 17 00:00:00 2001
From: Jochen Breuer <jbreuer@suse.de>
Date: Wed, 13 Jun 2018 17:51:13 +0200
Subject: [PATCH] Fix for sorting of multi-version packages (bsc#1097174
and bsc#1097413)
---
salt/modules/rpm.py | 3 ++-
1 file changed, 2 insertions(+), 1 deletion(-)
diff --git a/salt/modules/rpm.py b/salt/modules/rpm.py
index 3683234f59..8e71992f81 100644
--- a/salt/modules/rpm.py
+++ b/salt/modules/rpm.py
@@ -9,6 +9,7 @@ import logging
import os
import re
import datetime
+from distutils.version import LooseVersion
# Import Salt libs
import salt.utils.decorators.path
@@ -609,7 +610,7 @@ def info(*packages, **kwargs):
# pick only latest versions
# (in case multiple packages installed, e.g. kernel)
ret = dict()
- for pkg_data in reversed(sorted(_ret, key=lambda x: x['edition'])):
+ for pkg_data in reversed(sorted(_ret, key=lambda x: LooseVersion(x['edition']))):
pkg_name = pkg_data.pop('name')
# Filter out GPG public keys packages
if pkg_name.startswith('gpg-pubkey'):
--
2.17.1

View File

@ -1,4 +1,4 @@
From 1c9cba3a397d53e399b82320507fb5141234c67f Mon Sep 17 00:00:00 2001
From 616750ad4b2b2b8d55d19b81500dbd4f0aba1f74 Mon Sep 17 00:00:00 2001
From: Jochen Breuer <jbreuer@suse.de>
Date: Thu, 6 Sep 2018 17:15:18 +0200
Subject: [PATCH] Fix for SUSE Expanded Support detection
@ -10,20 +10,17 @@ CentOS installation, this lead to SUSE ES being detected as CentOS.
This change also adds a check for redhat-release and then marks the
'lsb_distrib_id' as RedHat.
---
salt/grains/core.py | 13 +++++++++++--
1 file changed, 11 insertions(+), 2 deletions(-)
salt/grains/core.py | 9 +++++++++
1 file changed, 9 insertions(+)
diff --git a/salt/grains/core.py b/salt/grains/core.py
index dc472a6c0a..a5c3a6a8cf 100644
index f0f1bd17c4..b2c1d475b0 100644
--- a/salt/grains/core.py
+++ b/salt/grains/core.py
@@ -1593,8 +1593,17 @@ def os_data():
grains['lsb_distrib_codename'] = \
comps[3].replace('(', '').replace(')', '')
elif os.path.isfile('/etc/centos-release'):
- # CentOS Linux
- grains['lsb_distrib_id'] = 'CentOS'
+ log.trace('Parsing distrib info from /etc/centos-release')
@@ -1821,6 +1821,15 @@ def os_data():
log.trace('Parsing distrib info from /etc/centos-release')
# CentOS Linux
grains['lsb_distrib_id'] = 'CentOS'
+ # Maybe CentOS Linux; could also be SUSE Expanded Support.
+ # SUSE ES has both, centos-release and redhat-release.
+ if os.path.isfile('/etc/redhat-release'):
@ -33,11 +30,10 @@ index dc472a6c0a..a5c3a6a8cf 100644
+ # This is a SUSE Expanded Support Rhel installation
+ grains['lsb_distrib_id'] = 'RedHat'
+ break
+ grains.setdefault('lsb_distrib_id', 'CentOS')
with salt.utils.files.fopen('/etc/centos-release') as ifile:
for line in ifile:
# Need to pull out the version and codename
--
2.19.0
2.17.1

View File

@ -1,4 +1,4 @@
From 49f8f296edf4655e2be7e564745931692ae939b7 Mon Sep 17 00:00:00 2001
From 6747243babde058762428f9bdb0e3ef16402eadd Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Pablo=20Su=C3=A1rez=20Hern=C3=A1ndez?=
<psuarezhernandez@suse.com>
Date: Tue, 6 Nov 2018 16:38:54 +0000
@ -11,85 +11,29 @@ Test git ext_pillar across multiple repos using __env__
Remove unicode references
---
salt/utils/gitfs.py | 2 +-
tests/integration/pillar/test_git_pillar.py | 144 ++++++++++++++++++++
tests/support/gitfs.py | 66 ++++++++-
3 files changed, 209 insertions(+), 3 deletions(-)
tests/integration/pillar/test_git_pillar.py | 45 +++++++++++++++++++++
1 file changed, 45 insertions(+)
diff --git a/salt/utils/gitfs.py b/salt/utils/gitfs.py
index 6963f40226..11af741e35 100644
--- a/salt/utils/gitfs.py
+++ b/salt/utils/gitfs.py
@@ -2975,7 +2975,7 @@ class GitPillar(GitBase):
if repo.env:
env = repo.env
else:
- env = 'base' if repo.branch == repo.base else repo.branch
+ env = 'base' if repo.branch == repo.base else repo.get_checkout_target()
if repo._mountpoint:
if self.link_mountpoint(repo):
self.pillar_dirs[repo.linkdir] = env
diff --git a/tests/integration/pillar/test_git_pillar.py b/tests/integration/pillar/test_git_pillar.py
index e97e720bab..e052782311 100644
index 5d9a374f6e..4a9553d1a1 100644
--- a/tests/integration/pillar/test_git_pillar.py
+++ b/tests/integration/pillar/test_git_pillar.py
@@ -358,6 +358,38 @@ class GitPythonMixin(object):
"available on the salt master"]}
@@ -1361,6 +1361,51 @@ class TestPygit2SSH(GitPillarSSHTestBase):
'nested_dict': {'master': True}}}
)
+ def test_includes_enabled_solves___env___with_mountpoint(self):
+ '''
+ Test with git_pillar_includes enabled and using "__env__" as the branch
+ name for the configured repositories.
+ The "gitinfo" repository contains top.sls file with a local reference
+ and also referencing external "nowhere.foo" which is provided by "webinfo"
+ repository mounted as "nowhere".
+ '''
+ ret = self.get_pillar('''\
+ file_ignore_regex: []
+ file_ignore_glob: []
+ git_pillar_provider: gitpython
+ cachedir: {cachedir}
+ extension_modules: {extmods}
+ ext_pillar:
+ - git:
+ - __env__ {url_extra_repo}:
+ - name: gitinfo
+ - __env__ {url}:
+ - name: webinfo
+ - mountpoint: nowhere
+ ''')
+ self.assertEqual(
+ ret,
+ {'branch': 'master',
+ 'motd': 'The force will be with you. Always.',
+ 'mylist': ['master'],
+ 'mydict': {'master': True,
+ 'nested_list': ['master'],
+ 'nested_dict': {'master': True}}}
+ )
+
@destructiveTest
@skipIf(NO_MOCK, NO_MOCK_REASON)
@@ -413,7 +445,12 @@ class TestGitPythonAuthenticatedHTTP(TestGitPythonHTTP, GitPythonMixin):
username=cls.username,
password=cls.password,
port=cls.nginx_port)
+ cls.url_extra_repo = 'http://{username}:{password}@127.0.0.1:{port}/extra_repo.git'.format(
+ username=cls.username,
+ password=cls.password,
+ port=cls.nginx_port)
cls.ext_opts['url'] = cls.url
+ cls.ext_opts['url_extra_repo'] = cls.url_extra_repo
cls.ext_opts['username'] = cls.username
cls.ext_opts['password'] = cls.password
@@ -1192,6 +1229,40 @@ class TestPygit2SSH(GitPillarSSHTestBase):
''')
self.assertEqual(ret, expected)
+ def test_includes_enabled_solves___env___with_mountpoint(self):
+@skipIf(NO_MOCK, NO_MOCK_REASON)
+@skipIf(_windows_or_mac(), 'minion is windows or mac')
+@skip_if_not_root
+@skipIf(not HAS_PYGIT2, 'pygit2 >= {0} and libgit2 >= {1} required'.format(PYGIT2_MINVER, LIBGIT2_MINVER))
+@skipIf(not HAS_NGINX, 'nginx not present')
+@skipIf(not HAS_VIRTUALENV, 'virtualenv not present')
+class TestPygit2HTTP(GitPillarHTTPTestBase):
+ '''
+ Test git_pillar with pygit2 using SSH authentication
+ '''
+ def test_single_source(self):
+ '''
+ Test with git_pillar_includes enabled and using "__env__" as the branch
+ name for the configured repositories.
@ -123,205 +67,9 @@ index e97e720bab..e052782311 100644
+ 'nested_dict': {'master': True}}}
+ )
+
@skipIf(NO_MOCK, NO_MOCK_REASON)
@skipIf(_windows_or_mac(), 'minion is windows or mac')
@@ -1439,6 +1510,38 @@ class TestPygit2HTTP(GitPillarHTTPTestBase):
''')
self.assertEqual(ret, expected)
+ def test_includes_enabled_solves___env___with_mountpoint(self):
+ '''
+ Test with git_pillar_includes enabled and using "__env__" as the branch
+ name for the configured repositories.
+ The "gitinfo" repository contains top.sls file with a local reference
+ and also referencing external "nowhere.foo" which is provided by "webinfo"
+ repository mounted as "nowhere".
+ '''
+ ret = self.get_pillar('''\
+ file_ignore_regex: []
+ file_ignore_glob: []
+ git_pillar_provider: pygit2
+ cachedir: {cachedir}
+ extension_modules: {extmods}
+ ext_pillar:
+ - git:
+ - __env__ {url_extra_repo}:
+ - name: gitinfo
+ - __env__ {url}:
+ - name: webinfo
+ - mountpoint: nowhere
+ ''')
+ self.assertEqual(
+ ret,
+ {'branch': 'master',
+ 'motd': 'The force will be with you. Always.',
+ 'mylist': ['master'],
+ 'mydict': {'master': True,
+ 'nested_list': ['master'],
+ 'nested_dict': {'master': True}}}
+ )
+
@skipIf(NO_MOCK, NO_MOCK_REASON)
@skipIf(_windows_or_mac(), 'minion is windows or mac')
@@ -1887,3 +1990,44 @@ class TestPygit2AuthenticatedHTTP(GitPillarHTTPTestBase):
- env: base
''')
self.assertEqual(ret, expected)
+
+ def test_includes_enabled_solves___env___with_mountpoint(self):
+ '''
+ Test with git_pillar_includes enabled and using "__env__" as the branch
+ name for the configured repositories.
+ The "gitinfo" repository contains top.sls file with a local reference
+ and also referencing external "nowhere.foo" which is provided by "webinfo"
+ repository mounted as "nowhere".
+ '''
+ ret = self.get_pillar('''\
+ file_ignore_regex: []
+ file_ignore_glob: []
+ git_pillar_provider: pygit2
+ git_pillar_user: {user}
+ git_pillar_password: {password}
+ git_pillar_insecure_auth: True
+ cachedir: {cachedir}
+ extension_modules: {extmods}
+ ext_pillar:
+ - git:
+ - __env__ {url_extra_repo}:
+ - name: gitinfo
+ - user: {user}
+ - password: {password}
+ - insecure_auth: True
+ - __env__ {url}:
+ - name: webinfo
+ - mountpoint: nowhere
+ - user: {user}
+ - password: {password}
+ - insecure_auth: True
+ ''')
+ self.assertEqual(
+ ret,
+ {'branch': 'master',
+ 'motd': 'The force will be with you. Always.',
+ 'mylist': ['master'],
+ 'mydict': {'master': True,
+ 'nested_list': ['master'],
+ 'nested_dict': {'master': True}}}
+ )
diff --git a/tests/support/gitfs.py b/tests/support/gitfs.py
index 2afd31539d..e645c50a86 100644
--- a/tests/support/gitfs.py
+++ b/tests/support/gitfs.py
@@ -133,9 +133,13 @@ class SSHDMixin(ModuleCase, ProcessManager, SaltReturnAssertsMixin):
cls.url = 'ssh://{username}@127.0.0.1:{port}/~/repo.git'.format(
username=cls.username,
port=cls.sshd_port)
+ cls.url_extra_repo = 'ssh://{username}@127.0.0.1:{port}/~/extra_repo.git'.format(
+ username=cls.username,
+ port=cls.sshd_port)
home = '/root/.ssh'
cls.ext_opts = {
'url': cls.url,
+ 'url_extra_repo': cls.url_extra_repo,
'privkey_nopass': os.path.join(home, cls.id_rsa_nopass),
'pubkey_nopass': os.path.join(home, cls.id_rsa_nopass + '.pub'),
'privkey_withpass': os.path.join(home, cls.id_rsa_withpass),
@@ -193,7 +197,8 @@ class WebserverMixin(ModuleCase, ProcessManager, SaltReturnAssertsMixin):
# get_unused_localhost_port() return identical port numbers.
cls.uwsgi_port = get_unused_localhost_port()
cls.url = 'http://127.0.0.1:{port}/repo.git'.format(port=cls.nginx_port)
- cls.ext_opts = {'url': cls.url}
+ cls.url_extra_repo = 'http://127.0.0.1:{port}/extra_repo.git'.format(port=cls.nginx_port)
+ cls.ext_opts = {'url': cls.url, 'url_extra_repo': cls.url_extra_repo}
# Add auth params if present (if so this will trigger the spawned
# server to turn on HTTP basic auth).
for credential_param in ('user', 'password'):
@@ -250,7 +255,7 @@ class GitTestBase(ModuleCase):
Base class for all gitfs/git_pillar tests. Must be subclassed and paired
with either SSHDMixin or WebserverMixin to provide the server.
'''
- case = port = bare_repo = admin_repo = None
+ case = port = bare_repo = base_extra_repo = admin_repo = admin_extra_repo = None
maxDiff = None
git_opts = '-c user.name="Foo Bar" -c user.email=foo@bar.com'
ext_opts = {}
@@ -465,6 +470,61 @@ class GitPillarTestBase(GitTestBase, LoaderModuleMockMixin):
'''))
_push('top_only', 'add top_only branch')
+ def make_extra_repo(self, root_dir, user='root'):
+ self.bare_extra_repo = os.path.join(root_dir, 'extra_repo.git')
+ self.admin_extra_repo = os.path.join(root_dir, 'admin_extra')
+
+ for dirname in (self.bare_extra_repo, self.admin_extra_repo):
+ shutil.rmtree(dirname, ignore_errors=True)
+
+ # Create bare extra repo
+ self.run_function(
+ 'git.init',
+ [self.bare_extra_repo],
+ user=user,
+ bare=True)
+
+ # Clone bare repo
+ self.run_function(
+ 'git.clone',
+ [self.admin_extra_repo],
+ url=self.bare_extra_repo,
+ user=user)
+
+ def _push(branch, message):
+ self.run_function(
+ 'git.add',
+ [self.admin_extra_repo, '.'],
+ user=user)
+ self.run_function(
+ 'git.commit',
+ [self.admin_extra_repo, message],
+ user=user,
+ git_opts=self.git_opts,
+ )
+ self.run_function(
+ 'git.push',
+ [self.admin_extra_repo],
+ remote='origin',
+ ref=branch,
+ user=user,
+ )
+
+ with salt.utils.files.fopen(
+ os.path.join(self.admin_extra_repo, 'top.sls'), 'w') as fp_:
+ fp_.write(textwrap.dedent('''\
+ "{{saltenv}}":
+ '*':
+ - motd
+ - nowhere.foo
+ '''))
+ with salt.utils.files.fopen(
+ os.path.join(self.admin_extra_repo, 'motd.sls'), 'w') as fp_:
+ fp_.write(textwrap.dedent('''\
+ motd: The force will be with you. Always.
+ '''))
+ _push('master', 'initial commit')
+
class GitPillarSSHTestBase(GitPillarTestBase, SSHDMixin):
'''
@@ -533,6 +593,7 @@ class GitPillarSSHTestBase(GitPillarTestBase, SSHDMixin):
)
)
self.make_repo(root_dir, user=self.username)
+ self.make_extra_repo(root_dir, user=self.username)
def get_pillar(self, ext_pillar_conf):
@requires_system_grains
def test_root_parameter(self, grains):
'''
@@ -579,3 +640,4 @@ class GitPillarHTTPTestBase(GitPillarTestBase, WebserverMixin):
self.spawn_server() # pylint: disable=E1120
self.make_repo(self.repo_dir)
+ self.make_extra_repo(self.repo_dir)
--
2.17.1

View File

@ -1,37 +0,0 @@
From 5502f05fac89330ab26d04e29d3aa6d36ab928c5 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Pablo=20Su=C3=A1rez=20Hern=C3=A1ndez?=
<psuarezhernandez@suse.com>
Date: Thu, 20 Sep 2018 11:51:58 +0100
Subject: [PATCH] Fix index error when running on Python 3
Fix wrong queryformat for zypper list_provides
---
salt/modules/zypper.py | 4 ++--
1 file changed, 2 insertions(+), 2 deletions(-)
diff --git a/salt/modules/zypper.py b/salt/modules/zypper.py
index 695bce4f4e..e4423cf1fc 100644
--- a/salt/modules/zypper.py
+++ b/salt/modules/zypper.py
@@ -2314,7 +2314,7 @@ def list_provides(**kwargs):
'''
ret = __context__.get('pkg.list_provides')
if not ret:
- cmd = ['rpm', '-qa', '--queryformat', '[%{PROVIDES}_|-%{NAME}\n]']
+ cmd = ['rpm', '-qa', '--queryformat', '%{PROVIDES}_|-%{NAME}\n']
ret = dict()
for line in __salt__['cmd.run'](cmd, output_loglevel='trace', python_shell=False).splitlines():
provide, realname = line.split('_|-')
@@ -2379,7 +2379,7 @@ def resolve_capabilities(pkgs, refresh, **kwargs):
try:
result = search(name, provides=True, match='exact')
if len(result) == 1:
- name = result.keys()[0]
+ name = next(iter(result.keys()))
elif len(result) > 1:
log.warn("Found ambiguous match for capability '%s'.", pkg)
except CommandExecutionError as exc:
--
2.17.1

View File

@ -1,4 +1,4 @@
From 0509f0b0f1e880e7651e2a33cf5b70ef1930a3ff Mon Sep 17 00:00:00 2001
From b6d47a2ca7f1bed902dfc6574e6fe91d3034aa29 Mon Sep 17 00:00:00 2001
From: Bo Maryniuk <bo@suse.de>
Date: Fri, 28 Sep 2018 15:22:33 +0200
Subject: [PATCH] Fix IPv6 scope (bsc#1108557)
@ -69,296 +69,17 @@ Lintfix: W0611
Reverse skipping tests: if no ipaddress
---
salt/_compat.py | 287 +++++++++++++++++++++++------
salt/cloud/clouds/saltify.py | 5 +-
salt/cloud/clouds/vagrant.py | 9 +-
salt/ext/win_inet_pton.py | 2 +-
salt/minion.py | 5 +-
salt/modules/ipset.py | 5 +-
salt/modules/network.py | 5 +-
salt/modules/vagrant.py | 6 +-
salt/utils/dns.py | 11 +-
salt/utils/minions.py | 5 +-
tests/unit/grains/test_core.py | 5 +-
tests/unit/modules/test_network.py | 15 +-
12 files changed, 245 insertions(+), 115 deletions(-)
salt/_compat.py | 74 +++++++++++++++++++++++++++++++++++++++++++++++++
1 file changed, 74 insertions(+)
diff --git a/salt/_compat.py b/salt/_compat.py
index 9b10646ace..0576210afc 100644
index c10b82c0c2..8628833dcf 100644
--- a/salt/_compat.py
+++ b/salt/_compat.py
@@ -2,18 +2,21 @@
'''
Salt compatibility code
'''
-# pylint: disable=import-error,unused-import,invalid-name
+# pylint: disable=import-error,unused-import,invalid-name,W0231,W0233
# Import python libs
-from __future__ import absolute_import
+from __future__ import absolute_import, unicode_literals, print_function
import sys
import types
+import logging
# Import 3rd-party libs
-from salt.ext.six import binary_type, string_types, text_type
+from salt.exceptions import SaltException
+from salt.ext.six import binary_type, string_types, text_type, integer_types
from salt.ext.six.moves import cStringIO, StringIO
-HAS_XML = True
+log = logging.getLogger(__name__)
+
try:
# Python >2.5
import xml.etree.cElementTree as ElementTree
@@ -31,11 +34,10 @@ except Exception:
import elementtree.ElementTree as ElementTree
except Exception:
ElementTree = None
- HAS_XML = False
@@ -229,7 +229,81 @@ class IPv6InterfaceScoped(ipaddress.IPv6Interface, IPv6AddressScoped):
self.hostmask = self.network.hostmask
# True if we are running on Python 3.
-PY3 = sys.version_info[0] == 3
+PY3 = sys.version_info.major == 3
if PY3:
@@ -45,13 +47,12 @@ else:
import exceptions
-if HAS_XML:
+if ElementTree is not None:
if not hasattr(ElementTree, 'ParseError'):
class ParseError(Exception):
'''
older versions of ElementTree do not have ParseError
'''
- pass
ElementTree.ParseError = ParseError
@@ -61,9 +62,7 @@ def text_(s, encoding='latin-1', errors='strict'):
If ``s`` is an instance of ``binary_type``, return
``s.decode(encoding, errors)``, otherwise return ``s``
'''
- if isinstance(s, binary_type):
- return s.decode(encoding, errors)
- return s
+ return s.decode(encoding, errors) if isinstance(s, binary_type) else s
def bytes_(s, encoding='latin-1', errors='strict'):
@@ -71,57 +70,37 @@ def bytes_(s, encoding='latin-1', errors='strict'):
If ``s`` is an instance of ``text_type``, return
``s.encode(encoding, errors)``, otherwise return ``s``
'''
- if isinstance(s, text_type):
- return s.encode(encoding, errors)
- return s
+ return s.encode(encoding, errors) if isinstance(s, text_type) else s
-if PY3:
- def ascii_native_(s):
- if isinstance(s, text_type):
- s = s.encode('ascii')
- return str(s, 'ascii', 'strict')
-else:
- def ascii_native_(s):
- if isinstance(s, text_type):
- s = s.encode('ascii')
- return str(s)
+def ascii_native_(s):
+ '''
+ Python 3: If ``s`` is an instance of ``text_type``, return
+ ``s.encode('ascii')``, otherwise return ``str(s, 'ascii', 'strict')``
-ascii_native_.__doc__ = '''
-Python 3: If ``s`` is an instance of ``text_type``, return
-``s.encode('ascii')``, otherwise return ``str(s, 'ascii', 'strict')``
+ Python 2: If ``s`` is an instance of ``text_type``, return
+ ``s.encode('ascii')``, otherwise return ``str(s)``
+ '''
+ if isinstance(s, text_type):
+ s = s.encode('ascii')
-Python 2: If ``s`` is an instance of ``text_type``, return
-``s.encode('ascii')``, otherwise return ``str(s)``
-'''
+ return str(s, 'ascii', 'strict') if PY3 else s
-if PY3:
- def native_(s, encoding='latin-1', errors='strict'):
- '''
- If ``s`` is an instance of ``text_type``, return
- ``s``, otherwise return ``str(s, encoding, errors)``
- '''
- if isinstance(s, text_type):
- return s
- return str(s, encoding, errors)
-else:
- def native_(s, encoding='latin-1', errors='strict'):
- '''
- If ``s`` is an instance of ``text_type``, return
- ``s.encode(encoding, errors)``, otherwise return ``str(s)``
- '''
- if isinstance(s, text_type):
- return s.encode(encoding, errors)
- return str(s)
+def native_(s, encoding='latin-1', errors='strict'):
+ '''
+ Python 3: If ``s`` is an instance of ``text_type``, return ``s``, otherwise
+ return ``str(s, encoding, errors)``
-native_.__doc__ = '''
-Python 3: If ``s`` is an instance of ``text_type``, return ``s``, otherwise
-return ``str(s, encoding, errors)``
+ Python 2: If ``s`` is an instance of ``text_type``, return
+ ``s.encode(encoding, errors)``, otherwise return ``str(s)``
+ '''
+ if PY3:
+ out = s if isinstance(s, text_type) else str(s, encoding, errors)
+ else:
+ out = s.encode(encoding, errors) if isinstance(s, text_type) else str(s)
-Python 2: If ``s`` is an instance of ``text_type``, return
-``s.encode(encoding, errors)``, otherwise return ``str(s)``
-'''
+ return out
def string_io(data=None): # cStringIO can't handle unicode
@@ -133,7 +112,199 @@ def string_io(data=None): # cStringIO can't handle unicode
except (UnicodeEncodeError, TypeError):
return StringIO(data)
-if PY3:
- import ipaddress
-else:
- import salt.ext.ipaddress as ipaddress
+
+try:
+ if PY3:
+ import ipaddress
+ else:
+ import salt.ext.ipaddress as ipaddress
+except ImportError:
+ ipaddress = None
+
+
+class IPv6AddressScoped(ipaddress.IPv6Address):
+ '''
+ Represent and manipulate single IPv6 Addresses.
+ Scope-aware version
+ '''
+ def __init__(self, address):
+ '''
+ Instantiate a new IPv6 address object. Scope is moved to an attribute 'scope'.
+
+ Args:
+ address: A string or integer representing the IP
+
+ Additionally, an integer can be passed, so
+ IPv6Address('2001:db8::') == IPv6Address(42540766411282592856903984951653826560)
+ or, more generally
+ IPv6Address(int(IPv6Address('2001:db8::'))) == IPv6Address('2001:db8::')
+
+ Raises:
+ AddressValueError: If address isn't a valid IPv6 address.
+
+ :param address:
+ '''
+ # pylint: disable-all
+ if not hasattr(self, '_is_packed_binary'):
+ # This method (below) won't be around for some Python 3 versions
+ # and we need check this differently anyway
+ self._is_packed_binary = lambda p: isinstance(p, bytes)
+ # pylint: enable-all
+
+ if isinstance(address, string_types) and '%' in address:
+ buff = address.split('%')
+ if len(buff) != 2:
+ raise SaltException('Invalid IPv6 address: "{}"'.format(address))
+ address, self.__scope = buff
+ else:
+ self.__scope = None
+
+ if sys.version_info.major == 2:
+ ipaddress._BaseAddress.__init__(self, address)
+ ipaddress._BaseV6.__init__(self, address)
+ else:
+ # Python 3.4 fix. Versions higher are simply not affected
+ # https://github.com/python/cpython/blob/3.4/Lib/ipaddress.py#L543-L544
+ self._version = 6
+ self._max_prefixlen = ipaddress.IPV6LENGTH
+
+ # Efficient constructor from integer.
+ if isinstance(address, integer_types):
+ self._check_int_address(address)
+ self._ip = address
+ elif self._is_packed_binary(address):
+ self._check_packed_address(address, 16)
+ self._ip = ipaddress._int_from_bytes(address, 'big')
+ else:
+ address = str(address)
+ if '/' in address:
+ raise ipaddress.AddressValueError("Unexpected '/' in {}".format(address))
+ self._ip = self._ip_int_from_string(address)
+
+ def _is_packed_binary(self, data):
+ '''
+ Check if data is hexadecimal packed
+
+ :param data:
+ :return:
+ '''
+ packed = False
+ if len(data) == 16 and ':' not in data:
+ try:
+ packed = bool(int(str(bytearray(data)).encode('hex'), 16))
+ except ValueError:
+ pass
+
+ return packed
+
+ @property
+ def scope(self):
+ '''
+ Return scope of IPv6 address.
+
+ :return:
+ '''
+ return self.__scope
+
+ def __str__(self):
+ return text_type(self._string_from_ip_int(self._ip) +
+ ('%' + self.scope if self.scope is not None else ''))
+
+
+class IPv6InterfaceScoped(ipaddress.IPv6Interface, IPv6AddressScoped):
+ '''
+ Update
+ '''
+ def __init__(self, address):
+ if isinstance(address, (bytes, int)):
+ IPv6AddressScoped.__init__(self, address)
+ self.network = ipaddress.IPv6Network(self._ip)
+ self._prefixlen = self._max_prefixlen
+ return
+
+ addr = ipaddress._split_optional_netmask(address)
+ IPv6AddressScoped.__init__(self, addr[0])
+ self.network = ipaddress.IPv6Network(address, strict=False)
+ self.netmask = self.network.netmask
+ self._prefixlen = self.network._prefixlen
+ self.hostmask = self.network.hostmask
+
+
+def ip_address(address):
+ """Take an IP string/int and return an object of the correct type.
+
@ -431,229 +152,13 @@ index 9b10646ace..0576210afc 100644
+ raise ValueError('{} does not appear to be an IPv4 or IPv6 interface'.format(address))
+
+
+if ipaddress:
+ ipaddress.IPv6Address = IPv6AddressScoped
+ if sys.version_info.major == 2:
+ ipaddress.IPv6Interface = IPv6InterfaceScoped
if ipaddress:
ipaddress.IPv6Address = IPv6AddressScoped
if sys.version_info.major == 2:
ipaddress.IPv6Interface = IPv6InterfaceScoped
+ ipaddress.ip_address = ip_address
+ ipaddress.ip_interface = ip_interface
diff --git a/salt/cloud/clouds/saltify.py b/salt/cloud/clouds/saltify.py
index c9cc281b42..e0e56349a0 100644
--- a/salt/cloud/clouds/saltify.py
+++ b/salt/cloud/clouds/saltify.py
@@ -27,10 +27,7 @@ import salt.utils.cloud
import salt.config as config
import salt.client
import salt.ext.six as six
-if six.PY3:
- import ipaddress
-else:
- import salt.ext.ipaddress as ipaddress
+from salt._compat import ipaddress
from salt.exceptions import SaltCloudException, SaltCloudSystemExit
diff --git a/salt/cloud/clouds/vagrant.py b/salt/cloud/clouds/vagrant.py
index a24170c78a..0fe410eb91 100644
--- a/salt/cloud/clouds/vagrant.py
+++ b/salt/cloud/clouds/vagrant.py
@@ -25,13 +25,8 @@ import tempfile
import salt.utils
import salt.config as config
import salt.client
-import salt.ext.six as six
-if six.PY3:
- import ipaddress
-else:
- import salt.ext.ipaddress as ipaddress
-from salt.exceptions import SaltCloudException, SaltCloudSystemExit, \
- SaltInvocationError
+from salt._compat import ipaddress
+from salt.exceptions import SaltCloudException, SaltCloudSystemExit, SaltInvocationError
# Get logging started
log = logging.getLogger(__name__)
diff --git a/salt/ext/win_inet_pton.py b/salt/ext/win_inet_pton.py
index 1204bede10..89aba14ce9 100644
--- a/salt/ext/win_inet_pton.py
+++ b/salt/ext/win_inet_pton.py
@@ -9,7 +9,7 @@ from __future__ import absolute_import
import socket
import ctypes
import os
-import ipaddress
+from salt._compat import ipaddress
import salt.ext.six as six
diff --git a/salt/minion.py b/salt/minion.py
index 17e11c0ebe..9c05a646ea 100644
--- a/salt/minion.py
+++ b/salt/minion.py
@@ -26,10 +26,7 @@ from binascii import crc32
# Import Salt Libs
# pylint: disable=import-error,no-name-in-module,redefined-builtin
from salt.ext import six
-if six.PY3:
- import ipaddress
-else:
- import salt.ext.ipaddress as ipaddress
+from salt._compat import ipaddress
from salt.ext.six.moves import range
from salt.utils.zeromq import zmq, ZMQDefaultLoop, install_zmq, ZMQ_VERSION_INFO
diff --git a/salt/modules/ipset.py b/salt/modules/ipset.py
index 7047e84c29..1a0fa0044d 100644
--- a/salt/modules/ipset.py
+++ b/salt/modules/ipset.py
@@ -13,10 +13,7 @@ from salt.ext.six.moves import map, range
import salt.utils.path
# Import third-party libs
-if six.PY3:
- import ipaddress
-else:
- import salt.ext.ipaddress as ipaddress
+from salt._compat import ipaddress
# Set up logging
log = logging.getLogger(__name__)
diff --git a/salt/modules/network.py b/salt/modules/network.py
index 92893572a6..60f586f6bc 100644
--- a/salt/modules/network.py
+++ b/salt/modules/network.py
@@ -26,10 +26,7 @@ from salt.exceptions import CommandExecutionError
# Import 3rd-party libs
from salt.ext import six
from salt.ext.six.moves import range # pylint: disable=import-error,no-name-in-module,redefined-builtin
-if six.PY3:
- import ipaddress
-else:
- import salt.ext.ipaddress as ipaddress
+from salt._compat import ipaddress
log = logging.getLogger(__name__)
diff --git a/salt/modules/vagrant.py b/salt/modules/vagrant.py
index 0592dede55..0f518c2602 100644
--- a/salt/modules/vagrant.py
+++ b/salt/modules/vagrant.py
@@ -39,11 +39,7 @@ import salt.utils.path
import salt.utils.stringutils
from salt.exceptions import CommandExecutionError, SaltInvocationError
import salt.ext.six as six
-
-if six.PY3:
- import ipaddress
-else:
- import salt.ext.ipaddress as ipaddress
+from salt._compat import ipaddress
log = logging.getLogger(__name__)
diff --git a/salt/utils/dns.py b/salt/utils/dns.py
index db08bcb7ac..40011016fd 100644
--- a/salt/utils/dns.py
+++ b/salt/utils/dns.py
@@ -1029,18 +1029,13 @@ def parse_resolv(src='/etc/resolv.conf'):
try:
(directive, arg) = (line[0].lower(), line[1:])
# Drop everything after # or ; (comments)
- arg = list(itertools.takewhile(
- lambda x: x[0] not in ('#', ';'), arg))
-
+ arg = list(itertools.takewhile(lambda x: x[0] not in ('#', ';'), arg))
if directive == 'nameserver':
- # Split the scope (interface) if it is present
- addr, scope = arg[0].split('%', 1) if '%' in arg[0] else (arg[0], '')
+ addr = arg[0]
try:
ip_addr = ipaddress.ip_address(addr)
version = ip_addr.version
- # Rejoin scope after address validation
- if scope:
- ip_addr = '%'.join((str(ip_addr), scope))
+ ip_addr = str(ip_addr)
if ip_addr not in nameservers:
nameservers.append(ip_addr)
if version == 4 and ip_addr not in ip4_nameservers:
diff --git a/salt/utils/minions.py b/salt/utils/minions.py
index bb0cbaa589..f282464eee 100644
--- a/salt/utils/minions.py
+++ b/salt/utils/minions.py
@@ -26,10 +26,7 @@ import salt.cache
from salt.ext import six
# Import 3rd-party libs
-if six.PY3:
- import ipaddress
-else:
- import salt.ext.ipaddress as ipaddress
+from salt._compat import ipaddress
HAS_RANGE = False
try:
import seco.range # pylint: disable=import-error
diff --git a/tests/unit/grains/test_core.py b/tests/unit/grains/test_core.py
index dd7d5b06f8..e973428add 100644
--- a/tests/unit/grains/test_core.py
+++ b/tests/unit/grains/test_core.py
@@ -32,10 +32,7 @@ import salt.grains.core as core
# Import 3rd-party libs
from salt.ext import six
-if six.PY3:
- import ipaddress
-else:
- import salt.ext.ipaddress as ipaddress
+from salt._compat import ipaddress
log = logging.getLogger(__name__)
diff --git a/tests/unit/modules/test_network.py b/tests/unit/modules/test_network.py
index 865f15f3e3..50fa629276 100644
--- a/tests/unit/modules/test_network.py
+++ b/tests/unit/modules/test_network.py
@@ -20,20 +20,11 @@ from tests.support.mock import (
)
# Import Salt Libs
-from salt.ext import six
import salt.utils.network
import salt.utils.path
import salt.modules.network as network
from salt.exceptions import CommandExecutionError
-if six.PY2:
- import salt.ext.ipaddress as ipaddress
- HAS_IPADDRESS = True
-else:
- try:
- import ipaddress
- HAS_IPADDRESS = True
- except ImportError:
- HAS_IPADDRESS = False
+from salt._compat import ipaddress
@skipIf(NO_MOCK, NO_MOCK_REASON)
@@ -278,7 +269,7 @@ class NetworkTestCase(TestCase, LoaderModuleMockMixin):
self.assertDictEqual(network.connect('host', 'port'),
{'comment': ret, 'result': True})
- @skipIf(HAS_IPADDRESS is False, 'unable to import \'ipaddress\'')
+ @skipIf(not bool(ipaddress), 'unable to import \'ipaddress\'')
def test_is_private(self):
'''
Test for Check if the given IP address is a private address
@@ -290,7 +281,7 @@ class NetworkTestCase(TestCase, LoaderModuleMockMixin):
return_value=True):
self.assertTrue(network.is_private('::1'))
- @skipIf(HAS_IPADDRESS is False, 'unable to import \'ipaddress\'')
+ @skipIf(not bool(ipaddress), 'unable to import \'ipaddress\'')
def test_is_loopback(self):
'''
Test for Check if the given IP address is a loopback address
--
2.19.0
2.20.1

View File

@ -1,4 +1,4 @@
From 2916f2f3e7c6af07148863281ffaf07df21f21da Mon Sep 17 00:00:00 2001
From 3be2bb0043f15af468f1db33b1aa1cc6f2e5797d Mon Sep 17 00:00:00 2001
From: Bo Maryniuk <bo@suse.de>
Date: Wed, 9 Jan 2019 16:08:19 +0100
Subject: [PATCH] Fix issue #2068 test
@ -13,7 +13,7 @@ Minor update: more correct is-dict check.
1 file changed, 6 insertions(+), 3 deletions(-)
diff --git a/salt/state.py b/salt/state.py
index b4b2a00601..815ebaec24 100644
index 91985c8edc..01ec1faf8b 100644
--- a/salt/state.py
+++ b/salt/state.py
@@ -25,6 +25,7 @@ import traceback
@ -24,7 +24,7 @@ index b4b2a00601..815ebaec24 100644
# Import salt libs
import salt.loader
@@ -2743,16 +2744,18 @@ class State(object):
@@ -2776,16 +2777,18 @@ class State(object):
'''
for chunk in high:
state = high[chunk]

View File

@ -1,941 +0,0 @@
From 140388e51e5b5b7ee33b776269bce67046cce32f Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Pablo=20Su=C3=A1rez=20Hern=C3=A1ndez?=
<psuarezhernandez@suse.com>
Date: Tue, 4 Dec 2018 16:16:18 +0000
Subject: [PATCH] Fix latin1 encoding problems on file module
(bsc#1116837)
_get_line_indent renamed to _set_line_indent
_regex_to_static refactored to work on lists
line function refactored to work on list
Added _set_line_eol and _get_eol functions
Setting end of line
Make tests green
test_line_insert_end fixed
/sr.* pattern should raise exception
file.line function refactored
Make integration green. Added test for mode ensure insert before first line
Fixed file permissions
Removed regex compilation
Comprehensions converting to unicode replaced by salt.utils.data.decode_list
Empty match on delete or replace not causing IndexError exception
List comprehension replaced
Added comments
Add get_diff to salt.utils.stringutils
Make to_unicode/to_str/to_bytes helpers attempt latin-1
Also allow for multiple encodings to be passed
Use new get_diff helper in file module
Use BASE_FILES instead of redundant STATE_DIR
Add integration test for latin-1 file diffs
PY3 scoping fix
In PY3 the caught exceptions now drop out of scope when leaving the for
loop.
Add unit test for latin-1 fallback, multi-encoding
Skip pylint false-positives
Fix incorrect use of __salt__ when __utils__ is needed
Add stringutils.get_diff to mocks
Only try latin-1 from get_diff instead of by default
Fix to_unicode test
Since latin-1 is not being automatically decoded, we need to explicitly
pass it on the test.
Revert "Use BASE_FILES instead of redundant STATE_DIR"
This reverts commit ba524c81b6ae6091259157cec1259f5a7fb776c0.
---
salt/modules/file.py | 224 +++++++++---------
salt/modules/win_file.py | 14 +-
salt/utils/stringutils.py | 118 ++++++---
.../files/file/base/issue-48777/new.html | 5 +
.../files/file/base/issue-48777/old.html | 4 +
tests/integration/states/test_file.py | 23 ++
tests/unit/modules/test_file.py | 102 +++++++-
tests/unit/utils/test_stringutils.py | 14 ++
8 files changed, 348 insertions(+), 156 deletions(-)
create mode 100644 tests/integration/files/file/base/issue-48777/new.html
create mode 100644 tests/integration/files/file/base/issue-48777/old.html
diff --git a/salt/modules/file.py b/salt/modules/file.py
index 1b4b7e0e46..1ad0fef1ea 100644
--- a/salt/modules/file.py
+++ b/salt/modules/file.py
@@ -12,7 +12,6 @@ from __future__ import absolute_import, print_function, unicode_literals
# Import python libs
import datetime
-import difflib
import errno
import fileinput
import fnmatch
@@ -61,6 +60,7 @@ import salt.utils.stringutils
import salt.utils.templates
import salt.utils.url
import salt.utils.user
+import salt.utils.data
from salt.exceptions import CommandExecutionError, MinionError, SaltInvocationError, get_error_message as _get_error_message
from salt.utils.files import HASHES, HASHES_REVMAP
@@ -1570,7 +1570,7 @@ def comment_line(path,
check_perms(path, None, pre_user, pre_group, pre_mode)
# Return a diff using the two dictionaries
- return ''.join(difflib.unified_diff(orig_file, new_file))
+ return __utils__['stringutils.get_diff'](orig_file, new_file)
def _get_flags(flags):
@@ -1722,18 +1722,19 @@ def _regex_to_static(src, regex):
return None
try:
- src = re.search(regex, src, re.M)
+ compiled = re.compile(regex, re.DOTALL)
+ src = [line for line in src if compiled.search(line) or line.count(regex)]
except Exception as ex:
raise CommandExecutionError("{0}: '{1}'".format(_get_error_message(ex), regex))
- return src and src.group().rstrip('\r') or regex
+ return src and src or []
-def _assert_occurrence(src, probe, target, amount=1):
+def _assert_occurrence(probe, target, amount=1):
'''
Raise an exception, if there are different amount of specified occurrences in src.
'''
- occ = src.count(probe)
+ occ = len(probe)
if occ > amount:
msg = 'more than'
elif occ < amount:
@@ -1749,7 +1750,7 @@ def _assert_occurrence(src, probe, target, amount=1):
return occ
-def _get_line_indent(src, line, indent):
+def _set_line_indent(src, line, indent):
'''
Indent the line with the source line.
'''
@@ -1762,7 +1763,36 @@ def _get_line_indent(src, line, indent):
break
idt.append(c)
- return ''.join(idt) + line.strip()
+ return ''.join(idt) + line.lstrip()
+
+
+def _get_eol(line):
+ match = re.search('((?<!\r)\n|\r(?!\n)|\r\n)$', line)
+ return match and match.group() or ''
+
+
+def _set_line_eol(src, line):
+ '''
+ Add line ending
+ '''
+ line_ending = _get_eol(src) or os.linesep
+ return line.rstrip() + line_ending
+
+
+def _insert_line_before(idx, body, content, indent):
+ if not idx or (idx and _starts_till(body[idx - 1], content) < 0):
+ cnd = _set_line_indent(body[idx], content, indent)
+ body.insert(idx, cnd)
+ return body
+
+
+def _insert_line_after(idx, body, content, indent):
+ # No duplicates or append, if "after" is the last line
+ next_line = idx + 1 < len(body) and body[idx + 1] or None
+ if next_line is None or _starts_till(next_line, content) < 0:
+ cnd = _set_line_indent(body[idx], content, indent)
+ body.insert(idx + 1, cnd)
+ return body
def line(path, content=None, match=None, mode=None, location=None,
@@ -1893,132 +1923,110 @@ def line(path, content=None, match=None, mode=None, location=None,
match = content
with salt.utils.files.fopen(path, mode='r') as fp_:
- body = salt.utils.stringutils.to_unicode(fp_.read())
- body_before = hashlib.sha256(salt.utils.stringutils.to_bytes(body)).hexdigest()
+ body = salt.utils.data.decode_list(fp_.readlines())
+ body_before = hashlib.sha256(salt.utils.stringutils.to_bytes(''.join(body))).hexdigest()
+ # Add empty line at the end if last line ends with eol.
+ # Allows simpler code
+ if body and _get_eol(body[-1]):
+ body.append('')
+
after = _regex_to_static(body, after)
before = _regex_to_static(body, before)
match = _regex_to_static(body, match)
if os.stat(path).st_size == 0 and mode in ('delete', 'replace'):
log.warning('Cannot find text to {0}. File \'{1}\' is empty.'.format(mode, path))
- body = ''
- elif mode == 'delete':
- body = os.linesep.join([line for line in body.split(os.linesep) if line.find(match) < 0])
- elif mode == 'replace':
- body = os.linesep.join([(_get_line_indent(file_line, content, indent)
- if (file_line.find(match) > -1 and not file_line == content) else file_line)
- for file_line in body.split(os.linesep)])
+ body = []
+ elif mode == 'delete' and match:
+ body = [line for line in body if line != match[0]]
+ elif mode == 'replace' and match:
+ idx = body.index(match[0])
+ file_line = body.pop(idx)
+ body.insert(idx, _set_line_indent(file_line, content, indent))
elif mode == 'insert':
if not location and not before and not after:
raise CommandExecutionError('On insert must be defined either "location" or "before/after" conditions.')
if not location:
if before and after:
- _assert_occurrence(body, before, 'before')
- _assert_occurrence(body, after, 'after')
+ _assert_occurrence(before, 'before')
+ _assert_occurrence(after, 'after')
+
out = []
- lines = body.split(os.linesep)
in_range = False
- for line in lines:
- if line.find(after) > -1:
+ for line in body:
+ if line == after[0]:
in_range = True
- elif line.find(before) > -1 and in_range:
- out.append(_get_line_indent(line, content, indent))
+ elif line == before[0] and in_range:
+ cnd = _set_line_indent(line, content, indent)
+ out.append(cnd)
out.append(line)
- body = os.linesep.join(out)
+ body = out
if before and not after:
- _assert_occurrence(body, before, 'before')
- out = []
- lines = body.split(os.linesep)
- for idx in range(len(lines)):
- _line = lines[idx]
- if _line.find(before) > -1:
- cnd = _get_line_indent(_line, content, indent)
- if not idx or (idx and _starts_till(lines[idx - 1], cnd) < 0): # Job for replace instead
- out.append(cnd)
- out.append(_line)
- body = os.linesep.join(out)
+ _assert_occurrence(before, 'before')
+
+ idx = body.index(before[0])
+ body = _insert_line_before(idx, body, content, indent)
elif after and not before:
- _assert_occurrence(body, after, 'after')
- out = []
- lines = body.split(os.linesep)
- for idx, _line in enumerate(lines):
- out.append(_line)
- cnd = _get_line_indent(_line, content, indent)
- # No duplicates or append, if "after" is the last line
- if (_line.find(after) > -1 and
- (lines[((idx + 1) < len(lines)) and idx + 1 or idx].strip() != cnd or
- idx + 1 == len(lines))):
- out.append(cnd)
- body = os.linesep.join(out)
+ _assert_occurrence(after, 'after')
+
+ idx = body.index(after[0])
+ body = _insert_line_after(idx, body, content, indent)
else:
if location == 'start':
- body = os.linesep.join((content, body))
+ if body:
+ body.insert(0, _set_line_eol(body[0], content))
+ else:
+ body.append(content + os.linesep)
elif location == 'end':
- body = os.linesep.join((body, _get_line_indent(body[-1], content, indent) if body else content))
+ body.append(_set_line_indent(body[-1], content, indent) if body else content)
elif mode == 'ensure':
- after = after and after.strip()
- before = before and before.strip()
if before and after:
- _assert_occurrence(body, before, 'before')
- _assert_occurrence(body, after, 'after')
+ _assert_occurrence(before, 'before')
+ _assert_occurrence(after, 'after')
- is_there = bool(body.count(content))
+ is_there = bool([l for l in body if l.count(content)])
if not is_there:
- out = []
- body = body.split(os.linesep)
- for idx, line in enumerate(body):
- out.append(line)
- if line.find(content) > -1:
- is_there = True
- if not is_there:
- if idx < (len(body) - 1) and line.find(after) > -1 and body[idx + 1].find(before) > -1:
- out.append(content)
- elif line.find(after) > -1:
- raise CommandExecutionError('Found more than one line between '
- 'boundaries "before" and "after".')
- body = os.linesep.join(out)
+ idx = body.index(after[0])
+ if idx < (len(body) - 1) and body[idx + 1] == before[0]:
+ cnd = _set_line_indent(body[idx], content, indent)
+ body.insert(idx + 1, cnd)
+ else:
+ raise CommandExecutionError('Found more than one line between '
+ 'boundaries "before" and "after".')
elif before and not after:
- _assert_occurrence(body, before, 'before')
- body = body.split(os.linesep)
- out = []
- for idx in range(len(body)):
- if body[idx].find(before) > -1:
- prev = (idx > 0 and idx or 1) - 1
- out.append(_get_line_indent(body[idx], content, indent))
- if _starts_till(out[prev], content) > -1:
- del out[prev]
- out.append(body[idx])
- body = os.linesep.join(out)
+ _assert_occurrence(before, 'before')
+
+ idx = body.index(before[0])
+ body = _insert_line_before(idx, body, content, indent)
elif not before and after:
- _assert_occurrence(body, after, 'after')
- body = body.split(os.linesep)
- skip = None
- out = []
- for idx in range(len(body)):
- if skip != body[idx]:
- out.append(body[idx])
-
- if body[idx].find(after) > -1:
- next_line = idx + 1 < len(body) and body[idx + 1] or None
- if next_line is not None and _starts_till(next_line, content) > -1:
- skip = next_line
- out.append(_get_line_indent(body[idx], content, indent))
- body = os.linesep.join(out)
+ _assert_occurrence(after, 'after')
+
+ idx = body.index(after[0])
+ body = _insert_line_after(idx, body, content, indent)
else:
raise CommandExecutionError("Wrong conditions? "
"Unable to ensure line without knowing "
"where to put it before and/or after.")
- changed = body_before != hashlib.sha256(salt.utils.stringutils.to_bytes(body)).hexdigest()
+ if body:
+ for idx, line in enumerate(body):
+ if not _get_eol(line) and idx+1 < len(body):
+ prev = idx and idx-1 or 1
+ body[idx] = _set_line_eol(body[prev], line)
+ # We do not need empty line at the end anymore
+ if '' == body[-1]:
+ body.pop()
+
+ changed = body_before != hashlib.sha256(salt.utils.stringutils.to_bytes(''.join(body))).hexdigest()
if backup and changed and __opts__['test'] is False:
try:
@@ -2032,20 +2040,15 @@ def line(path, content=None, match=None, mode=None, location=None,
if changed:
if show_changes:
with salt.utils.files.fopen(path, 'r') as fp_:
- path_content = [salt.utils.stringutils.to_unicode(x)
- for x in fp_.read().splitlines(True)]
- changes_diff = ''.join(difflib.unified_diff(
- path_content,
- [salt.utils.stringutils.to_unicode(x)
- for x in body.splitlines(True)]
- ))
+ path_content = salt.utils.data.decode_list(fp_.read().splitlines(True))
+ changes_diff = __utils__['stringutils.get_diff'](path_content, body)
if __opts__['test'] is False:
fh_ = None
try:
# Make sure we match the file mode from salt.utils.files.fopen
mode = 'wb' if six.PY2 and salt.utils.platform.is_windows() else 'w'
fh_ = salt.utils.atomicfile.atomic_open(path, mode)
- fh_.write(body)
+ fh_.write(''.join(body))
finally:
if fh_:
fh_.close()
@@ -2419,18 +2422,15 @@ def replace(path,
if not dry_run and not salt.utils.platform.is_windows():
check_perms(path, None, pre_user, pre_group, pre_mode)
- def get_changes():
- orig_file_as_str = [salt.utils.stringutils.to_unicode(x) for x in orig_file]
- new_file_as_str = [salt.utils.stringutils.to_unicode(x) for x in new_file]
- return ''.join(difflib.unified_diff(orig_file_as_str, new_file_as_str))
+ differences = __utils__['stringutils.get_diff'](orig_file, new_file)
if show_changes:
- return get_changes()
+ return differences
# We may have found a regex line match but don't need to change the line
# (for situations where the pattern also matches the repl). Revert the
# has_changes flag to False if the final result is unchanged.
- if not get_changes():
+ if not differences:
has_changes = False
return has_changes
@@ -2684,7 +2684,7 @@ def blockreplace(path,
)
if block_found:
- diff = ''.join(difflib.unified_diff(orig_file, new_file))
+ diff = __utils__['stringutils.get_diff'](orig_file, new_file)
has_changes = diff is not ''
if has_changes and not dry_run:
# changes detected
@@ -5003,11 +5003,7 @@ def get_diff(file1,
else:
if show_filenames:
args.extend(files)
- ret = ''.join(
- difflib.unified_diff(
- *salt.utils.data.decode(args)
- )
- )
+ ret = __utils__['stringutils.get_diff'](*args)
return ret
return ''
diff --git a/salt/modules/win_file.py b/salt/modules/win_file.py
index d321bd538e..0f5c908c8f 100644
--- a/salt/modules/win_file.py
+++ b/salt/modules/win_file.py
@@ -58,8 +58,9 @@ from salt.modules.file import (check_hash, # pylint: disable=W0611
RE_FLAG_TABLE, blockreplace, prepend, seek_read, seek_write, rename,
lstat, path_exists_glob, write, pardir, join, HASHES, HASHES_REVMAP,
comment, uncomment, _add_flags, comment_line, _regex_to_static,
- _get_line_indent, apply_template_on_contents, dirname, basename,
- list_backups_dir, _assert_occurrence, _starts_till)
+ _set_line_indent, apply_template_on_contents, dirname, basename,
+ list_backups_dir, _assert_occurrence, _starts_till, _set_line_eol, _get_eol,
+ _insert_line_after, _insert_line_before)
from salt.modules.file import normpath as normpath_
from salt.utils.functools import namespaced_function as _namespaced_function
@@ -116,8 +117,9 @@ def __virtual__():
global blockreplace, prepend, seek_read, seek_write, rename, lstat
global write, pardir, join, _add_flags, apply_template_on_contents
global path_exists_glob, comment, uncomment, _mkstemp_copy
- global _regex_to_static, _get_line_indent, dirname, basename
+ global _regex_to_static, _set_line_indent, dirname, basename
global list_backups_dir, normpath_, _assert_occurrence, _starts_till
+ global _insert_line_before, _insert_line_after, _set_line_eol, _get_eol
replace = _namespaced_function(replace, globals())
search = _namespaced_function(search, globals())
@@ -172,7 +174,11 @@ def __virtual__():
uncomment = _namespaced_function(uncomment, globals())
comment_line = _namespaced_function(comment_line, globals())
_regex_to_static = _namespaced_function(_regex_to_static, globals())
- _get_line_indent = _namespaced_function(_get_line_indent, globals())
+ _set_line_indent = _namespaced_function(_set_line_indent, globals())
+ _set_line_eol = _namespaced_function(_set_line_eol, globals())
+ _get_eol = _namespaced_function(_get_eol, globals())
+ _insert_line_after = _namespaced_function(_insert_line_after, globals())
+ _insert_line_before = _namespaced_function(_insert_line_before, globals())
_mkstemp_copy = _namespaced_function(_mkstemp_copy, globals())
_add_flags = _namespaced_function(_add_flags, globals())
apply_template_on_contents = _namespaced_function(apply_template_on_contents, globals())
diff --git a/salt/utils/stringutils.py b/salt/utils/stringutils.py
index 2909d4aebe..f84fda70a5 100644
--- a/salt/utils/stringutils.py
+++ b/salt/utils/stringutils.py
@@ -6,6 +6,7 @@ Functions for manipulating or otherwise processing strings
# Import Python libs
from __future__ import absolute_import, print_function, unicode_literals
import base64
+import difflib
import errno
import fnmatch
import logging
@@ -31,21 +32,32 @@ def to_bytes(s, encoding=None, errors='strict'):
Given bytes, bytearray, str, or unicode (python 2), return bytes (str for
python 2)
'''
+ if encoding is None:
+ # Try utf-8 first, and fall back to detected encoding
+ encoding = ('utf-8', __salt_system_encoding__)
+ if not isinstance(encoding, (tuple, list)):
+ encoding = (encoding,)
+
+ if not encoding:
+ raise ValueError('encoding cannot be empty')
+
+ exc = None
if six.PY3:
if isinstance(s, bytes):
return s
if isinstance(s, bytearray):
return bytes(s)
if isinstance(s, six.string_types):
- if encoding:
- return s.encode(encoding, errors)
- else:
+ for enc in encoding:
try:
- # Try UTF-8 first
- return s.encode('utf-8', errors)
- except UnicodeEncodeError:
- # Fall back to detected encoding
- return s.encode(__salt_system_encoding__, errors)
+ return s.encode(enc, errors)
+ except UnicodeEncodeError as err:
+ exc = err
+ continue
+ # The only way we get this far is if a UnicodeEncodeError was
+ # raised, otherwise we would have already returned (or raised some
+ # other exception).
+ raise exc # pylint: disable=raising-bad-type
raise TypeError('expected bytes, bytearray, or str')
else:
return to_str(s, encoding, errors)
@@ -61,35 +73,48 @@ def to_str(s, encoding=None, errors='strict', normalize=False):
except TypeError:
return s
+ if encoding is None:
+ # Try utf-8 first, and fall back to detected encoding
+ encoding = ('utf-8', __salt_system_encoding__)
+ if not isinstance(encoding, (tuple, list)):
+ encoding = (encoding,)
+
+ if not encoding:
+ raise ValueError('encoding cannot be empty')
+
# This shouldn't be six.string_types because if we're on PY2 and we already
# have a string, we should just return it.
if isinstance(s, str):
return _normalize(s)
+
+ exc = None
if six.PY3:
if isinstance(s, (bytes, bytearray)):
- if encoding:
- return _normalize(s.decode(encoding, errors))
- else:
+ for enc in encoding:
try:
- # Try UTF-8 first
- return _normalize(s.decode('utf-8', errors))
- except UnicodeDecodeError:
- # Fall back to detected encoding
- return _normalize(s.decode(__salt_system_encoding__, errors))
+ return _normalize(s.decode(enc, errors))
+ except UnicodeDecodeError as err:
+ exc = err
+ continue
+ # The only way we get this far is if a UnicodeDecodeError was
+ # raised, otherwise we would have already returned (or raised some
+ # other exception).
+ raise exc # pylint: disable=raising-bad-type
raise TypeError('expected str, bytes, or bytearray not {}'.format(type(s)))
else:
if isinstance(s, bytearray):
return str(s) # future lint: disable=blacklisted-function
if isinstance(s, unicode): # pylint: disable=incompatible-py3-code,undefined-variable
- if encoding:
- return _normalize(s).encode(encoding, errors)
- else:
+ for enc in encoding:
try:
- # Try UTF-8 first
- return _normalize(s).encode('utf-8', errors)
- except UnicodeEncodeError:
- # Fall back to detected encoding
- return _normalize(s).encode(__salt_system_encoding__, errors)
+ return _normalize(s).encode(enc, errors)
+ except UnicodeEncodeError as err:
+ exc = err
+ continue
+ # The only way we get this far is if a UnicodeDecodeError was
+ # raised, otherwise we would have already returned (or raised some
+ # other exception).
+ raise exc # pylint: disable=raising-bad-type
raise TypeError('expected str, bytearray, or unicode')
@@ -100,6 +125,16 @@ def to_unicode(s, encoding=None, errors='strict', normalize=False):
def _normalize(s):
return unicodedata.normalize('NFC', s) if normalize else s
+ if encoding is None:
+ # Try utf-8 first, and fall back to detected encoding
+ encoding = ('utf-8', __salt_system_encoding__)
+ if not isinstance(encoding, (tuple, list)):
+ encoding = (encoding,)
+
+ if not encoding:
+ raise ValueError('encoding cannot be empty')
+
+ exc = None
if six.PY3:
if isinstance(s, str):
return _normalize(s)
@@ -113,15 +148,16 @@ def to_unicode(s, encoding=None, errors='strict', normalize=False):
if isinstance(s, unicode): # pylint: disable=incompatible-py3-code
return _normalize(s)
elif isinstance(s, (str, bytearray)):
- if encoding:
- return _normalize(s.decode(encoding, errors))
- else:
+ for enc in encoding:
try:
- # Try UTF-8 first
- return _normalize(s.decode('utf-8', errors))
- except UnicodeDecodeError:
- # Fall back to detected encoding
- return _normalize(s.decode(__salt_system_encoding__, errors))
+ return _normalize(s.decode(enc, errors))
+ except UnicodeDecodeError as err:
+ exc = err
+ continue
+ # The only way we get this far is if a UnicodeDecodeError was
+ # raised, otherwise we would have already returned (or raised some
+ # other exception).
+ raise exc # pylint: disable=raising-bad-type
raise TypeError('expected str or bytearray')
@@ -513,3 +549,21 @@ def get_context(template, line, num_lines=5, marker=None):
buf[error_line_in_context] += marker
return '---\n{0}\n---'.format('\n'.join(buf))
+
+
+def get_diff(a, b, *args, **kwargs):
+ '''
+ Perform diff on two iterables containing lines from two files, and return
+ the diff as as string. Lines are normalized to str types to avoid issues
+ with unicode on PY2.
+ '''
+ encoding = ('utf-8', 'latin-1', __salt_system_encoding__)
+ # Late import to avoid circular import
+ import salt.utils.data
+ return ''.join(
+ difflib.unified_diff(
+ salt.utils.data.decode_list(a, encoding=encoding),
+ salt.utils.data.decode_list(b, encoding=encoding),
+ *args, **kwargs
+ )
+ )
diff --git a/tests/integration/files/file/base/issue-48777/new.html b/tests/integration/files/file/base/issue-48777/new.html
new file mode 100644
index 0000000000..2d5c1ae744
--- /dev/null
+++ b/tests/integration/files/file/base/issue-48777/new.html
@@ -0,0 +1,5 @@
+<html>
+<body>
+räksmörgås
+</body>
+</html>
diff --git a/tests/integration/files/file/base/issue-48777/old.html b/tests/integration/files/file/base/issue-48777/old.html
new file mode 100644
index 0000000000..7879e1ce9f
--- /dev/null
+++ b/tests/integration/files/file/base/issue-48777/old.html
@@ -0,0 +1,4 @@
+<html>
+<body>
+</body>
+</html>
diff --git a/tests/integration/states/test_file.py b/tests/integration/states/test_file.py
index 9064ba7cc1..30ad39de6b 100644
--- a/tests/integration/states/test_file.py
+++ b/tests/integration/states/test_file.py
@@ -656,6 +656,29 @@ class FileTest(ModuleCase, SaltReturnAssertsMixin):
self.assertIn(
'does not exist', ret['comment'])
+ def test_managed_latin1_diff(self):
+ '''
+ Tests that latin-1 file contents are represented properly in the diff
+ '''
+ name = os.path.join(TMP, 'local_latin1_diff')
+ # Lay down the initial file
+ ret = self.run_state(
+ 'file.managed',
+ name=name,
+ source='salt://issue-48777/old.html')
+ ret = ret[next(iter(ret))]
+ assert ret['result'] is True, ret
+
+ # Replace it with the new file and check the diff
+ ret = self.run_state(
+ 'file.managed',
+ name=name,
+ source='salt://issue-48777/new.html')
+ ret = ret[next(iter(ret))]
+ assert ret['result'] is True, ret
+ diff_lines = ret['changes']['diff'].split('\n')
+ assert '+räksmörgås' in diff_lines, diff_lines
+
def test_directory(self):
'''
file.directory
diff --git a/tests/unit/modules/test_file.py b/tests/unit/modules/test_file.py
index b157a577e5..66acaf9cb6 100644
--- a/tests/unit/modules/test_file.py
+++ b/tests/unit/modules/test_file.py
@@ -57,7 +57,10 @@ class FileReplaceTestCase(TestCase, LoaderModuleMockMixin):
'grains': {},
},
'__grains__': {'kernel': 'Linux'},
- '__utils__': {'files.is_text': MagicMock(return_value=True)},
+ '__utils__': {
+ 'files.is_text': MagicMock(return_value=True),
+ 'stringutils.get_diff': salt.utils.stringutils.get_diff,
+ },
}
}
@@ -235,7 +238,12 @@ class FileBlockReplaceTestCase(TestCase, LoaderModuleMockMixin):
'grains': {},
},
'__grains__': {'kernel': 'Linux'},
- '__utils__': {'files.is_text': MagicMock(return_value=True)},
+ '__utils__': {
+ 'files.is_binary': MagicMock(return_value=False),
+ 'files.is_text': MagicMock(return_value=True),
+ 'files.get_encoding': MagicMock(return_value='utf-8'),
+ 'stringutils.get_diff': salt.utils.stringutils.get_diff,
+ },
}
}
@@ -528,7 +536,10 @@ class FileModuleTestCase(TestCase, LoaderModuleMockMixin):
'cachedir': 'tmp',
'grains': {},
},
- '__grains__': {'kernel': 'Linux'}
+ '__grains__': {'kernel': 'Linux'},
+ '__utils__': {
+ 'stringutils.get_diff': salt.utils.stringutils.get_diff,
+ },
}
}
@@ -907,7 +918,10 @@ class FilemodLineTests(TestCase, LoaderModuleMockMixin):
'cachedir': 'tmp',
'grains': {},
},
- '__grains__': {'kernel': 'Linux'}
+ '__grains__': {'kernel': 'Linux'},
+ '__utils__': {
+ 'stringutils.get_diff': salt.utils.stringutils.get_diff,
+ },
}
}
@@ -930,6 +944,29 @@ class FilemodLineTests(TestCase, LoaderModuleMockMixin):
self.assertIn('Cannot find text to {0}'.format(mode),
_log.warning.call_args_list[0][0][0])
+ @patch('os.path.realpath', MagicMock())
+ @patch('os.path.isfile', MagicMock(return_value=True))
+ @patch('os.stat', MagicMock())
+ def test_line_delete_no_match(self):
+ '''
+ Tests that when calling file.line with ``mode=delete``,
+ with not matching pattern to delete returns False
+ :return:
+ '''
+ file_content = os.linesep.join([
+ 'file_roots:',
+ ' base:',
+ ' - /srv/salt',
+ ' - /srv/custom'
+ ])
+ match = 'not matching'
+ for mode in ['delete', 'replace']:
+ files_fopen = mock_open(read_data=file_content)
+ with patch('salt.utils.files.fopen', files_fopen):
+ atomic_opener = mock_open()
+ with patch('salt.utils.atomicfile.atomic_open', atomic_opener):
+ self.assertFalse(filemod.line('foo', content='foo', match=match, mode=mode))
+
@patch('os.path.realpath', MagicMock())
@patch('os.path.isfile', MagicMock(return_value=True))
def test_line_modecheck_failure(self):
@@ -1082,7 +1119,7 @@ class FilemodLineTests(TestCase, LoaderModuleMockMixin):
' - /srv/sugar'
])
cfg_content = '- /srv/custom'
- for before_line in ['/srv/salt', '/srv/sa.*t', '/sr.*']:
+ for before_line in ['/srv/salt', '/srv/sa.*t']:
files_fopen = mock_open(read_data=file_content)
with patch('salt.utils.files.fopen', files_fopen):
atomic_opener = mock_open()
@@ -1092,6 +1129,32 @@ class FilemodLineTests(TestCase, LoaderModuleMockMixin):
self.assertEqual(atomic_opener().write.call_args_list[0][0][0],
file_modified)
+ @patch('os.path.realpath', MagicMock())
+ @patch('os.path.isfile', MagicMock(return_value=True))
+ @patch('os.stat', MagicMock())
+ def test_line_assert_exception_pattern(self):
+ '''
+ Test for file.line for exception on insert with too general pattern.
+
+ :return:
+ '''
+ file_content = os.linesep.join([
+ 'file_roots:',
+ ' base:',
+ ' - /srv/salt',
+ ' - /srv/sugar'
+ ])
+ cfg_content = '- /srv/custom'
+ for before_line in ['/sr.*']:
+ files_fopen = mock_open(read_data=file_content)
+ with patch('salt.utils.files.fopen', files_fopen):
+ atomic_opener = mock_open()
+ with patch('salt.utils.atomicfile.atomic_open', atomic_opener):
+ with self.assertRaises(CommandExecutionError) as cm:
+ filemod.line('foo', content=cfg_content, before=before_line, mode='insert')
+ self.assertEqual(cm.exception.strerror,
+ 'Found more than expected occurrences in "before" expression')
+
@patch('os.path.realpath', MagicMock())
@patch('os.path.isfile', MagicMock(return_value=True))
@patch('os.stat', MagicMock())
@@ -1179,7 +1242,7 @@ class FilemodLineTests(TestCase, LoaderModuleMockMixin):
' base:',
' - /srv/salt',
' - /srv/sugar',
- cfg_content
+ ' ' + cfg_content
])
files_fopen = mock_open(read_data=file_content)
with patch('salt.utils.files.fopen', files_fopen):
@@ -1273,6 +1336,33 @@ class FilemodLineTests(TestCase, LoaderModuleMockMixin):
self.assertEqual(atomic_opener().write.call_args_list[0][0][0],
file_modified)
+ @patch('os.path.realpath', MagicMock())
+ @patch('os.path.isfile', MagicMock(return_value=True))
+ @patch('os.stat', MagicMock())
+ def test_line_insert_ensure_before_first_line(self):
+ '''
+ Test for file.line for insertion ensuring the line is before first line
+ :return:
+ '''
+ cfg_content = '#!/bin/bash'
+ file_content = os.linesep.join([
+ '/etc/init.d/someservice restart',
+ 'exit 0'
+ ])
+ file_modified = os.linesep.join([
+ cfg_content,
+ '/etc/init.d/someservice restart',
+ 'exit 0'
+ ])
+ files_fopen = mock_open(read_data=file_content)
+ with patch('salt.utils.files.fopen', files_fopen):
+ atomic_opener = mock_open()
+ with patch('salt.utils.atomicfile.atomic_open', atomic_opener):
+ filemod.line('foo', content=cfg_content, before='/etc/init.d/someservice restart', mode='ensure')
+ self.assertEqual(len(atomic_opener().write.call_args_list), 1)
+ self.assertEqual(atomic_opener().write.call_args_list[0][0][0],
+ file_modified)
+
@patch('os.path.realpath', MagicMock())
@patch('os.path.isfile', MagicMock(return_value=True))
@patch('os.stat', MagicMock())
diff --git a/tests/unit/utils/test_stringutils.py b/tests/unit/utils/test_stringutils.py
index 9c8fd4f7c3..852f558793 100644
--- a/tests/unit/utils/test_stringutils.py
+++ b/tests/unit/utils/test_stringutils.py
@@ -18,6 +18,9 @@ STR = BYTES = UNICODE.encode('utf-8')
# code points. Do not modify it.
EGGS = '\u044f\u0438\u0306\u0446\u0430'
+LATIN1_UNICODE = 'räksmörgås'
+LATIN1_BYTES = LATIN1_UNICODE.encode('latin-1')
+
class StringutilsTestCase(TestCase):
def test_contains_whitespace(self):
@@ -134,6 +137,13 @@ class StringutilsTestCase(TestCase):
<>йца'
)
+ self.assertEqual(
+ salt.utils.stringutils.to_unicode(
+ LATIN1_BYTES, encoding='latin-1'
+ ),
+ LATIN1_UNICODE
+ )
+
if six.PY3:
self.assertEqual(salt.utils.stringutils.to_unicode('plugh'), 'plugh')
self.assertEqual(salt.utils.stringutils.to_unicode('áéíóúý'), 'áéíóúý')
@@ -150,6 +160,10 @@ class StringutilsTestCase(TestCase):
with patch.object(builtins, '__salt_system_encoding__', 'CP1252'):
self.assertEqual(salt.utils.stringutils.to_unicode('Ψ'.encode('utf-8')), 'Ψ')
+ def test_to_unicode_multi_encoding(self):
+ result = salt.utils.stringutils.to_unicode(LATIN1_BYTES, encoding=('utf-8', 'latin1'))
+ assert result == LATIN1_UNICODE
+
def test_build_whitespace_split_regex(self):
expected_regex = '(?m)^(?:[\\s]+)?Lorem(?:[\\s]+)?ipsum(?:[\\s]+)?dolor(?:[\\s]+)?sit(?:[\\s]+)?amet\\,' \
'(?:[\\s]+)?$'
--
2.17.1

View File

@ -1,34 +0,0 @@
From f8c0811c3a05ef334eef1943a906fe01b13c1afc Mon Sep 17 00:00:00 2001
From: Federico Ceratto <federico.ceratto@suse.com>
Date: Wed, 25 Jul 2018 10:33:09 +0000
Subject: [PATCH] Fix mine.get not returning data (Workaround for #48020)
---
salt/utils/minions.py | 8 ++++----
1 file changed, 4 insertions(+), 4 deletions(-)
diff --git a/salt/utils/minions.py b/salt/utils/minions.py
index c3acc6ba90..bb0cbaa589 100644
--- a/salt/utils/minions.py
+++ b/salt/utils/minions.py
@@ -239,12 +239,12 @@ class CkMinions(object):
Retreive complete minion list from PKI dir.
Respects cache if configured
'''
- if self.opts.get('__role') == 'master' and self.opts.get('__cli') == 'salt-run':
- # Compiling pillar directly on the master, just return the master's
- # ID as that is the only one that is available.
- return [self.opts['id']]
minions = []
pki_cache_fn = os.path.join(self.opts['pki_dir'], self.acc, '.key_cache')
+ try:
+ os.makedirs(os.path.dirname(pki_cache_fn))
+ except OSError:
+ pass
try:
if self.opts['key_cache'] and os.path.exists(pki_cache_fn):
log.debug('Returning cached minion list')
--
2.17.1

View File

@ -1,32 +0,0 @@
From 854ffed98894b8aa8b51973c0ba13fb75093e715 Mon Sep 17 00:00:00 2001
From: Erik Johnson <palehose@gmail.com>
Date: Mon, 30 Apr 2018 10:25:40 -0500
Subject: [PATCH] Fix UnboundLocalError in file.get_diff
This was only in 2018.3 head and not part of a release
Add unit test for file.get_diff
Use a lambda instead of defining a one-line function
---
salt/modules/file.py | 3 ++-
1 file changed, 2 insertions(+), 1 deletion(-)
diff --git a/salt/modules/file.py b/salt/modules/file.py
index 95bca7fb1b..1b4b7e0e46 100644
--- a/salt/modules/file.py
+++ b/salt/modules/file.py
@@ -5008,7 +5008,8 @@ def get_diff(file1,
*salt.utils.data.decode(args)
)
)
- return ret
+ return ret
+ return ''
def manage_file(name,
--
2.13.7

View File

@ -1,4 +1,4 @@
From 997edb18b81d73933324b299da36474c24630b42 Mon Sep 17 00:00:00 2001
From 3b5803d31a93d2f619246d48691f52f6c65d52ee Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Pablo=20Su=C3=A1rez=20Hern=C3=A1ndez?=
<psuarezhernandez@suse.com>
Date: Mon, 25 Jun 2018 13:06:40 +0100
@ -10,102 +10,14 @@ Add unit test coverage for multiple version packages on Zypper
Fix '_find_remove_targets' after aligning Zypper with pkg state
---
salt/modules/zypper.py | 57 +++++++++++++---------
salt/states/pkg.py | 21 --------
tests/unit/modules/test_zypper.py | 100 +++++++++++++++++++++++++-------------
3 files changed, 102 insertions(+), 76 deletions(-)
salt/states/pkg.py | 21 ---------------------
1 file changed, 21 deletions(-)
diff --git a/salt/modules/zypper.py b/salt/modules/zypper.py
index 05ba3d86c9..4689f84926 100644
--- a/salt/modules/zypper.py
+++ b/salt/modules/zypper.py
@@ -38,6 +38,7 @@ import salt.utils.files
import salt.utils.functools
import salt.utils.path
import salt.utils.pkg
+import salt.utils.pkg.rpm
import salt.utils.stringutils
import salt.utils.systemd
from salt.utils.versions import LooseVersion
@@ -714,24 +715,44 @@ def list_pkgs(versions_as_list=False, **kwargs):
contextkey = 'pkg.list_pkgs'
if contextkey not in __context__:
-
- cmd = ['rpm', '-qa', '--queryformat', (
- "%{NAME}_|-%{VERSION}_|-%{RELEASE}_|-%{ARCH}_|-"
- "%|EPOCH?{%{EPOCH}}:{}|_|-%{INSTALLTIME}\\n")]
ret = {}
- for line in __salt__['cmd.run'](cmd, output_loglevel='trace', python_shell=False).splitlines():
- name, pkgver, rel, arch, epoch, install_time = line.split('_|-')
- install_date = datetime.datetime.utcfromtimestamp(int(install_time)).isoformat() + "Z"
- install_date_time_t = int(install_time)
-
- all_attr = {'epoch': epoch, 'version': pkgver, 'release': rel, 'arch': arch,
- 'install_date': install_date, 'install_date_time_t': install_date_time_t}
- __salt__['pkg_resource.add_pkg'](ret, name, all_attr)
+ cmd = ['rpm', '-qa', '--queryformat',
+ salt.utils.pkg.rpm.QUERYFORMAT.replace('%{REPOID}', '(none)') + '\n']
+ output = __salt__['cmd.run'](cmd,
+ python_shell=False,
+ output_loglevel='trace')
+ for line in output.splitlines():
+ pkginfo = salt.utils.pkg.rpm.parse_pkginfo(
+ line,
+ osarch=__grains__['osarch']
+ )
+ if pkginfo is not None:
+ # see rpm version string rules available at https://goo.gl/UGKPNd
+ pkgver = pkginfo.version
+ epoch = ''
+ release = ''
+ if ':' in pkgver:
+ epoch, pkgver = pkgver.split(":", 1)
+ if '-' in pkgver:
+ pkgver, release = pkgver.split("-", 1)
+ all_attr = {
+ 'epoch': epoch,
+ 'version': pkgver,
+ 'release': release,
+ 'arch': pkginfo.arch,
+ 'install_date': pkginfo.install_date,
+ 'install_date_time_t': pkginfo.install_date_time_t
+ }
+ __salt__['pkg_resource.add_pkg'](ret, pkginfo.name, all_attr)
+ _ret = {}
for pkgname in ret:
- ret[pkgname] = sorted(ret[pkgname], key=lambda d: d['version'])
+ # Filter out GPG public keys packages
+ if pkgname.startswith('gpg-pubkey'):
+ continue
+ _ret[pkgname] = sorted(ret[pkgname], key=lambda d: d['version'])
- __context__[contextkey] = ret
+ __context__[contextkey] = _ret
return __salt__['pkg_resource.format_pkg_list'](
__context__[contextkey],
@@ -1350,14 +1371,6 @@ def install(name=None,
_clean_cache()
new = list_pkgs(attr=diff_attr) if not downloadonly else list_downloaded()
-
- # Handle packages which report multiple new versions
- # (affects only kernel packages at this point)
- for pkg_name in new:
- pkg_data = new[pkg_name]
- if isinstance(pkg_data, six.string_types):
- new[pkg_name] = pkg_data.split(',')[-1]
-
ret = salt.utils.data.compare_dicts(old, new)
if errors:
diff --git a/salt/states/pkg.py b/salt/states/pkg.py
index ed405cb6b5..aad87e3278 100644
index 2034262b23..0aca1e0af8 100644
--- a/salt/states/pkg.py
+++ b/salt/states/pkg.py
@@ -415,16 +415,6 @@ def _find_remove_targets(name=None,
@@ -455,16 +455,6 @@ def _find_remove_targets(name=None,
if __grains__['os'] == 'FreeBSD' and origin:
cver = [k for k, v in six.iteritems(cur_pkgs) if v['origin'] == pkgname]
@ -122,7 +34,7 @@ index ed405cb6b5..aad87e3278 100644
else:
cver = cur_pkgs.get(pkgname, [])
@@ -854,17 +844,6 @@ def _verify_install(desired, new_pkgs, ignore_epoch=False, new_caps=None):
@@ -871,17 +861,6 @@ def _verify_install(desired, new_pkgs, ignore_epoch=False, new_caps=None):
cver = new_pkgs.get(pkgname.split('%')[0])
elif __grains__['os_family'] == 'Debian':
cver = new_pkgs.get(pkgname.split('=')[0])
@ -140,172 +52,7 @@ index ed405cb6b5..aad87e3278 100644
else:
cver = new_pkgs.get(pkgname)
if not cver and pkgname in new_caps:
diff --git a/tests/unit/modules/test_zypper.py b/tests/unit/modules/test_zypper.py
index 6eccee568b..bb15aca11a 100644
--- a/tests/unit/modules/test_zypper.py
+++ b/tests/unit/modules/test_zypper.py
@@ -475,7 +475,7 @@ class ZypperTestCase(TestCase, LoaderModuleMockMixin):
with patch('salt.modules.zypper.list_pkgs', MagicMock(side_effect=[
{"kernel-default": "3.12.49-11.1"}, {"kernel-default": "3.12.49-11.1,3.12.51-60.20.2"}])):
ret = zypper.install('kernel-default', '--auto-agree-with-licenses')
- self.assertDictEqual(ret, {"kernel-default": {"old": "3.12.49-11.1", "new": "3.12.51-60.20.2"}})
+ self.assertDictEqual(ret, {"kernel-default": {"old": "3.12.49-11.1", "new": "3.12.49-11.1,3.12.51-60.20.2"}})
def test_upgrade_failure(self):
'''
@@ -540,27 +540,36 @@ Repository 'DUMMY' not found by its alias, number, or URI.
data.setdefault(key, []).append(value)
rpm_out = [
- 'protobuf-java_|-2.6.1_|-3.1.develHead_|-noarch_|-_|-1499257756',
- 'yast2-ftp-server_|-3.1.8_|-8.1_|-x86_64_|-_|-1499257798',
- 'jose4j_|-0.4.4_|-2.1.develHead_|-noarch_|-_|-1499257756',
- 'apache-commons-cli_|-1.2_|-1.233_|-noarch_|-_|-1498636510',
- 'jakarta-commons-discovery_|-0.4_|-129.686_|-noarch_|-_|-1498636511',
- 'susemanager-build-keys-web_|-12.0_|-5.1.develHead_|-noarch_|-_|-1498636510',
+ 'protobuf-java_|-(none)_|-2.6.1_|-3.1.develHead_|-noarch_|-(none)_|-1499257756',
+ 'yast2-ftp-server_|-(none)_|-3.1.8_|-8.1_|-x86_64_|-(none)_|-1499257798',
+ 'jose4j_|-(none)_|-0.4.4_|-2.1.develHead_|-noarch_|-(none)_|-1499257756',
+ 'apache-commons-cli_|-(none)_|-1.2_|-1.233_|-noarch_|-(none)_|-1498636510',
+ 'jakarta-commons-discovery_|-(none)_|-0.4_|-129.686_|-noarch_|-(none)_|-1498636511',
+ 'susemanager-build-keys-web_|-(none)_|-12.0_|-5.1.develHead_|-noarch_|-(none)_|-1498636510',
+ 'gpg-pubkey_|-(none)_|-39db7c82_|-5847eb1f_|-(none)_|-(none)_|-1519203802',
+ 'gpg-pubkey_|-(none)_|-8a7c64f9_|-5aaa93ca_|-(none)_|-(none)_|-1529925595',
+ 'kernel-default_|-(none)_|-4.4.138_|-94.39.1_|-x86_64_|-(none)_|-1529936067',
+ 'kernel-default_|-(none)_|-4.4.73_|-5.1_|-x86_64_|-(none)_|-1503572639',
+ 'perseus-dummy_|-(none)_|-1.1_|-1.1_|-i586_|-(none)_|-1529936062',
]
- with patch.dict(zypper.__salt__, {'cmd.run': MagicMock(return_value=os.linesep.join(rpm_out))}), \
+ with patch.dict(zypper.__grains__, {'osarch': 'x86_64'}), \
+ patch.dict(zypper.__salt__, {'cmd.run': MagicMock(return_value=os.linesep.join(rpm_out))}), \
patch.dict(zypper.__salt__, {'pkg_resource.add_pkg': _add_data}), \
patch.dict(zypper.__salt__, {'pkg_resource.format_pkg_list': pkg_resource.format_pkg_list}), \
patch.dict(zypper.__salt__, {'pkg_resource.stringify': MagicMock()}):
pkgs = zypper.list_pkgs(versions_as_list=True)
+ self.assertFalse(pkgs.get('gpg-pubkey', False))
for pkg_name, pkg_version in {
- 'jakarta-commons-discovery': '0.4-129.686',
- 'yast2-ftp-server': '3.1.8-8.1',
- 'protobuf-java': '2.6.1-3.1.develHead',
- 'susemanager-build-keys-web': '12.0-5.1.develHead',
- 'apache-commons-cli': '1.2-1.233',
- 'jose4j': '0.4.4-2.1.develHead'}.items():
+ 'jakarta-commons-discovery': ['0.4-129.686'],
+ 'yast2-ftp-server': ['3.1.8-8.1'],
+ 'protobuf-java': ['2.6.1-3.1.develHead'],
+ 'susemanager-build-keys-web': ['12.0-5.1.develHead'],
+ 'apache-commons-cli': ['1.2-1.233'],
+ 'kernel-default': ['4.4.138-94.39.1', '4.4.73-5.1'],
+ 'perseus-dummy.i586': ['1.1-1.1'],
+ 'jose4j': ['0.4.4-2.1.develHead']}.items():
self.assertTrue(pkgs.get(pkg_name))
- self.assertEqual(pkgs[pkg_name], [pkg_version])
+ self.assertEqual(pkgs[pkg_name], pkg_version)
def test_list_pkgs_with_attr(self):
'''
@@ -572,57 +581,82 @@ Repository 'DUMMY' not found by its alias, number, or URI.
data.setdefault(key, []).append(value)
rpm_out = [
- 'protobuf-java_|-2.6.1_|-3.1.develHead_|-noarch_|-_|-1499257756',
- 'yast2-ftp-server_|-3.1.8_|-8.1_|-x86_64_|-_|-1499257798',
- 'jose4j_|-0.4.4_|-2.1.develHead_|-noarch_|-_|-1499257756',
- 'apache-commons-cli_|-1.2_|-1.233_|-noarch_|-_|-1498636510',
- 'jakarta-commons-discovery_|-0.4_|-129.686_|-noarch_|-_|-1498636511',
- 'susemanager-build-keys-web_|-12.0_|-5.1.develHead_|-noarch_|-_|-1498636510',
+ 'protobuf-java_|-(none)_|-2.6.1_|-3.1.develHead_|-noarch_|-(none)_|-1499257756',
+ 'yast2-ftp-server_|-(none)_|-3.1.8_|-8.1_|-x86_64_|-(none)_|-1499257798',
+ 'jose4j_|-(none)_|-0.4.4_|-2.1.develHead_|-noarch_|-(none)_|-1499257756',
+ 'apache-commons-cli_|-(none)_|-1.2_|-1.233_|-noarch_|-(none)_|-1498636510',
+ 'jakarta-commons-discovery_|-(none)_|-0.4_|-129.686_|-noarch_|-(none)_|-1498636511',
+ 'susemanager-build-keys-web_|-(none)_|-12.0_|-5.1.develHead_|-noarch_|-(none)_|-1498636510',
+ 'gpg-pubkey_|-(none)_|-39db7c82_|-5847eb1f_|-(none)_|-(none)_|-1519203802',
+ 'gpg-pubkey_|-(none)_|-8a7c64f9_|-5aaa93ca_|-(none)_|-(none)_|-1529925595',
+ 'kernel-default_|-(none)_|-4.4.138_|-94.39.1_|-x86_64_|-(none)_|-1529936067',
+ 'kernel-default_|-(none)_|-4.4.73_|-5.1_|-x86_64_|-(none)_|-1503572639',
+ 'perseus-dummy_|-(none)_|-1.1_|-1.1_|-i586_|-(none)_|-1529936062',
]
with patch.dict(zypper.__salt__, {'cmd.run': MagicMock(return_value=os.linesep.join(rpm_out))}), \
+ patch.dict(zypper.__grains__, {'osarch': 'x86_64'}), \
patch.dict(zypper.__salt__, {'pkg_resource.add_pkg': _add_data}), \
patch.dict(zypper.__salt__, {'pkg_resource.format_pkg_list': pkg_resource.format_pkg_list}), \
patch.dict(zypper.__salt__, {'pkg_resource.stringify': MagicMock()}):
pkgs = zypper.list_pkgs(attr=['epoch', 'release', 'arch', 'install_date_time_t'])
+ self.assertFalse(pkgs.get('gpg-pubkey', False))
for pkg_name, pkg_attr in {
- 'jakarta-commons-discovery': {
+ 'jakarta-commons-discovery': [{
'version': '0.4',
'release': '129.686',
'arch': 'noarch',
'install_date_time_t': 1498636511,
- },
- 'yast2-ftp-server': {
+ }],
+ 'yast2-ftp-server': [{
'version': '3.1.8',
'release': '8.1',
'arch': 'x86_64',
'install_date_time_t': 1499257798,
- },
- 'protobuf-java': {
+ }],
+ 'protobuf-java': [{
'version': '2.6.1',
'release': '3.1.develHead',
'install_date_time_t': 1499257756,
'arch': 'noarch',
- },
- 'susemanager-build-keys-web': {
+ }],
+ 'susemanager-build-keys-web': [{
'version': '12.0',
'release': '5.1.develHead',
'arch': 'noarch',
'install_date_time_t': 1498636510,
- },
- 'apache-commons-cli': {
+ }],
+ 'apache-commons-cli': [{
'version': '1.2',
'release': '1.233',
'arch': 'noarch',
'install_date_time_t': 1498636510,
+ }],
+ 'kernel-default': [{
+ 'version': '4.4.138',
+ 'release': '94.39.1',
+ 'arch': 'x86_64',
+ 'install_date_time_t': 1529936067
},
- 'jose4j': {
+ {
+ 'version': '4.4.73',
+ 'release': '5.1',
+ 'arch': 'x86_64',
+ 'install_date_time_t': 1503572639,
+ }],
+ 'perseus-dummy.i586': [{
+ 'version': '1.1',
+ 'release': '1.1',
+ 'arch': 'i586',
+ 'install_date_time_t': 1529936062,
+ }],
+ 'jose4j': [{
'arch': 'noarch',
'version': '0.4.4',
'release': '2.1.develHead',
'install_date_time_t': 1499257756,
- }}.items():
+ }]}.items():
self.assertTrue(pkgs.get(pkg_name))
- self.assertEqual(pkgs[pkg_name], [pkg_attr])
+ self.assertEqual(pkgs[pkg_name], pkg_attr)
def test_list_patches(self):
'''
--
2.13.7
2.17.1

View File

@ -1,28 +0,0 @@
From 8ae052fbb07d7506492b430579308e4611e51754 Mon Sep 17 00:00:00 2001
From: Jochen Breuer <jbreuer@suse.de>
Date: Sun, 22 Apr 2018 23:11:11 +0200
Subject: [PATCH] Fixed usage of ipaddress
ipaddress is imported either directly or from salt.ext. If we
use it, we shouldn't address it with salt.ext.ipaddress.
---
salt/modules/network.py | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/salt/modules/network.py b/salt/modules/network.py
index f188fd7954..92893572a6 100644
--- a/salt/modules/network.py
+++ b/salt/modules/network.py
@@ -1140,7 +1140,7 @@ def convert_cidr(cidr):
ret = {'network': None,
'netmask': None}
cidr = calc_net(cidr)
- network_info = salt.ext.ipaddress.ip_network(cidr)
+ network_info = ipaddress.ip_network(cidr)
ret['network'] = six.text_type(network_info.network_address)
ret['netmask'] = six.text_type(network_info.netmask)
return ret
--
2.13.7

View File

@ -1,4 +1,4 @@
From 43b1f8fb6608c944812bc5bcd9da407624409ac7 Mon Sep 17 00:00:00 2001
From b10ca8ee857e14915ac83a8614521495b42b5d2b Mon Sep 17 00:00:00 2001
From: Erik Johnson <palehose@gmail.com>
Date: Fri, 24 Aug 2018 10:35:55 -0500
Subject: [PATCH] Fixes: CVE-2018-15750, CVE-2018-15751
@ -11,132 +11,43 @@ Handle Auth exceptions in run_job
Update tornado test to correct authentication message
---
salt/client/__init__.py | 8 ++++
salt/netapi/rest_cherrypy/app.py | 13 ++++++-
.../netapi/rest_cherrypy/test_app.py | 39 +++++++++++++++++++
.../netapi/rest_tornado/test_app.py | 2 +-
4 files changed, 60 insertions(+), 2 deletions(-)
salt/netapi/rest_cherrypy/app.py | 7 -------
tests/integration/netapi/rest_tornado/test_app.py | 4 ++--
2 files changed, 2 insertions(+), 9 deletions(-)
diff --git a/salt/client/__init__.py b/salt/client/__init__.py
index dcbc1473e1..77f2a963f7 100644
--- a/salt/client/__init__.py
+++ b/salt/client/__init__.py
@@ -349,6 +349,10 @@ class LocalClient(object):
raise SaltClientError(
'The salt master could not be contacted. Is master running?'
)
+ except AuthenticationError as err:
+ raise AuthenticationError(err)
+ except AuthorizationError as err:
+ raise AuthorizationError(err)
except Exception as general_exception:
# Convert to generic client error and pass along message
raise SaltClientError(general_exception)
@@ -415,6 +419,10 @@ class LocalClient(object):
raise SaltClientError(
'The salt master could not be contacted. Is master running?'
)
+ except AuthenticationError as err:
+ raise AuthenticationError(err)
+ except AuthorizationError as err:
+ raise AuthorizationError(err)
except Exception as general_exception:
# Convert to generic client error and pass along message
raise SaltClientError(general_exception)
diff --git a/salt/netapi/rest_cherrypy/app.py b/salt/netapi/rest_cherrypy/app.py
index 78ea3c3fef..c272674146 100644
index 40ee976b25..f9ca908115 100644
--- a/salt/netapi/rest_cherrypy/app.py
+++ b/salt/netapi/rest_cherrypy/app.py
@@ -1167,6 +1167,13 @@ class LowDataAdapter(object):
if token:
chunk['token'] = token
@@ -1174,13 +1174,6 @@ class LowDataAdapter(object):
except (TypeError, ValueError):
raise cherrypy.HTTPError(401, 'Invalid token')
+ if 'token' in chunk:
+ # Make sure that auth token is hex
+ try:
+ int(chunk['token'], 16)
+ except (TypeError, ValueError):
+ raise cherrypy.HTTPError(401, 'Invalid token')
+
- if 'token' in chunk:
- # Make sure that auth token is hex
- try:
- int(chunk['token'], 16)
- except (TypeError, ValueError):
- raise cherrypy.HTTPError(401, 'Invalid token')
-
if client:
chunk['client'] = client
@@ -2167,7 +2174,11 @@ class Events(object):
:return bool: True if valid, False if not valid.
'''
- if auth_token is None:
+ # Make sure that auth token is hex. If it's None, or something other
+ # than hex, this will raise a ValueError.
+ try:
+ int(auth_token, 16)
+ except ValueError:
return False
# First check if the given token is in our session table; if so it's a
diff --git a/tests/integration/netapi/rest_cherrypy/test_app.py b/tests/integration/netapi/rest_cherrypy/test_app.py
index 000b7418bf..5865510fd7 100644
--- a/tests/integration/netapi/rest_cherrypy/test_app.py
+++ b/tests/integration/netapi/rest_cherrypy/test_app.py
@@ -124,6 +124,45 @@ class TestRun(cptc.BaseRestCherryPyTest):
})
self.assertEqual(response.status, '401 Unauthorized')
+ def test_run_empty_token(self):
+ '''
+ Test the run URL with empty token
+ '''
+ cmd = dict(self.low, **{'token': ''})
+ body = urlencode(cmd)
+
+ request, response = self.request('/run', method='POST', body=body,
+ headers={
+ 'content-type': 'application/x-www-form-urlencoded'
+ })
+ assert response.status == '401 Unauthorized'
+
+ def test_run_empty_token_upercase(self):
+ '''
+ Test the run URL with empty token with upercase characters
+ '''
+ cmd = dict(self.low, **{'ToKen': ''})
+ body = urlencode(cmd)
+
+ request, response = self.request('/run', method='POST', body=body,
+ headers={
+ 'content-type': 'application/x-www-form-urlencoded'
+ })
+ assert response.status == '401 Unauthorized'
+
+ def test_run_wrong_token(self):
+ '''
+ Test the run URL with incorrect token
+ '''
+ cmd = dict(self.low, **{'token': 'bad'})
+ body = urlencode(cmd)
+
+ request, response = self.request('/run', method='POST', body=body,
+ headers={
+ 'content-type': 'application/x-www-form-urlencoded'
+ })
+ assert response.status == '401 Unauthorized'
+
class TestWebhookDisableAuth(cptc.BaseRestCherryPyTest):
diff --git a/tests/integration/netapi/rest_tornado/test_app.py b/tests/integration/netapi/rest_tornado/test_app.py
index beb085db1e..01abd354a7 100644
index a6829bdd4f..da96012b41 100644
--- a/tests/integration/netapi/rest_tornado/test_app.py
+++ b/tests/integration/netapi/rest_tornado/test_app.py
@@ -237,7 +237,7 @@ class TestSaltAPIHandler(_SaltnadoIntegrationTestCase):
self.assertEqual(len(ret), 3) # make sure we got 3 responses
@@ -240,8 +240,8 @@ class TestSaltAPIHandler(_SaltnadoIntegrationTestCase):
self.assertIn('jid', ret[0]) # the first 2 are regular returns
self.assertIn('jid', ret[1])
- self.assertIn('Authentication error occurred.', ret[2]) # bad auth
+ self.assertIn('Failed to authenticate', ret[2]) # bad auth
self.assertEqual(ret[0]['minions'], sorted(['minion', 'sub_minion', 'localhost']))
self.assertEqual(ret[1]['minions'], sorted(['minion', 'sub_minion', 'localhost']))
self.assertIn('Failed to authenticate', ret[2]) # bad auth
- self.assertEqual(ret[0]['minions'], sorted(['minion', 'sub_minion']))
- self.assertEqual(ret[1]['minions'], sorted(['minion', 'sub_minion']))
+ self.assertEqual(ret[0]['minions'], sorted(['minion', 'sub_minion', 'localhost']))
+ self.assertEqual(ret[1]['minions'], sorted(['minion', 'sub_minion', 'localhost']))
def test_simple_local_async_post_no_tgt(self):
low = [{'client': 'local_async',
--
2.17.1

View File

@ -1,37 +0,0 @@
From 6c85da9a53e9dd022c96a199be4e3bdd280543d6 Mon Sep 17 00:00:00 2001
From: "Gareth J. Greenaway" <gareth@wiked.org>
Date: Thu, 2 Aug 2018 15:35:24 -0700
Subject: [PATCH] Fixing issue when a valid token is generated even when
invalid user credentials are passed. This change verifies that the binddn
credentials are valid, then verifies that the username & password (if not
None) are also valid.
---
salt/auth/ldap.py | 8 +++++++-
1 file changed, 7 insertions(+), 1 deletion(-)
diff --git a/salt/auth/ldap.py b/salt/auth/ldap.py
index cbfb03a2f2..0b9aa69fe4 100644
--- a/salt/auth/ldap.py
+++ b/salt/auth/ldap.py
@@ -283,9 +283,15 @@ def auth(username, password):
log.error('LDAP authentication requires python-ldap module')
return False
- # If bind credentials are configured, use them instead of user's
+ # If bind credentials are configured, verify that we can a valid bind
if _config('binddn', mandatory=False) and _config('bindpw', mandatory=False):
bind = _bind_for_search(anonymous=_config('anonymous', mandatory=False))
+
+ # If username & password are not None, attempt to verify they are valid
+ if bind and username and password:
+ bind = _bind(username, password,
+ anonymous=_config('auth_by_group_membership_only', mandatory=False)
+ and _config('anonymous', mandatory=False))
else:
bind = _bind(username, password,
anonymous=_config('auth_by_group_membership_only', mandatory=False)
--
2.19.0

View File

@ -1,4 +1,4 @@
From 2e0abe6d12aa2657a4febed3a80b8c4cf104487a Mon Sep 17 00:00:00 2001
From 11c9eacc439697e9fa7b30918963e4736333ed36 Mon Sep 17 00:00:00 2001
From: Bo Maryniuk <bo@suse.de>
Date: Wed, 14 Nov 2018 17:36:23 +0100
Subject: [PATCH] Get os_arch also without RPM package installed
@ -17,10 +17,9 @@ Add UT for OS arch detection when no CPU arch or machine can be determined
Remove unsupported testcase
---
salt/utils/pkg/rpm.py | 18 ++++++---
tests/unit/utils/test_pkg.py | 72 ++++++++++++++++++++++++++++++++++++
2 files changed, 84 insertions(+), 6 deletions(-)
create mode 100644 tests/unit/utils/test_pkg.py
salt/utils/pkg/rpm.py | 18 ++++--
tests/unit/utils/test_pkg.py | 105 ++++++++++++++++++++++-------------
2 files changed, 77 insertions(+), 46 deletions(-)
diff --git a/salt/utils/pkg/rpm.py b/salt/utils/pkg/rpm.py
index 94e231da4b..bb8c3fb589 100644
@ -60,32 +59,71 @@ index 94e231da4b..bb8c3fb589 100644
diff --git a/tests/unit/utils/test_pkg.py b/tests/unit/utils/test_pkg.py
new file mode 100644
index 0000000000..361e0bf92f
--- /dev/null
index c293852058..361e0bf92f 100644
--- a/tests/unit/utils/test_pkg.py
+++ b/tests/unit/utils/test_pkg.py
@@ -0,0 +1,72 @@
+# -*- coding: utf-8 -*-
+
@@ -1,47 +1,72 @@
# -*- coding: utf-8 -*-
-# Import Python libs
-from __future__ import absolute_import
-# Import Salt Libs
+from __future__ import absolute_import, unicode_literals, print_function
+
+from tests.support.unit import TestCase, skipIf
+from tests.support.mock import Mock, MagicMock, patch, NO_MOCK, NO_MOCK_REASON
+import salt.utils.pkg
import salt.utils.pkg
-# Import Salt Testing Libs
-from tests.support.unit import TestCase
+from salt.utils.pkg import rpm
+
+try:
+ import pytest
+except ImportError:
+ pytest = None
+
+
-class PkgUtilsTestCase(TestCase):
+@skipIf(NO_MOCK, NO_MOCK_REASON)
+@skipIf(pytest is None, 'PyTest is missing')
+class PkgRPMTestCase(TestCase):
+ '''
'''
- TestCase for salt.utils.pkg module
+ Test case for pkg.rpm utils
+ '''
'''
- test_parameters = [
- ("16.0.0.49153-0+f1", "", "16.0.0.49153-0+f1"),
- ("> 15.0.0", ">", "15.0.0"),
- ("< 15.0.0", "<", "15.0.0"),
- ("<< 15.0.0", "<<", "15.0.0"),
- (">> 15.0.0", ">>", "15.0.0"),
- (">= 15.0.0", ">=", "15.0.0"),
- ("<= 15.0.0", "<=", "15.0.0"),
- ("!= 15.0.0", "!=", "15.0.0"),
- ("<=> 15.0.0", "<=>", "15.0.0"),
- ("<> 15.0.0", "<>", "15.0.0"),
- ("= 15.0.0", "=", "15.0.0"),
- (">15.0.0", ">", "15.0.0"),
- ("<15.0.0", "<", "15.0.0"),
- ("<<15.0.0", "<<", "15.0.0"),
- (">>15.0.0", ">>", "15.0.0"),
- (">=15.0.0", ">=", "15.0.0"),
- ("<=15.0.0", "<=", "15.0.0"),
- ("!=15.0.0", "!=", "15.0.0"),
- ("<=>15.0.0", "<=>", "15.0.0"),
- ("<>15.0.0", "<>", "15.0.0"),
- ("=15.0.0", "=", "15.0.0"),
- ("", "", "")
- ]
-
- def test_split_comparison(self):
- '''
- Tests salt.utils.pkg.split_comparison
- '''
- for test_parameter in self.test_parameters:
- oper, verstr = salt.utils.pkg.split_comparison(test_parameter[0])
- self.assertEqual(test_parameter[1], oper)
- self.assertEqual(test_parameter[2], verstr)
+
+ @patch('salt.utils.path.which', MagicMock(return_value=True))
+ def test_get_osarch_by_rpm(self):
@ -138,6 +176,6 @@ index 0000000000..361e0bf92f
+ '''
+ assert rpm.get_osarch() == 'unknown'
--
2.19.1
2.17.1

View File

@ -1,65 +0,0 @@
From 943a258da3ed460f173968b0a92b95f2e63ab669 Mon Sep 17 00:00:00 2001
From: Bo Maryniuk <bo@suse.de>
Date: Mon, 8 Oct 2018 12:48:24 +0200
Subject: [PATCH] Get os_family for RPM distros from the RPM macros.
(U#49930)
Strip and stringify the return for the osarch
Fix imports
---
salt/grains/core.py | 8 +++++---
salt/utils/pkg/rpm.py | 3 ++-
2 files changed, 7 insertions(+), 4 deletions(-)
diff --git a/salt/grains/core.py b/salt/grains/core.py
index 6aaf38096d..80eebd1c05 100644
--- a/salt/grains/core.py
+++ b/salt/grains/core.py
@@ -49,6 +49,8 @@ import salt.utils.path
import salt.utils.platform
import salt.utils.stringutils
import salt.utils.versions
+import salt.utils.pkg.rpm
+
from salt.ext import six
from salt.ext.six.moves import range
@@ -1776,9 +1778,9 @@ def os_data():
# architecture.
if grains.get('os_family') == 'Debian':
osarch = __salt__['cmd.run']('dpkg --print-architecture').strip()
- elif grains.get('os_family') == 'RedHat':
- osarch = __salt__['cmd.run']('rpm --eval %{_host_cpu}').strip()
- elif grains.get('os_family') == 'NILinuxRT':
+ elif grains.get('os_family') in ['RedHat', 'Suse']:
+ osarch = salt.utils.pkg.rpm.get_osarch()
+ elif grains.get('os_family') in ('NILinuxRT', 'Poky'):
archinfo = {}
for line in __salt__['cmd.run']('opkg print-architecture').splitlines():
if line.startswith('arch'):
diff --git a/salt/utils/pkg/rpm.py b/salt/utils/pkg/rpm.py
index 987edab894..94e231da4b 100644
--- a/salt/utils/pkg/rpm.py
+++ b/salt/utils/pkg/rpm.py
@@ -9,6 +9,7 @@ import collections
import datetime
import logging
import subprocess
+import salt.utils.stringutils
# Import 3rd-party libs
from salt.ext import six
@@ -47,7 +48,7 @@ def get_osarch():
close_fds=True,
stdout=subprocess.PIPE,
stderr=subprocess.PIPE).communicate()[0]
- return ret or 'unknown'
+ return salt.utils.stringutils.to_str(ret).strip() or 'unknown'
def check_32(arch, osarch=None):
--
2.19.0

View File

@ -1,38 +0,0 @@
From d0234ed977ca860b3a3a6a587a6972bbaf5ae345 Mon Sep 17 00:00:00 2001
From: Raine Curtis <rcurtis@suse.com>
Date: Mon, 9 Jul 2018 09:55:30 -0600
Subject: [PATCH] Improved handling of LDAP group id
gid is casted to int, which should be the case. Otherwise an error
is returned.
---
salt/states/group.py | 11 +++++++++--
1 file changed, 9 insertions(+), 2 deletions(-)
diff --git a/salt/states/group.py b/salt/states/group.py
index 6a720757e8..acf775134c 100644
--- a/salt/states/group.py
+++ b/salt/states/group.py
@@ -72,9 +72,16 @@ def _changes(name,
delusers = [salt.utils.win_functions.get_sam_name(user).lower() for user in delusers]
change = {}
+ ret = {}
if gid:
- if lgrp['gid'] != gid:
- change['gid'] = gid
+ try:
+ gid = int(gid)
+ if lgrp['gid'] != gid:
+ change['gid'] = gid
+ except (TypeError, ValueError):
+ ret['result'] = False
+ ret['comment'] = 'Invalid gid'
+ return ret
if members:
# -- if new member list if different than the current
--
2.19.1

View File

@ -0,0 +1,147 @@
From 5dc6f2a59a8a774d13dcfd36b25ea735df18f10f Mon Sep 17 00:00:00 2001
From: Bo Maryniuk <bo@suse.de>
Date: Tue, 29 Jan 2019 11:11:38 +0100
Subject: [PATCH] Include aliases in the fqdns grains
Add UT for "is_fqdn"
Add "is_fqdn" check to the network utils
Bugfix: include FQDNs aliases
Deprecate UnitTest assertion in favour of built-in assert keyword
Add UT for fqdns aliases
Leverage cached interfaces, if any.
---
salt/grains/core.py | 12 +++++-------
salt/utils/network.py | 12 ++++++++++++
tests/unit/grains/test_core.py | 28 +++++++++++++++++++++++++---
tests/unit/utils/test_network.py | 19 +++++++++++++++++++
4 files changed, 61 insertions(+), 10 deletions(-)
diff --git a/salt/grains/core.py b/salt/grains/core.py
index b0c1acceeb..05a9d5035d 100644
--- a/salt/grains/core.py
+++ b/salt/grains/core.py
@@ -2200,14 +2200,13 @@ def fqdns():
grains = {}
fqdns = set()
- addresses = salt.utils.network.ip_addrs(include_loopback=False,
- interface_data=_INTERFACES)
- addresses.extend(salt.utils.network.ip_addrs6(include_loopback=False,
- interface_data=_INTERFACES))
+ addresses = salt.utils.network.ip_addrs(include_loopback=False, interface_data=_get_interfaces())
+ addresses.extend(salt.utils.network.ip_addrs6(include_loopback=False, interface_data=_get_interfaces()))
err_message = 'Exception during resolving address: %s'
for ip in addresses:
try:
- fqdns.add(socket.getfqdn(socket.gethostbyaddr(ip)[0]))
+ name, aliaslist, addresslist = socket.gethostbyaddr(ip)
+ fqdns.update([socket.getfqdn(name)] + [als for als in aliaslist if salt.utils.network.is_fqdn(als)])
except socket.herror as err:
if err.errno == 0:
# No FQDN for this IP address, so we don't need to know this all the time.
@@ -2217,8 +2216,7 @@ def fqdns():
except (socket.error, socket.gaierror, socket.timeout) as err:
log.error(err_message, err)
- grains['fqdns'] = sorted(list(fqdns))
- return grains
+ return {"fqdns": sorted(list(fqdns))}
def ip_fqdn():
diff --git a/salt/utils/network.py b/salt/utils/network.py
index 83269cdcf6..c72d2aec41 100644
--- a/salt/utils/network.py
+++ b/salt/utils/network.py
@@ -2016,3 +2016,15 @@ def parse_host_port(host_port):
raise ValueError('bad hostname: "{}"'.format(host))
return host, port
+
+
+def is_fqdn(hostname):
+ """
+ Verify if hostname conforms to be a FQDN.
+
+ :param hostname: text string with the name of the host
+ :return: bool, True if hostname is correct FQDN, False otherwise
+ """
+
+ compliant = re.compile(r"(?!-)[A-Z\d\-\_]{1,63}(?<!-)$", re.IGNORECASE)
+ return "." in hostname and len(hostname) < 0xff and all(compliant.match(x) for x in hostname.rstrip(".").split("."))
diff --git a/tests/unit/grains/test_core.py b/tests/unit/grains/test_core.py
index d5a1b1a36b..117e02c39f 100644
--- a/tests/unit/grains/test_core.py
+++ b/tests/unit/grains/test_core.py
@@ -863,10 +863,32 @@ class CoreGrainsTestCase(TestCase, LoaderModuleMockMixin):
ret = {'fqdns': ['bluesniff.foo.bar', 'foo.bar.baz', 'rinzler.evil-corp.com']}
with patch.object(socket, 'gethostbyaddr', side_effect=reverse_resolv_mock):
fqdns = core.fqdns()
- self.assertIn('fqdns', fqdns)
- self.assertEqual(len(fqdns['fqdns']), len(ret['fqdns']))
- self.assertEqual(set(fqdns['fqdns']), set(ret['fqdns']))
+ assert "fqdns" in fqdns
+ assert len(fqdns['fqdns']) == len(ret['fqdns'])
+ assert set(fqdns['fqdns']) == set(ret['fqdns'])
+ @skipIf(not salt.utils.platform.is_linux(), 'System is not Linux')
+ @patch.object(salt.utils.platform, 'is_windows', MagicMock(return_value=False))
+ @patch('salt.utils.network.ip_addrs', MagicMock(return_value=['1.2.3.4', '5.6.7.8']))
+ @patch('salt.utils.network.ip_addrs6',
+ MagicMock(return_value=['fe80::a8b2:93ff:fe00:0', 'fe80::a8b2:93ff:dead:beef']))
+ @patch('salt.utils.network.socket.getfqdn', MagicMock(side_effect=lambda v: v)) # Just pass-through
+ def test_fqdns_aliases(self):
+ '''
+ FQDNs aliases
+ '''
+ reverse_resolv_mock = [('foo.bar.baz', ["throwmeaway", "this.is.valid.alias"], ['1.2.3.4']),
+ ('rinzler.evil-corp.com', ["false-hostname", "badaliass"], ['5.6.7.8']),
+ ('foo.bar.baz', [], ['fe80::a8b2:93ff:fe00:0']),
+ ('bluesniff.foo.bar', ["alias.bluesniff.foo.bar"], ['fe80::a8b2:93ff:dead:beef'])]
+ with patch.object(socket, 'gethostbyaddr', side_effect=reverse_resolv_mock):
+ fqdns = core.fqdns()
+ assert "fqdns" in fqdns
+ for alias in ["this.is.valid.alias", "alias.bluesniff.foo.bar"]:
+ assert alias in fqdns["fqdns"]
+
+ for alias in ["throwmeaway", "false-hostname", "badaliass"]:
+ assert alias not in fqdns["fqdns"]
def test_core_virtual(self):
'''
test virtual grain with cmd virt-what
diff --git a/tests/unit/utils/test_network.py b/tests/unit/utils/test_network.py
index 3d20c880bd..ca627777a7 100644
--- a/tests/unit/utils/test_network.py
+++ b/tests/unit/utils/test_network.py
@@ -637,3 +637,22 @@ class NetworkTestCase(TestCase):
# An exception is raised if unicode is passed to socket.getfqdn
minion_id = network.generate_minion_id()
assert minion_id != '', minion_id
+
+ def test_is_fqdn(self):
+ """
+ Test is_fqdn function passes possible FQDN names.
+
+ :return: None
+ """
+ for fqdn in ["host.domain.com", "something.with.the.dots.still.ok", "UPPERCASE.ALSO.SHOULD.WORK",
+ "MiXeD.CaSe.AcCePtAbLe", "123.host.com", "host123.com", "some_underscore.com", "host-here.com"]:
+ assert network.is_fqdn(fqdn)
+
+ def test_is_not_fqdn(self):
+ """
+ Test is_fqdn function rejects FQDN names.
+
+ :return: None
+ """
+ for fqdn in ["hostname", "/some/path", "$variable.here", "verylonghostname.{}".format("domain" * 45)]:
+ assert not network.is_fqdn(fqdn)
--
2.20.1

View File

@ -1,20 +1,20 @@
From 06aff97c83342cf9635fa750222f774ab1664a0d Mon Sep 17 00:00:00 2001
From 216342f03940080176111f5e0e31b43cd909e164 Mon Sep 17 00:00:00 2001
From: ed lane <ed.lane.0@gmail.com>
Date: Thu, 30 Aug 2018 06:07:08 -0600
Subject: [PATCH] Integration of MSI authentication with azurearm cloud
driver (#105)
---
salt/cloud/clouds/azurearm.py | 47 +++++++++++++++++++++++++++--------
1 file changed, 36 insertions(+), 11 deletions(-)
salt/cloud/clouds/azurearm.py | 7 ++++++-
1 file changed, 6 insertions(+), 1 deletion(-)
diff --git a/salt/cloud/clouds/azurearm.py b/salt/cloud/clouds/azurearm.py
index bd9a25a7e2..8b9a9e8903 100644
index e8050dca16..229412adcd 100644
--- a/salt/cloud/clouds/azurearm.py
+++ b/salt/cloud/clouds/azurearm.py
@@ -25,6 +25,9 @@ The Azure cloud module is used to control access to Microsoft Azure
* ``client_id``
* ``secret``
@@ -58,6 +58,9 @@ The Azure ARM cloud module is used to control access to Microsoft Azure Resource
virtual machine type will be "Windows". Only set this parameter on profiles which install Windows operating systems.
+ if using MSI-style authentication:
+ * ``subscription_id``
@ -22,97 +22,25 @@ index bd9a25a7e2..8b9a9e8903 100644
Example ``/etc/salt/cloud.providers`` or
``/etc/salt/cloud.providers.d/azure.conf`` configuration:
@@ -48,6 +51,10 @@ Example ``/etc/salt/cloud.providers`` or
For example, this creates a service principal with 'owner' role for the whole subscription:
az ad sp create-for-rbac -n "http://mysaltapp" --role owner --scopes /subscriptions/3287abc8-f98a-c678-3bde-326766fd3617
*Note: review the details of Service Principals. Owner role is more than you normally need, and you can restrict scope to a resource group or individual resources.
+
+ Or my-azure-config with MSI-style authentication:
+ driver: azure
+ subscription_id: 3287abc8-f98a-c678-3bde-326766fd3617
'''
# pylint: disable=E0102
@@ -86,6 +93,7 @@ try:
UserPassCredentials,
ServicePrincipalCredentials,
)
+ from msrestazure.azure_active_directory import MSIAuthentication
from azure.mgmt.compute import ComputeManagementClient
from azure.mgmt.compute.models import (
CachingTypes,
@@ -166,19 +174,30 @@ def get_configured_provider():
'''
Return the first configured instance.
'''
+ # check if using Service Principle style authentication...
provider = config.is_provider_configured(
__opts__,
__active_provider_name__ or __virtualname__,
- ('subscription_id', 'tenant', 'client_id', 'secret')
+ required_keys=('subscription_id', 'tenant', 'client_id', 'secret'),
+ log_message=False #... allowed to fail so no need to log warnings
)
if provider is False:
- return config.is_provider_configured(
+ # check if using username/password style authentication...
+ provider = config.is_provider_configured(
@@ -258,7 +261,8 @@ def get_configured_provider():
provider = __is_provider_configured(
__opts__,
__active_provider_name__ or __virtualname__,
- ('subscription_id', 'username', 'password')
+ required_keys=('subscription_id', 'username', 'password'),
+ log_message=False
)
- else:
- return provider
+ if provider is False:
+ # check if using MSI style credentials...
+ provider = config.is_provider_configured(
+ __opts__,
+ __active_provider_name__ or __virtualname__,
+ required_keys=('subscription_id',),
+ log_message=False
+ )
+ return provider
def get_dependencies():
@@ -210,6 +229,7 @@ def get_conn(Client=None):
get_configured_provider(), __opts__, search_global=False
return provider
@@ -301,6 +305,7 @@ def get_conn(client_type):
)
if tenant is not None:
+ # using Service Principle style authentication...
client_id = config.get_cloud_config_value(
'client_id',
get_configured_provider(), __opts__, search_global=False
@@ -224,15 +244,20 @@ def get_conn(Client=None):
'username',
get_configured_provider(), __opts__, search_global=False
)
- password = config.get_cloud_config_value(
- 'password',
- get_configured_provider(), __opts__, search_global=False
- )
- credentials = UserPassCredentials(username, password)
+ if username is not None:
+ # using username/password style authentication...
+ password = config.get_cloud_config_value(
+ 'password',
+ get_configured_provider(), __opts__, search_global=False
+ )
+ credentials = UserPassCredentials(username, password)
+ else:
+ # using MSI style authentication ...
+ credentials = MSIAuthentication()
client = Client(
credentials=credentials,
- subscription_id=subscription_id,
+ subscription_id=str(subscription_id),
)
client.config.add_user_agent('SaltCloud/{0}'.format(salt.version.__version__))
return client
--
2.19.0
2.17.1

View File

@ -1,4 +1,4 @@
From 9d9fb3fd787b40d9d27ad7c5eb69fa0cd4f5a304 Mon Sep 17 00:00:00 2001
From 8fe82178247ff3704915b578398ea55b0c6e4fa0 Mon Sep 17 00:00:00 2001
From: Joachim Gleissner <jgleissner@suse.com>
Date: Tue, 18 Sep 2018 15:07:13 +0200
Subject: [PATCH] loosen azure sdk dependencies in azurearm cloud driver
@ -8,55 +8,33 @@ Use azure-storage-sdk as fallback if multiapi version is not available.
remove unused import from azurearm driver
---
salt/cloud/clouds/azurearm.py | 14 ++++++++------
1 file changed, 8 insertions(+), 6 deletions(-)
salt/cloud/clouds/azurearm.py | 6 ++++++
1 file changed, 6 insertions(+)
diff --git a/salt/cloud/clouds/azurearm.py b/salt/cloud/clouds/azurearm.py
index 8b9a9e8903..50e5ce1f62 100644
index 229412adcd..ac59467fb3 100644
--- a/salt/cloud/clouds/azurearm.py
+++ b/salt/cloud/clouds/azurearm.py
@@ -67,6 +67,7 @@ import logging
import pprint
import base64
import collections
@@ -104,6 +104,7 @@ import time
# Salt libs
from salt.ext import six
+import pkgutil
import salt.cache
import salt.config as config
import salt.utils.cloud
@@ -74,7 +75,6 @@ import salt.utils.data
import salt.utils.files
import salt.utils.stringutils
import salt.utils.yaml
-from salt.utils.versions import LooseVersion
from salt.ext import six
import salt.version
from salt.exceptions import (
@@ -125,9 +125,12 @@ try:
from azure.mgmt.storage import StorageManagementClient
from azure.mgmt.web import WebSiteManagementClient
import salt.loader
@@ -126,6 +127,11 @@ try:
import azure.mgmt.network.models as network_models
from azure.storage.blob.blockblobservice import BlockBlobService
from msrestazure.azure_exceptions import CloudError
- from azure.multiapi.storage.v2016_05_31 import CloudStorageAccount
- from azure.cli import core
- HAS_LIBS = LooseVersion(core.__version__) >= LooseVersion("2.0.12")
+ if pkgutil.find_loader('azure.multiapi'):
+ # use multiapi version if available
+ from azure.multiapi.storage.v2016_05_31 import CloudStorageAccount
+ else:
+ from azure.storage import CloudStorageAccount
+ HAS_LIBS = True
HAS_LIBS = True
except ImportError:
pass
# pylint: enable=wrong-import-position,wrong-import-order
@@ -160,8 +163,7 @@ def __virtual__():
False,
'The following dependencies are required to use the AzureARM driver: '
'Microsoft Azure SDK for Python >= 2.0rc5, '
- 'Microsoft Azure Storage SDK for Python >= 0.32, '
- 'Microsoft Azure CLI >= 2.0.12'
+ 'Microsoft Azure Storage SDK for Python >= 0.32'
)
global cache # pylint: disable=global-statement,invalid-name
--
2.17.1

View File

@ -1,29 +0,0 @@
From f346e83f6d4651a1cdcaad8c995642b55f66ddbc Mon Sep 17 00:00:00 2001
From: Daniel Wallace <danielwallace@gtmanfred.com>
Date: Wed, 25 Jul 2018 09:48:29 -0500
Subject: [PATCH] only do reverse dns lookup on ips for salt-ssh
Fixes #48676
---
salt/client/ssh/__init__.py | 4 +++-
1 file changed, 3 insertions(+), 1 deletion(-)
diff --git a/salt/client/ssh/__init__.py b/salt/client/ssh/__init__.py
index 8a85cc2480..d6ff0c3479 100644
--- a/salt/client/ssh/__init__.py
+++ b/salt/client/ssh/__init__.py
@@ -349,7 +349,9 @@ class SSH(object):
return
hostname = self.opts['tgt'].split('@')[-1]
- needs_expansion = '*' not in hostname and salt.utils.network.is_reachable_host(hostname)
+ needs_expansion = '*' not in hostname and \
+ salt.utils.network.is_reachable_host(hostname) and \
+ salt.utils.network.is_ip(hostname)
if needs_expansion:
hostname = salt.utils.network.ip_to_host(hostname)
if hostname is None:
--
2.17.1

View File

@ -1,101 +0,0 @@
From e8c1b2c5a8af5cc6f4551918f695d1463a6eb584 Mon Sep 17 00:00:00 2001
From: Matei Albu <malbu@suse.de>
Date: Sun, 6 May 2018 21:15:58 +0200
Subject: [PATCH] Option to merge current pillar with opts['pillar']
during pillar compile
Fixes #47501
(cherry picked from commit 2f1485e)
---
doc/ref/configuration/minion.rst | 28 ++++++++++++++++++++++++++++
salt/config/__init__.py | 4 +++-
salt/pillar/__init__.py | 7 +++++++
3 files changed, 38 insertions(+), 1 deletion(-)
diff --git a/doc/ref/configuration/minion.rst b/doc/ref/configuration/minion.rst
index c9010a702b..d9823b78d8 100644
--- a/doc/ref/configuration/minion.rst
+++ b/doc/ref/configuration/minion.rst
@@ -3219,3 +3219,31 @@ URL of the repository:
Replace ``<commit_id>`` with the SHA1 hash of a commit ID. Specifying a commit
ID is useful in that it allows one to revert back to a previous version in the
event that an error is introduced in the latest revision of the repo.
+
+``ssh_merge_pillar``
+--------------------
+
+.. versionadded:: 2018.3.2
+
+Default: ``True``
+
+Merges the compiled pillar data with the pillar data already available globally.
+This is useful when using ``salt-ssh`` or ``salt-call --local`` and overriding the pillar
+data in a state file:
+
+.. code-block:: yaml
+
+ apply_showpillar:
+ module.run:
+ - name: state.apply
+ - mods:
+ - showpillar
+ - kwargs:
+ pillar:
+ test: "foo bar"
+
+If set to ``True`` the ``showpillar`` state will have access to the
+global pillar data.
+
+If set to ``False`` only the overriding pillar data will be available
+to the ``showpillar`` state.
diff --git a/salt/config/__init__.py b/salt/config/__init__.py
index 432364b201..feda0abac1 100644
--- a/salt/config/__init__.py
+++ b/salt/config/__init__.py
@@ -989,6 +989,7 @@ VALID_OPTS = {
'ssh_identities_only': bool,
'ssh_log_file': six.string_types,
'ssh_config_file': six.string_types,
+ 'ssh_merge_pillar': bool,
# Enable ioflo verbose logging. Warning! Very verbose!
'ioflo_verbose': int,
@@ -1485,6 +1486,7 @@ DEFAULT_MINION_OPTS = {
},
'discovery': False,
'schedule': {},
+ 'ssh_merge_pillar': True
}
DEFAULT_MASTER_OPTS = {
@@ -2089,7 +2091,7 @@ def _validate_ssh_minion_opts(opts):
for opt_name in list(ssh_minion_opts):
if re.match('^[a-z0-9]+fs_', opt_name, flags=re.IGNORECASE) \
- or 'pillar' in opt_name \
+ or ('pillar' in opt_name and not 'ssh_merge_pillar' == opt_name) \
or opt_name in ('fileserver_backend',):
log.warning(
'\'%s\' is not a valid ssh_minion_opts parameter, ignoring',
diff --git a/salt/pillar/__init__.py b/salt/pillar/__init__.py
index fc1e34f75d..fc3ce0a5c0 100644
--- a/salt/pillar/__init__.py
+++ b/salt/pillar/__init__.py
@@ -1014,6 +1014,13 @@ class Pillar(object):
mopts['file_roots'] = self.actual_file_roots
mopts['saltversion'] = __version__
pillar['master'] = mopts
+ if 'pillar' in self.opts and self.opts.get('ssh_merge_pillar', False):
+ pillar = merge(
+ self.opts['pillar'],
+ pillar,
+ self.merge_strategy,
+ self.opts.get('renderer', 'yaml'),
+ self.opts.get('pillar_merge_lists', False))
if errors:
for error in errors:
log.critical('Pillar render error: %s', error)
--
2.13.7

View File

@ -1,38 +0,0 @@
From 341ee0c44cabf2f34bdd2f4b54e4b83053a3133e Mon Sep 17 00:00:00 2001
From: Mihai Dinca <mdinca@suse.de>
Date: Thu, 23 Aug 2018 16:14:36 +0200
Subject: [PATCH] Prepend current directory when path is just filename
(bsc#1095942)
---
salt/utils/parsers.py | 9 +++++++++
1 file changed, 9 insertions(+)
diff --git a/salt/utils/parsers.py b/salt/utils/parsers.py
index 5a415ab576..9a7f27ac11 100644
--- a/salt/utils/parsers.py
+++ b/salt/utils/parsers.py
@@ -591,10 +591,19 @@ class LogLevelMixIn(six.with_metaclass(MixInMeta, object)):
)
)
+ def _logfile_callback(option, opt, value, parser, *args, **kwargs):
+ if not os.path.dirname(value):
+ # if the path is only a file name (no parent directory), assume current directory
+ value = os.path.join(os.path.curdir, value)
+ setattr(parser.values, self._logfile_config_setting_name_, value)
+
group.add_option(
'--log-file',
dest=self._logfile_config_setting_name_,
default=None,
+ action='callback',
+ type='string',
+ callback=_logfile_callback,
help='Log file path. Default: \'{0}\'.'.format(
self._default_logging_logfile_
)
--
2.19.0

View File

@ -1,28 +0,0 @@
From d282de5c59e27c17bd5afb207c4eeaa754993368 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Pablo=20Su=C3=A1rez=20Hern=C3=A1ndez?=
<psuarezhernandez@suse.com>
Date: Tue, 22 May 2018 12:04:48 +0100
Subject: [PATCH] Prevent zypper from parsing repo configuration from not
.repo files
---
salt/modules/zypper.py | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/salt/modules/zypper.py b/salt/modules/zypper.py
index 06f8335c18..05ba3d86c9 100644
--- a/salt/modules/zypper.py
+++ b/salt/modules/zypper.py
@@ -862,7 +862,7 @@ def _get_configured_repos():
'''
repos_cfg = configparser.ConfigParser()
- repos_cfg.read([REPOS + '/' + fname for fname in os.listdir(REPOS)])
+ repos_cfg.read([REPOS + '/' + fname for fname in os.listdir(REPOS) if fname.endswith(".repo")])
return repos_cfg
--
2.13.7

View File

@ -1,4 +1,4 @@
From 6488d91acb6f470bfa2b66ac8100cb67d6367612 Mon Sep 17 00:00:00 2001
From 51ccc41dd16564dea5b465d122218ca8047f9f3e Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Pablo=20Su=C3=A1rez=20Hern=C3=A1ndez?=
<psuarezhernandez@suse.com>
Date: Mon, 19 Nov 2018 11:46:26 +0000
@ -38,19 +38,19 @@ Remove unnecessary lambda
Return None instead empty string for arch and release in pkg.list_pkgs
---
salt/modules/aptpkg.py | 38 ++++++++
salt/modules/pkg_resource.py | 17 +++-
salt/modules/pkg_resource.py | 20 +++-
salt/modules/yumpkg.py | 32 ++++++-
salt/modules/zypper.py | 29 +++++-
salt/modules/zypperpkg.py | 29 +++++-
tests/unit/modules/test_pkg_resource.py | 116 ++++++++++++++++++++++++
tests/unit/modules/test_yumpkg.py | 85 ++++++++++++++++-
tests/unit/modules/test_zypper.py | 81 ++++++++++++++++-
7 files changed, 382 insertions(+), 16 deletions(-)
tests/unit/modules/test_zypperpkg.py | 79 +++++++++++++++-
7 files changed, 383 insertions(+), 16 deletions(-)
diff --git a/salt/modules/aptpkg.py b/salt/modules/aptpkg.py
index 42d606926f..1fd4883f2c 100644
index 4a331444c9..f51b6958e5 100644
--- a/salt/modules/aptpkg.py
+++ b/salt/modules/aptpkg.py
@@ -77,6 +77,7 @@ except ImportError:
@@ -73,6 +73,7 @@ except ImportError:
# pylint: enable=import-error
APT_LISTS_PATH = "/var/lib/apt/lists"
@ -58,7 +58,7 @@ index 42d606926f..1fd4883f2c 100644
# Source format for urllib fallback on PPA handling
LP_SRC_FORMAT = 'deb http://ppa.launchpad.net/{0}/{1}/ubuntu {2} main'
@@ -218,6 +219,43 @@ def _warn_software_properties(repo):
@@ -185,6 +186,43 @@ def _warn_software_properties(repo):
log.warning('Best guess at ppa format: %s', repo)
@ -103,16 +103,17 @@ index 42d606926f..1fd4883f2c 100644
'''
Return the latest version of the named package available for upgrade or
diff --git a/salt/modules/pkg_resource.py b/salt/modules/pkg_resource.py
index 9b0a8287f5..0c872f1805 100644
index 8b83f1cda5..0c872f1805 100644
--- a/salt/modules/pkg_resource.py
+++ b/salt/modules/pkg_resource.py
@@ -311,22 +311,31 @@ def format_pkg_list(packages, versions_as_list, attr):
@@ -311,21 +311,31 @@ def format_pkg_list(packages, versions_as_list, attr):
'''
ret = copy.deepcopy(packages)
if attr:
- requested_attr = {'epoch', 'version', 'release', 'arch', 'install_date', 'install_date_time_t'}
+ ret_attr = {}
requested_attr = set(['epoch', 'version', 'release', 'arch',
'install_date', 'install_date_time_t'])
+ requested_attr = set(['epoch', 'version', 'release', 'arch',
+ 'install_date', 'install_date_time_t'])
if attr != 'all':
- requested_attr &= set(attr + ['version'])
@ -143,10 +144,10 @@ index 9b0a8287f5..0c872f1805 100644
for name in ret:
ret[name] = [format_version(d['epoch'], d['version'], d['release'])
diff --git a/salt/modules/yumpkg.py b/salt/modules/yumpkg.py
index 51832bf883..cf50d1a4c4 100644
index a56a2e8366..4f26a41670 100644
--- a/salt/modules/yumpkg.py
+++ b/salt/modules/yumpkg.py
@@ -65,6 +65,8 @@ log = logging.getLogger(__name__)
@@ -66,6 +66,8 @@ log = logging.getLogger(__name__)
__HOLD_PATTERN = r'[\w+]+(?:[.-][^-]+)*'
@ -155,7 +156,7 @@ index 51832bf883..cf50d1a4c4 100644
# Define the module's virtual name
__virtualname__ = 'pkg'
@@ -397,7 +399,7 @@ def normalize_name(name):
@@ -429,7 +431,7 @@ def normalize_name(name):
salt '*' pkg.normalize_name zsh.x86_64
'''
try:
@ -164,7 +165,7 @@ index 51832bf883..cf50d1a4c4 100644
if arch not in salt.utils.pkg.rpm.ARCHES + ('noarch',):
return name
except ValueError:
@@ -408,6 +410,30 @@ def normalize_name(name):
@@ -440,6 +442,30 @@ def normalize_name(name):
return name
@ -195,7 +196,7 @@ index 51832bf883..cf50d1a4c4 100644
def latest_version(*names, **kwargs):
'''
Return the latest version of the named package available for upgrade or
@@ -647,8 +673,8 @@ def list_pkgs(versions_as_list=False, **kwargs):
@@ -676,8 +702,8 @@ def list_pkgs(versions_as_list=False, **kwargs):
if pkginfo is not None:
# see rpm version string rules available at https://goo.gl/UGKPNd
pkgver = pkginfo.version
@ -206,11 +207,11 @@ index 51832bf883..cf50d1a4c4 100644
if ':' in pkgver:
epoch, pkgver = pkgver.split(":", 1)
if '-' in pkgver:
diff --git a/salt/modules/zypper.py b/salt/modules/zypper.py
index 773354b2f3..ae66e4709d 100644
--- a/salt/modules/zypper.py
+++ b/salt/modules/zypper.py
@@ -52,6 +52,7 @@ ZYPP_HOME = '/etc/zypp'
diff --git a/salt/modules/zypperpkg.py b/salt/modules/zypperpkg.py
index 0c26e2214c..92e7052020 100644
--- a/salt/modules/zypperpkg.py
+++ b/salt/modules/zypperpkg.py
@@ -53,6 +53,7 @@ ZYPP_HOME = '/etc/zypp'
LOCKS = '{0}/locks'.format(ZYPP_HOME)
REPOS = '{0}/repos.d'.format(ZYPP_HOME)
DEFAULT_PRIORITY = 99
@ -218,7 +219,7 @@ index 773354b2f3..ae66e4709d 100644
# Define the module's virtual name
__virtualname__ = 'pkg'
@@ -588,6 +589,30 @@ def info_available(*names, **kwargs):
@@ -590,6 +591,30 @@ def info_available(*names, **kwargs):
return ret
@ -249,8 +250,8 @@ index 773354b2f3..ae66e4709d 100644
def latest_version(*names, **kwargs):
'''
Return the latest version of the named package available for upgrade or
@@ -756,8 +781,8 @@ def list_pkgs(versions_as_list=False, **kwargs):
if pkginfo is not None:
@@ -760,8 +785,8 @@ def list_pkgs(versions_as_list=False, **kwargs):
if pkginfo:
# see rpm version string rules available at https://goo.gl/UGKPNd
pkgver = pkginfo.version
- epoch = ''
@ -261,7 +262,7 @@ index 773354b2f3..ae66e4709d 100644
epoch, pkgver = pkgver.split(":", 1)
if '-' in pkgver:
diff --git a/tests/unit/modules/test_pkg_resource.py b/tests/unit/modules/test_pkg_resource.py
index dd3ae9a1ac..2cfd6bb16a 100644
index b6d90cc92c..a9ffe43cdd 100644
--- a/tests/unit/modules/test_pkg_resource.py
+++ b/tests/unit/modules/test_pkg_resource.py
@@ -129,6 +129,122 @@ class PkgresTestCase(TestCase, LoaderModuleMockMixin):
@ -388,18 +389,18 @@ index dd3ae9a1ac..2cfd6bb16a 100644
'''
Test to takes a dict of package name/version information
diff --git a/tests/unit/modules/test_yumpkg.py b/tests/unit/modules/test_yumpkg.py
index c73f2582b9..324c2c8b66 100644
index 6113d3a4b1..6019a8179e 100644
--- a/tests/unit/modules/test_yumpkg.py
+++ b/tests/unit/modules/test_yumpkg.py
@@ -16,6 +16,7 @@ from tests.support.mock import (
)
@@ -18,6 +18,7 @@ from tests.support.mock import (
# Import Salt libs
from salt.exceptions import CommandExecutionError
import salt.modules.rpm_lowpkg as rpm
+from salt.ext import six
import salt.modules.yumpkg as yumpkg
import salt.modules.pkg_resource as pkg_resource
@@ -69,7 +70,8 @@ class YumTestCase(TestCase, LoaderModuleMockMixin):
@@ -76,7 +77,8 @@ class YumTestCase(TestCase, LoaderModuleMockMixin):
'os_family': 'RedHat',
'osmajorrelease': 7,
},
@ -409,7 +410,7 @@ index c73f2582b9..324c2c8b66 100644
}
def test_list_pkgs(self):
@@ -100,7 +102,8 @@ class YumTestCase(TestCase, LoaderModuleMockMixin):
@@ -107,7 +109,8 @@ class YumTestCase(TestCase, LoaderModuleMockMixin):
patch.dict(yumpkg.__salt__, {'cmd.run': MagicMock(return_value=os.linesep.join(rpm_out))}), \
patch.dict(yumpkg.__salt__, {'pkg_resource.add_pkg': _add_data}), \
patch.dict(yumpkg.__salt__, {'pkg_resource.format_pkg_list': pkg_resource.format_pkg_list}), \
@ -419,7 +420,7 @@ index c73f2582b9..324c2c8b66 100644
pkgs = yumpkg.list_pkgs(versions_as_list=True)
for pkg_name, pkg_version in {
'python-urlgrabber': '3.10-8.el7',
@@ -147,7 +150,8 @@ class YumTestCase(TestCase, LoaderModuleMockMixin):
@@ -154,7 +157,8 @@ class YumTestCase(TestCase, LoaderModuleMockMixin):
patch.dict(yumpkg.__salt__, {'cmd.run': MagicMock(return_value=os.linesep.join(rpm_out))}), \
patch.dict(yumpkg.__salt__, {'pkg_resource.add_pkg': _add_data}), \
patch.dict(yumpkg.__salt__, {'pkg_resource.format_pkg_list': pkg_resource.format_pkg_list}), \
@ -429,7 +430,7 @@ index c73f2582b9..324c2c8b66 100644
pkgs = yumpkg.list_pkgs(attr=['epoch', 'release', 'arch', 'install_date_time_t'])
for pkg_name, pkg_attr in {
'python-urlgrabber': {
@@ -155,54 +159,63 @@ class YumTestCase(TestCase, LoaderModuleMockMixin):
@@ -162,54 +166,63 @@ class YumTestCase(TestCase, LoaderModuleMockMixin):
'release': '8.el7',
'arch': 'noarch',
'install_date_time_t': 1487838471,
@ -493,7 +494,7 @@ index c73f2582b9..324c2c8b66 100644
},
'shadow-utils': {
'epoch': '2',
@@ -216,22 +229,88 @@ class YumTestCase(TestCase, LoaderModuleMockMixin):
@@ -223,22 +236,88 @@ class YumTestCase(TestCase, LoaderModuleMockMixin):
'release': '33.el7',
'arch': 'x86_64',
'install_date_time_t': 1487838484,
@ -582,10 +583,10 @@ index c73f2582b9..324c2c8b66 100644
def test_latest_version_with_options(self):
with patch.object(yumpkg, 'list_pkgs', MagicMock(return_value={})):
diff --git a/tests/unit/modules/test_zypper.py b/tests/unit/modules/test_zypper.py
index 424438c8bf..a60e209b2c 100644
--- a/tests/unit/modules/test_zypper.py
+++ b/tests/unit/modules/test_zypper.py
diff --git a/tests/unit/modules/test_zypperpkg.py b/tests/unit/modules/test_zypperpkg.py
index 3259e1810d..f586c23fd0 100644
--- a/tests/unit/modules/test_zypperpkg.py
+++ b/tests/unit/modules/test_zypperpkg.py
@@ -61,7 +61,7 @@ class ZypperTestCase(TestCase, LoaderModuleMockMixin):
'''
@ -595,7 +596,7 @@ index 424438c8bf..a60e209b2c 100644
def setUp(self):
self.new_repo_config = dict(
@@ -603,7 +603,8 @@ Repository 'DUMMY' not found by its alias, number, or URI.
@@ -605,7 +605,8 @@ Repository 'DUMMY' not found by its alias, number, or URI.
patch.dict(zypper.__grains__, {'osarch': 'x86_64'}), \
patch.dict(zypper.__salt__, {'pkg_resource.add_pkg': _add_data}), \
patch.dict(zypper.__salt__, {'pkg_resource.format_pkg_list': pkg_resource.format_pkg_list}), \
@ -605,7 +606,7 @@ index 424438c8bf..a60e209b2c 100644
pkgs = zypper.list_pkgs(attr=['epoch', 'release', 'arch', 'install_date_time_t'])
self.assertFalse(pkgs.get('gpg-pubkey', False))
for pkg_name, pkg_attr in {
@@ -612,58 +613,130 @@ Repository 'DUMMY' not found by its alias, number, or URI.
@@ -614,58 +615,130 @@ Repository 'DUMMY' not found by its alias, number, or URI.
'release': '129.686',
'arch': 'noarch',
'install_date_time_t': 1498636511,
@ -654,8 +655,7 @@ index 424438c8bf..a60e209b2c 100644
'install_date_time_t': 1503572639,
+ 'epoch': None,
}],
- 'perseus-dummy.i586': [{
+ 'perseus-dummy': [{
'perseus-dummy.i586': [{
'version': '1.1',
'release': '1.1',
'arch': 'i586',
@ -688,8 +688,8 @@ index 424438c8bf..a60e209b2c 100644
+ 'virt-what_|-1.10_|-2.el7_|-x86_64_|-_|-1387838486',
+ ]
+
+ with patch.dict(zypper.__salt__, {'cmd.run': MagicMock(return_value=os.linesep.join(rpm_out))}), \
+ patch.dict(zypper.__grains__, {'osarch': 'x86_64'}), \
+ with patch.dict(zypper.__grains__, {'osarch': 'x86_64'}), \
+ patch.dict(zypper.__salt__, {'cmd.run': MagicMock(return_value=os.linesep.join(rpm_out))}), \
+ patch.dict(zypper.__salt__, {'pkg_resource.add_pkg': _add_data}), \
+ patch.dict(zypper.__salt__, {'pkg_resource.format_pkg_list': pkg_resource.format_pkg_list}), \
+ patch.dict(zypper.__salt__, {'pkg_resource.stringify': MagicMock()}), \
@ -739,6 +739,6 @@ index 424438c8bf..a60e209b2c 100644
'''
Test advisory patches listing.
--
2.17.1
2.20.1

View File

@ -1,51 +0,0 @@
From 11186ce52ae42967c49a6e238659a566e488a6b4 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Pablo=20Su=C3=A1rez=20Hern=C3=A1ndez?=
<psuarezhernandez@suse.com>
Date: Mon, 23 Jul 2018 16:32:26 +0100
Subject: [PATCH] Remove old hack when reporting multiversion packages
Fix unit tests for zypper pkg.upgrade
---
salt/modules/zypper.py | 5 -----
tests/unit/modules/test_zypper.py | 8 +++++++-
2 files changed, 7 insertions(+), 6 deletions(-)
diff --git a/salt/modules/zypper.py b/salt/modules/zypper.py
index 4689f84926..695bce4f4e 100644
--- a/salt/modules/zypper.py
+++ b/salt/modules/zypper.py
@@ -1480,11 +1480,6 @@ def upgrade(refresh=True,
__zypper__(systemd_scope=_systemd_scope()).noraise.call(*cmd_update)
_clean_cache()
new = list_pkgs()
-
- # Handle packages which report multiple new versions
- # (affects only kernel packages at this point)
- for pkg in new:
- new[pkg] = new[pkg].split(',')[-1]
ret = salt.utils.data.compare_dicts(old, new)
if __zypper__.exit_code not in __zypper__.SUCCESS_EXIT_CODES:
diff --git a/tests/unit/modules/test_zypper.py b/tests/unit/modules/test_zypper.py
index bb15aca11a..424438c8bf 100644
--- a/tests/unit/modules/test_zypper.py
+++ b/tests/unit/modules/test_zypper.py
@@ -429,7 +429,13 @@ class ZypperTestCase(TestCase, LoaderModuleMockMixin):
zypper_mock.assert_any_call('update', '--auto-agree-with-licenses')
with patch('salt.modules.zypper.list_pkgs',
- MagicMock(side_effect=[{"vim": "1.1"}, {"vim": "1.1,1.2"}])):
+ MagicMock(side_effect=[{"kernel-default": "1.1"}, {"kernel-default": "1.1,1.2"}])):
+ ret = zypper.upgrade()
+ self.assertDictEqual(ret, {"kernel-default": {"old": "1.1", "new": "1.1,1.2"}})
+ zypper_mock.assert_any_call('update', '--auto-agree-with-licenses')
+
+ with patch('salt.modules.zypper.list_pkgs',
+ MagicMock(side_effect=[{"vim": "1.1"}, {"vim": "1.2"}])):
ret = zypper.upgrade()
self.assertDictEqual(ret, {"vim": {"old": "1.1", "new": "1.2"}})
zypper_mock.assert_any_call('update', '--auto-agree-with-licenses')
--
2.17.1

View File

@ -1,43 +0,0 @@
From 0908344fae3edda3372ee03820ea30ebcfe8980e Mon Sep 17 00:00:00 2001
From: Bo Maryniuk <bo@suse.de>
Date: Thu, 13 Sep 2018 12:00:55 +0200
Subject: [PATCH] Retire MD5 checksum for pkg mgmt plugins
Use SHA256 algorithm for zyppnotify plugin
Remove an empty line
---
scripts/suse/yum/plugins/yumnotify.py | 2 +-
scripts/suse/zypper/plugins/commit/zyppnotify | 2 +-
2 files changed, 2 insertions(+), 2 deletions(-)
diff --git a/scripts/suse/yum/plugins/yumnotify.py b/scripts/suse/yum/plugins/yumnotify.py
index 268e1e9531..dd2485c886 100644
--- a/scripts/suse/yum/plugins/yumnotify.py
+++ b/scripts/suse/yum/plugins/yumnotify.py
@@ -32,7 +32,7 @@ def _get_checksum():
Returns:
hexdigest
"""
- digest = hashlib.md5()
+ digest = hashlib.sha256()
with open(RPM_PATH, "rb") as rpm_db_fh:
while True:
buff = rpm_db_fh.read(0x1000)
diff --git a/scripts/suse/zypper/plugins/commit/zyppnotify b/scripts/suse/zypper/plugins/commit/zyppnotify
index 268298b108..b64badb119 100755
--- a/scripts/suse/zypper/plugins/commit/zyppnotify
+++ b/scripts/suse/zypper/plugins/commit/zyppnotify
@@ -35,7 +35,7 @@ class DriftDetector(Plugin):
Returns:
hexdigest
'''
- digest = hashlib.md5()
+ digest = hashlib.sha256()
with open(self.rpm_path, "rb") as rpm_db_fh:
while True:
buff = rpm_db_fh.read(0x1000)
--
2.20.1

View File

@ -1,3 +1,102 @@
-------------------------------------------------------------------
Thu Feb 28 16:18:38 UTC 2019 - Jochen Breuer <jbreuer@suse.de>
- No longer limiting Python3 version to <3.7
-------------------------------------------------------------------
Thu Feb 28 08:24:16 UTC 2019 - Jochen Breuer <jbreuer@suse.de>
- Async batch implementation
- Added:
* async-batch-implementation.patch
-------------------------------------------------------------------
Wed Feb 27 14:28:55 UTC 2019 - jbreuer@suse.de
- Update to Salt 2019.2.0 release
For further information see:
https://docs.saltstack.com/en/latest/topics/releases/2019.2.0.html
- Added:
* add-virt.all_capabilities.patch
* add-virt.volume_infos-and-virt.volume_delete.patch
* don-t-call-zypper-with-more-than-one-no-refresh.patch
* include-aliases-in-the-fqdns-grains.patch
* temporary-fix-extend-the-whitelist-of-allowed-comman.patch
- Removed:
* accounting-for-when-files-in-an-archive-contain-non-.patch
* add-engine-relaying-libvirt-events.patch
* add-other-attribute-to-gecos-fields-to-avoid-inconsi.patch
* add-support-for-python-3.7.patch
* align-suse-salt-master.service-limitnofiles-limit-wi.patch
* avoid-incomprehensive-message-if-crashes.patch
* change-stringio-import-in-python2-to-import-the-clas.patch
* decode-file-contents-for-python2-bsc-1102013.patch
* do-not-override-jid-on-returners-only-sending-back-t.patch
* don-t-error-on-retcode-0-in-libcrypto.openssl_init_c.patch
* feat-add-grain-for-all-fqdns.patch
* fix-async-call-to-process-manager.patch
* fix-decrease-loglevel-when-unable-to-resolve-addr.patch
* fix-deprecation-warning-bsc-1095507.patch
* fix-diffing-binary-files-in-file.get_diff-bsc-109839.patch
* fix-for-ec2-rate-limit-failures.patch
* fix-for-errno-0-resolver-error-0-no-error-bsc-108758.patch
* fix-for-sorting-of-multi-version-packages-bsc-109717.patch
* fix-index-error-when-running-on-python-3.patch
* fix-latin1-encoding-problems-on-file-module-bsc-1116.patch
* fix-mine.get-not-returning-data-workaround-for-48020.patch
* fix-unboundlocalerror-in-file.get_diff.patch
* fixed-usage-of-ipaddress.patch
* fixing-issue-when-a-valid-token-is-generated-even-wh.patch
* get-os_family-for-rpm-distros-from-the-rpm-macros.-u.patch
* improved-handling-of-ldap-group-id.patch
* only-do-reverse-dns-lookup-on-ips-for-salt-ssh.patch
* option-to-merge-current-pillar-with-opts-pillar-duri.patch
* prepend-current-directory-when-path-is-just-filename.patch
* prevent-zypper-from-parsing-repo-configuration-from-.patch
* remove-old-hack-when-reporting-multiversion-packages.patch
* retire-md5-checksum-for-pkg-mgmt-plugins.patch
* show-recommendations-for-salt-ssh-cross-version-pyth.patch
* strip-trailing-commas-on-linux-user-gecos-fields.patch
* support-use-of-gce-instance-credentials-109.patch
* update-error-list-for-zypper.patch
* x509-fixes-for-remote-signing-106.patch
- Modified:
* add-all_versions-parameter-to-include-all-installed-.patch
* add-cpe_name-for-osversion-grain-parsing-u-49946.patch
* add-environment-variable-to-know-if-yum-is-invoked-f.patch
* add-hold-unhold-functions.patch
* add-saltssh-multi-version-support-across-python-inte.patch
* azurefs-gracefully-handle-attributeerror.patch
* bugfix-any-unicode-string-of-length-16-will-raise-ty.patch
* debian-info_installed-compatibility-50453.patch
* do-not-load-pip-state-if-there-is-no-3rd-party-depen.patch
* fall-back-to-pymysql.patch
* fix-for-suse-expanded-support-detection.patch
* fix-git_pillar-merging-across-multiple-__env__-repos.patch
* fix-ipv6-scope-bsc-1108557.patch
* fix-issue-2068-test.patch
* fix-zypper.list_pkgs-to-be-aligned-with-pkg-state.patch
* fixes-cve-2018-15750-cve-2018-15751.patch
* get-os_arch-also-without-rpm-package-installed.patch
* integration-of-msi-authentication-with-azurearm-clou.patch
* loosen-azure-sdk-dependencies-in-azurearm-cloud-driv.patch
* remove-arch-from-name-when-pkg.list_pkgs-is-called-w.patch
* use-adler32-algorithm-to-compute-string-checksums.patch
* x509-fixes-111.patch
* zypper-add-root-configuration-parameter.patch
-------------------------------------------------------------------
Wed Jan 23 15:25:29 UTC 2019 - bo@suse.de
- Add root parameter to Zypper module
- Added:
* zypper-add-root-configuration-parameter.patch
-------------------------------------------------------------------
Wed Jan 16 16:28:09 UTC 2019 - psuarezhernandez@suse.com

200
salt.spec
View File

@ -52,13 +52,13 @@
%bcond_with builddocs
Name: salt
Version: 2018.3.2
Version: 2019.2.0
Release: 0
Summary: A parallel remote execution system
License: Apache-2.0
Group: System/Management
Url: http://saltstack.org/
Source: https://github.com/saltstack/salt/archive/v%{version}.tar.gz
Source: v%{version}.tar.gz
Source1: README.SUSE
Source2: salt-tmpfiles.d
Source3: html.tar.bz2
@ -69,156 +69,94 @@ Patch1: run-salt-master-as-dedicated-salt-user.patch
Patch2: run-salt-api-as-user-salt-bsc-1064520.patch
Patch3: activate-all-beacons-sources-config-pillar-grains.patch
Patch4: avoid-excessive-syslogging-by-watchdog-cronjob-58.patch
Patch5: feat-add-grain-for-all-fqdns.patch
Patch6: fix-bsc-1065792.patch
# PATCH-FIX_UPSTREAM https://github.com/saltstack/salt/pull/46575
Patch7: fix-decrease-loglevel-when-unable-to-resolve-addr.patch
Patch5: fix-bsc-1065792.patch
# PATCH-FIX_UPSTREAM https://github.com/saltstack/salt/pull/46684
Patch8: add-saltssh-multi-version-support-across-python-inte.patch
# PATCH-FIX_UPSTREAM https://github.com/saltstack/salt/pull/46635
Patch9: fix-for-errno-0-resolver-error-0-no-error-bsc-108758.patch
Patch6: add-saltssh-multi-version-support-across-python-inte.patch
# PATCH-FIX_UPSTREAM https://github.com/saltstack/salt/pull/46890
Patch10: fall-back-to-pymysql.patch
# PATCH-FIX_UPSTREAM https://github.com/saltstack/salt/pull/47149
Patch11: strip-trailing-commas-on-linux-user-gecos-fields.patch
# PATCH-FIX_UPSTREAM https://github.com/saltstack/salt/pull/47232
Patch12: fixed-usage-of-ipaddress.patch
# PATCH-FIX_UPSTREAM https://github.com/saltstack/salt/pull/47504
Patch13: option-to-merge-current-pillar-with-opts-pillar-duri.patch
# PATCH-FIX_UPSTREAM https://github.com/saltstack/salt/pull/47471
Patch14: do-not-override-jid-on-returners-only-sending-back-t.patch
Patch7: fall-back-to-pymysql.patch
# PATCH-FIX_OPENSUSE bsc#1091371
Patch15: enable-passing-a-unix_socket-for-mysql-returners-bsc.patch
# PATCH-FIX_UPSTREAM https://github.com/saltstack/salt/pull/47211
Patch16: fix-for-ec2-rate-limit-failures.patch
Patch8: enable-passing-a-unix_socket-for-mysql-returners-bsc.patch
# PATCH-FIX_UPSTREAM https://github.com/saltstack/salt/pull/47638
Patch17: add-all_versions-parameter-to-include-all-installed-.patch
# PATCH-FIX_UPSTREAM https://github.com/saltstack/salt/pull/47765
Patch18: prevent-zypper-from-parsing-repo-configuration-from-.patch
# PATCH-FIX_UPSTREAM https://github.com/saltstack/salt/pull/47149
Patch19: add-other-attribute-to-gecos-fields-to-avoid-inconsi.patch
# PATCH-FIX_UPSTREAM https://github.com/saltstack/salt/pull/47908
Patch20: align-suse-salt-master.service-limitnofiles-limit-wi.patch
# PATCH-FIX_OPENSUSE bsc#1095507
Patch21: fix-deprecation-warning-bsc-1095507.patch
Patch9: add-all_versions-parameter-to-include-all-installed-.patch
# PATCH-FIX_OPENSUSE bsc#1057635
Patch22: add-environment-variable-to-know-if-yum-is-invoked-f.patch
Patch10: add-environment-variable-to-know-if-yum-is-invoked-f.patch
# PATCH-FIX_OPENSUSE
Patch23: add-custom-suse-capabilities-as-grains.patch
# PATCH-FIX_OPENSUSE bsc#1098394 https://github.com/saltstack/salt/pull/47061
Patch24: fix-diffing-binary-files-in-file.get_diff-bsc-109839.patch
# PATCH-FIX_OPENSUSE bsc#1072599
Patch25: show-recommendations-for-salt-ssh-cross-version-pyth.patch
# PATCH-FIX_UPSTREAM https://github.com/saltstack/salt/pull/47405
Patch26: fix-unboundlocalerror-in-file.get_diff.patch
Patch11: add-custom-suse-capabilities-as-grains.patch
# PATCH-FIX_UPSTREAM https://github.com/saltstack/salt/pull/48294
Patch27: fix-zypper.list_pkgs-to-be-aligned-with-pkg-state.patch
# PATCH-FIX_UPSTREAM https://github.com/saltstack/salt/pull/47572
# PATCH-FIX_UPSTREAM https://github.com/saltstack/salt/pull/48015
Patch28: accounting-for-when-files-in-an-archive-contain-non-.patch
# PATCH-FIX_UPSTREAM https://github.com/saltstack/salt/pull/48712
Patch29: remove-old-hack-when-reporting-multiversion-packages.patch
# PATCH-FIX_UPSTREAM https://github.com/saltstack/salt/pull/46461
# PATCH-FIX_UPSTREAM https://github.com/saltstack/salt/pull/46928
# PATCH-FIX_UPSTREAM https://github.com/saltstack/salt/pull/46957
# PATCH-FIX_UPSTREAM https://github.com/saltstack/salt/pull/47243
Patch30: add-engine-relaying-libvirt-events.patch
# PATCH-FIX_UPSTREAM https://github.com/saltstack/salt/pull/48781
Patch31: avoid-incomprehensive-message-if-crashes.patch
# PATCH-FIX_UPSTREAM https://github.com/saltstack/salt/pull/48765
Patch32: fix-mine.get-not-returning-data-workaround-for-48020.patch
# PATCH-FIX_OPENSUSE bsc#1097174 and bsc#1097413
Patch33: fix-for-sorting-of-multi-version-packages-bsc-109717.patch
# PATCH-FIX_UPSTREAM https://github.com/saltstack/salt/pull/48863
Patch34: decode-file-contents-for-python2-bsc-1102013.patch
# PATCH-FIX_UPSTREAM https://github.com/saltstack/salt/pull/49052
Patch35: add-support-for-python-3.7.patch
# PATCH-FIX_UPSTREAM https://github.com/saltstack/salt/pull/48771
Patch36: only-do-reverse-dns-lookup-on-ips-for-salt-ssh.patch
# PATCH-FIX_UPSTREAM https://github.com/saltstack/salt/pull/49277
Patch37: prepend-current-directory-when-path-is-just-filename.patch
Patch12: fix-zypper.list_pkgs-to-be-aligned-with-pkg-state.patch
# PATCH-FIX_UPSTREAM https://github.com/saltstack/salt/pull/49063
Patch38: integration-of-msi-authentication-with-azurearm-clou.patch
Patch13: integration-of-msi-authentication-with-azurearm-clou.patch
# PATCH-FIX_UPSTREAM https://github.com/saltstack/salt/pull/49538
Patch39: fix-for-suse-expanded-support-detection.patch
# PATCH-FIX_UPSTREAM https://github.com/saltstack/salt/pull/49508
Patch40: x509-fixes-for-remote-signing-106.patch
# PATCH-FIX_UPSTREAM https://github.com/saltstack/salt/pull/49555
Patch41: change-stringio-import-in-python2-to-import-the-clas.patch
Patch14: fix-for-suse-expanded-support-detection.patch
# PATCH-FIX_UPSTREAM https://github.com/saltstack/salt/pull/48812
Patch42: use-adler32-algorithm-to-compute-string-checksums.patch
Patch15: use-adler32-algorithm-to-compute-string-checksums.patch
# PATCH-FIX_UPSTREAM https://github.com/saltstack/salt/pull/49497
Patch43: x509-fixes-111.patch
# PATCH-FIX_UPSTREAM https://github.com/saltstack/salt/pull/49605
Patch44: support-use-of-gce-instance-credentials-109.patch
# PATCH-FIX_UPSTREAM https://github.com/saltstack/salt/pull/49722
Patch45: fix-index-error-when-running-on-python-3.patch
Patch16: x509-fixes-111.patch
# PATCH-FIX_UPSTREAM https://github.com/saltstack/salt/pull/49696
Patch46: loosen-azure-sdk-dependencies-in-azurearm-cloud-driv.patch
Patch17: loosen-azure-sdk-dependencies-in-azurearm-cloud-driv.patch
# PATCH-FIX_UPSTREAM https://github.com/saltstack/salt/pull/49737
Patch47: do-not-load-pip-state-if-there-is-no-3rd-party-depen.patch
# PATCH-FIX_UPSTREAM https://github.com/saltstack/salt/pull/49806
Patch48: update-error-list-for-zypper.patch
Patch18: do-not-load-pip-state-if-there-is-no-3rd-party-depen.patch
# PATCH-FIX_UPSTREAM https://github.com/saltstack/salt/pull/49815
Patch49: fix-ipv6-scope-bsc-1108557.patch
Patch19: fix-ipv6-scope-bsc-1108557.patch
# PATCH-FIX_UPSTREAM https://github.com/saltstack/salt/pull/49480
Patch50: early-feature-support-config.patch
# PATCH-FIX_UPSTREAM https://github.com/saltstack/salt/pull/49085
Patch51: fix-async-call-to-process-manager.patch
Patch20: early-feature-support-config.patch
# PATCH-FIX_UPSTREAM https://github.com/saltstack/salt/pull/49908
Patch52: bugfix-any-unicode-string-of-length-16-will-raise-ty.patch
Patch21: bugfix-any-unicode-string-of-length-16-will-raise-ty.patch
# PATCH-FIX_UPSTREAM https://github.com/saltstack/salt/pull/49936
Patch53: make-profiles-a-package.patch
# PATCH-FIX_UPSTREAM https://github.com/saltstack/salt/pull/49930
Patch54: get-os_family-for-rpm-distros-from-the-rpm-macros.-u.patch
Patch22: make-profiles-a-package.patch
# PATCH-FIX_UPSTREAM https://github.com/saltstack/salt/pull/49946
Patch55: add-cpe_name-for-osversion-grain-parsing-u-49946.patch
Patch23: add-cpe_name-for-osversion-grain-parsing-u-49946.patch
# PATCH-FIX_OPENSUSE: Fix unit test for grains core
Patch56: fix-unit-test-for-grains-core.patch
Patch24: fix-unit-test-for-grains-core.patch
# PATCH-FIX_UPSTREAM https://github.com/saltstack/salt/pull/50049
# PATCH-FIX_UPSTREAM https://github.com/saltstack/salt/pull/50072
Patch57: preserving-signature-in-module.run-state-u-50049.patch
Patch25: preserving-signature-in-module.run-state-u-50049.patch
# PATCH-FIX_UPSTREAM https://github.com/saltstack/salt/pull/50095
Patch58: support-config-non-root-permission-issues-fixes-u-50.patch
Patch26: support-config-non-root-permission-issues-fixes-u-50.patch
# PATCH-FIX_UPSTREAM https://github.com/saltstack/salt/pull/50018
Patch59: add-multi-file-support-and-globbing-to-the-filetree-.patch
Patch27: add-multi-file-support-and-globbing-to-the-filetree-.patch
# PATCH-FIX_UPSTREAM https://github.com/saltstack/salt/pull/49761
# PATCH-FIX_UPSTREAM https://github.com/saltstack/salt/pull/50201
Patch60: fixes-cve-2018-15750-cve-2018-15751.patch
# PATCH-FIX_UPSTREAM https://github.com/saltstack/salt/pull/48491
Patch61: improved-handling-of-ldap-group-id.patch
# PATCH-FIX_UPSTREAM https://github.com/saltstack/salt/pull/48901
Patch62: fixing-issue-when-a-valid-token-is-generated-even-wh.patch
Patch28: fixes-cve-2018-15750-cve-2018-15751.patch
# PATCH-FIX_UPSTREAM https://github.com/saltstack/salt/pull/50417
Patch63: fix-git_pillar-merging-across-multiple-__env__-repos.patch
Patch29: fix-git_pillar-merging-across-multiple-__env__-repos.patch
# PATCH-FIX_UPSTREAM https://github.com/saltstack/salt/pull/50523
Patch64: get-os_arch-also-without-rpm-package-installed.patch
Patch30: get-os_arch-also-without-rpm-package-installed.patch
# PATCH-FIX_UPSTREAM https://github.com/saltstack/salt/pull/50392
Patch65: make-aptpkg.list_repos-compatible-on-enabled-disable.patch
Patch31: make-aptpkg.list_repos-compatible-on-enabled-disable.patch
# PATCH-FIX_UPSTREAM https://github.com/saltstack/salt/pull/50453
Patch66: debian-info_installed-compatibility-50453.patch
# PATCH-FIX_UPSTREAM https://github.com/saltstack/salt/pull/48580
Patch67: don-t-error-on-retcode-0-in-libcrypto.openssl_init_c.patch
# PATCH-FIX_UPSTREAM https://github.com/saltstack/salt/pull/48503
# PATCH-FIX_UPSTREAM https://github.com/saltstack/salt/pull/48934
Patch68: fix-latin1-encoding-problems-on-file-module-bsc-1116.patch
Patch32: debian-info_installed-compatibility-50453.patch
# PATCH-FIX_UPSTREAM https://github.com/saltstack/salt/pull/50742
Patch69: decide-if-the-source-should-be-actually-skipped.patch
Patch33: decide-if-the-source-should-be-actually-skipped.patch
# PATCH-FIX_UPSTREAM https://github.com/saltstack/salt/pull/50773
Patch70: add-hold-unhold-functions.patch
# PATCH-FIX_UPSTREAM https://github.com/saltstack/salt/pull/49639
Patch71: retire-md5-checksum-for-pkg-mgmt-plugins.patch
Patch34: add-hold-unhold-functions.patch
# PATCH-FIX_UPSTREAM https://github.com/saltstack/salt/pull/50401
# NOTE: This is a techpreview as well as in Fluorine! Release only in Neon.
Patch72: add-supportconfig-module-for-remote-calls-and-saltss.patch
Patch35: add-supportconfig-module-for-remote-calls-and-saltss.patch
# PATCH-FIX_UPSTREAM https://github.com/saltstack/salt/pull/50567
Patch73: azurefs-gracefully-handle-attributeerror.patch
Patch36: azurefs-gracefully-handle-attributeerror.patch
# PATCH_FIX_OPENSUSE: https://github.com/openSUSE/salt/pull/116
Patch74: return-the-expected-powerpc-os-arch-bsc-1117995.patch
Patch37: return-the-expected-powerpc-os-arch-bsc-1117995.patch
# PATCH-FIX_UPSTREAM https://github.com/saltstack/salt/pull/51108
Patch75: remove-arch-from-name-when-pkg.list_pkgs-is-called-w.patch
Patch38: remove-arch-from-name-when-pkg.list_pkgs-is-called-w.patch
# PATCH-FIX_UPSTREAM https://github.com/saltstack/salt/pull/51119
Patch76: fix-issue-2068-test.patch
Patch39: fix-issue-2068-test.patch
# PATCH-FIX_UPSTREAM https://github.com/saltstack/salt/pull/50125
Patch40: zypper-add-root-configuration-parameter.patch
# PATCH_FIX_OPENSUSE: Temporary fix allowing "id_" and "force" params while upstrem figures it out
Patch41: temporary-fix-extend-the-whitelist-of-allowed-comman.patch
# PATCH-FIX_UPSTREAM https://github.com/saltstack/salt/pull/51382
Patch42: don-t-call-zypper-with-more-than-one-no-refresh.patch
# PATCH-FIX_UPSTREAM https://github.com/saltstack/salt/pull/50109
# PATCH_FIX_OPENSUSE https://github.com/openSUSE/salt/pull/121
Patch43: add-virt.all_capabilities.patch
# PATCH-FIX_UPSTREAM https://github.com/saltstack/salt/pull/51691
Patch44: add-virt.volume_infos-and-virt.volume_delete.patch
# PATCH-FIX_UPSTREAM https://github.com/saltstack/salt/pull/51384
Patch45: include-aliases-in-the-fqdns-grains.patch
# PATCH-FIX_UPSTREAM https://github.com/saltstack/salt/pull/50546
# PATCH-FIX_UPSTREAM https://github.com/saltstack/salt/pull/51863
Patch46: async-batch-implementation.patch
# BuildRoot: %{_tmppath}/%{name}-%{version}-build
BuildRoot: %{_tmppath}/%{name}-%{version}-build
@ -674,7 +612,7 @@ Zsh command line completion support for %{name}.
%prep
# %setup -q -n salt-%{version}
%setup -q -n salt-%{version}
%setup -q -n salt-2019.2.0-suse
cp %{S:1} .
cp %{S:5} ./.travis.yml
%patch1 -p1
@ -723,36 +661,6 @@ cp %{S:5} ./.travis.yml
%patch44 -p1
%patch45 -p1
%patch46 -p1
%patch47 -p1
%patch48 -p1
%patch49 -p1
%patch50 -p1
%patch51 -p1
%patch52 -p1
%patch53 -p1
%patch54 -p1
%patch55 -p1
%patch56 -p1
%patch57 -p1
%patch58 -p1
%patch59 -p1
%patch60 -p1
%patch61 -p1
%patch62 -p1
%patch63 -p1
%patch64 -p1
%patch65 -p1
%patch66 -p1
%patch67 -p1
%patch68 -p1
%patch69 -p1
%patch70 -p1
%patch71 -p1
%patch72 -p1
%patch73 -p1
%patch74 -p1
%patch75 -p1
%patch76 -p1
%build
%if 0%{?build_py2}

View File

@ -1,63 +0,0 @@
From 15e97fd2916176fe850850fe90983ac95a1f8e7b Mon Sep 17 00:00:00 2001
From: Erik Johnson <palehose@gmail.com>
Date: Mon, 11 Jun 2018 14:46:58 -0500
Subject: [PATCH] Show recommendations for salt-ssh cross-version python
errors
This shows more accurate information on how to resolve version issues
(e.g. master only has Salt deps installed for Python 3 but remote host
has no Python 3 installed).
Use parenthesis for line continuation
---
salt/client/ssh/__init__.py | 26 +++++++++++++++++++++++++-
1 file changed, 25 insertions(+), 1 deletion(-)
diff --git a/salt/client/ssh/__init__.py b/salt/client/ssh/__init__.py
index f1300b5698..8a85cc2480 100644
--- a/salt/client/ssh/__init__.py
+++ b/salt/client/ssh/__init__.py
@@ -1387,6 +1387,30 @@ ARGS = {arguments}\n'''.format(config=self.minion_config,
perm_error_fmt = 'Permissions problem, target user may need '\
'to be root or use sudo:\n {0}'
+ def _version_mismatch_error():
+ messages = {
+ 2: {
+ 6: 'Install Python 2.7 / Python 3 Salt dependencies on the Salt SSH master \n'
+ 'to interact with Python 2.7 / Python 3 targets',
+ 7: 'Install Python 2.6 / Python 3 Salt dependencies on the Salt SSH master \n'
+ 'to interact with Python 2.6 / Python 3 targets',
+ },
+ 3: {
+ 'default': '- Install Python 2.6/2.7 Salt dependencies on the Salt SSH \n'
+ ' master to interact with Python 2.6/2.7 targets\n'
+ '- Install Python 3 on the target machine(s)',
+ },
+ 'default': 'Matching major/minor Python release (>=2.6) needed both on the Salt SSH \n'
+ 'master and target machine',
+ }
+ major, minor = sys.version_info[:2]
+ help_msg = (
+ messages.get(major, {}).get(minor)
+ or messages.get(major, {}).get('default')
+ or messages['default']
+ )
+ return 'Python version error. Recommendation(s) follow:\n' + help_msg
+
errors = [
(
(),
@@ -1396,7 +1420,7 @@ ARGS = {arguments}\n'''.format(config=self.minion_config,
(
(salt.defaults.exitcodes.EX_THIN_PYTHON_INVALID,),
'Python interpreter is too old',
- 'salt requires python 2.6 or newer on target hosts, must have same major version as origin host'
+ _version_mismatch_error()
),
(
(salt.defaults.exitcodes.EX_THIN_CHECKSUM,),
--
2.13.7

View File

@ -1,55 +0,0 @@
From f515f99ee42ffaba30cee2e1941a7e9af9db7453 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Pablo=20Su=C3=A1rez=20Hern=C3=A1ndez?=
<psuarezhernandez@suse.com>
Date: Wed, 18 Apr 2018 12:05:35 +0100
Subject: [PATCH] Strip trailing commas on Linux user GECOS fields
Add unit tests for GECOS fields
---
salt/modules/useradd.py | 2 +-
tests/unit/modules/test_useradd.py | 18 ++++++++++++++++++
2 files changed, 19 insertions(+), 1 deletion(-)
diff --git a/salt/modules/useradd.py b/salt/modules/useradd.py
index 545fe2a6f1..a61ba0e960 100644
--- a/salt/modules/useradd.py
+++ b/salt/modules/useradd.py
@@ -81,7 +81,7 @@ def _build_gecos(gecos_dict):
return '{0},{1},{2},{3}'.format(gecos_dict.get('fullname', ''),
gecos_dict.get('roomnumber', ''),
gecos_dict.get('workphone', ''),
- gecos_dict.get('homephone', ''))
+ gecos_dict.get('homephone', '')).rstrip(',')
def _update_gecos(name, key, value, root=None):
diff --git a/tests/unit/modules/test_useradd.py b/tests/unit/modules/test_useradd.py
index eb983685bb..fa30a0df71 100644
--- a/tests/unit/modules/test_useradd.py
+++ b/tests/unit/modules/test_useradd.py
@@ -393,3 +393,21 @@ class UserAddTestCase(TestCase, LoaderModuleMockMixin):
mock = MagicMock(side_effect=[{'name': ''}, False, {'name': ''}])
with patch.object(useradd, 'info', mock):
self.assertFalse(useradd.rename('salt', 'salt'))
+
+ def test_build_gecos_field(self):
+ '''
+ Test if gecos fields are built correctly (removing trailing commas)
+ '''
+ test_gecos = {'fullname': 'Testing',
+ 'roomnumber': 1234,
+ 'workphone': 22222,
+ 'homephone': 99999}
+ expected_gecos_fields = 'Testing,1234,22222,99999'
+ self.assertEqual(useradd._build_gecos(test_gecos), expected_gecos_fields)
+ test_gecos.pop('roomnumber')
+ test_gecos.pop('workphone')
+ expected_gecos_fields = 'Testing,,,99999'
+ self.assertEqual(useradd._build_gecos(test_gecos), expected_gecos_fields)
+ test_gecos.pop('homephone')
+ expected_gecos_fields = 'Testing'
+ self.assertEqual(useradd._build_gecos(test_gecos), expected_gecos_fields)
--
2.13.7

View File

@ -1,33 +0,0 @@
From 4571116a54ff51683cb695ce795f04f8b318b440 Mon Sep 17 00:00:00 2001
From: jgleissner <jgleissner@suse.com>
Date: Wed, 19 Sep 2018 14:37:12 +0200
Subject: [PATCH] Support use of GCE instance credentials (#109)
* Integration of MSI authentication with azurearm cloud driver (#105)
* allow empty service_account_private_key in GCE driver
Passing an emoty service_account_private_key to libcloud will enable
authentication using instance credentials, which is used by CaaSP in GCE.
---
salt/cloud/clouds/gce.py | 3 ++-
1 file changed, 2 insertions(+), 1 deletion(-)
diff --git a/salt/cloud/clouds/gce.py b/salt/cloud/clouds/gce.py
index 75109491be..1018e36ed5 100644
--- a/salt/cloud/clouds/gce.py
+++ b/salt/cloud/clouds/gce.py
@@ -134,7 +134,8 @@ def __virtual__():
parameters = details['gce']
pathname = os.path.expanduser(parameters['service_account_private_key'])
- if salt.utils.cloud.check_key_path_and_mode(
+ # empty pathname will tell libcloud to use instance credentials
+ if pathname and salt.utils.cloud.check_key_path_and_mode(
provider, pathname
) is False:
return False
--
2.17.1

View File

@ -0,0 +1,26 @@
From c9c50ab75b4a8a73f57e8c2eeaa24401409e8c3c Mon Sep 17 00:00:00 2001
From: Bo Maryniuk <bo@suse.de>
Date: Thu, 24 Jan 2019 18:12:35 +0100
Subject: [PATCH] temporary fix: extend the whitelist of allowed commands
---
salt/auth/__init__.py | 2 ++
1 file changed, 2 insertions(+)
diff --git a/salt/auth/__init__.py b/salt/auth/__init__.py
index ca7168d00e..aa4c5c3670 100644
--- a/salt/auth/__init__.py
+++ b/salt/auth/__init__.py
@@ -46,6 +46,8 @@ AUTH_INTERNAL_KEYWORDS = frozenset([
'gather_job_timeout',
'kwarg',
'match',
+ "id_",
+ "force",
'metadata',
'print_event',
'raw',
--
2.20.1

View File

@ -1,62 +0,0 @@
From 71e7ecfbb07cf14680a2a39de48a6e60cd20cb07 Mon Sep 17 00:00:00 2001
From: Bo Maryniuk <bo@suse.de>
Date: Wed, 26 Sep 2018 17:54:53 +0200
Subject: [PATCH] Update error list for zypper
Add error logging
---
salt/modules/zypper.py | 30 ++++++++++++++++++++++++++++--
1 file changed, 28 insertions(+), 2 deletions(-)
diff --git a/salt/modules/zypper.py b/salt/modules/zypper.py
index e4423cf1fc..6845e44ab6 100644
--- a/salt/modules/zypper.py
+++ b/salt/modules/zypper.py
@@ -75,7 +75,25 @@ class _Zypper(object):
Allows serial zypper calls (first came, first won).
'''
- SUCCESS_EXIT_CODES = [0, 100, 101, 102, 103]
+ SUCCESS_EXIT_CODES = {
+ 0: 'Successful run of zypper with no special info.',
+ 100: 'Patches are available for installation.',
+ 101: 'Security patches are available for installation.',
+ 102: 'Installation successful, reboot required.',
+ 103: 'Installation succesful, restart of the package manager itself required.',
+ }
+
+ WARNING_EXIT_CODES = {
+ 6: 'No repositories are defined.',
+ 7: 'The ZYPP library is locked.',
+ 106: 'Some repository had to be disabled temporarily because it failed to refresh. '
+ 'You should check your repository configuration (e.g. zypper ref -f).',
+ 107: 'Installation basically succeeded, but some of the packages %post install scripts returned an error. '
+ 'These packages were successfully unpacked to disk and are registered in the rpm database, '
+ 'but due to the failed install script they may not work as expected. The failed scripts output might '
+ 'reveal what actually went wrong. Any scripts output is also logged to /var/log/zypp/history.'
+ }
+
LOCK_EXIT_CODE = 7
XML_DIRECTIVES = ['-x', '--xmlout']
ZYPPER_LOCK = '/var/run/zypp.pid'
@@ -188,7 +206,15 @@ class _Zypper(object):
:return:
'''
- return self.exit_code not in self.SUCCESS_EXIT_CODES
+ if self.exit_code:
+ msg = self.SUCCESS_EXIT_CODES.get(self.exit_code)
+ if msg:
+ log.info(msg)
+ msg = self.WARNING_EXIT_CODES.get(self.exit_code)
+ if msg:
+ log.warning(msg)
+
+ return self.exit_code not in self.SUCCESS_EXIT_CODES and self.exit_code not in self.WARNING_EXIT_CODES
def _is_lock(self):
'''
--
2.19.0

View File

@ -1,4 +1,4 @@
From 1cb2d2bc6c1cf1a39e735120c184d6ade9e64c34 Mon Sep 17 00:00:00 2001
From 9d09fcb60b8babd415af76812c93d38b6cbce661 Mon Sep 17 00:00:00 2001
From: Bo Maryniuk <bo@suse.de>
Date: Sat, 28 Jul 2018 22:59:04 +0200
Subject: [PATCH] Use Adler32 algorithm to compute string checksums
@ -15,24 +15,18 @@ Choose CRC method, default to faster but less reliable "adler32", if crc is in u
Add warning for Sodium.
---
salt/config/__init__.py | 13 +++++++++-
salt/grains/core.py | 54 +++++++++++++++++++++++++++--------------
2 files changed, 48 insertions(+), 19 deletions(-)
salt/config/__init__.py | 7 +++++-
salt/grains/core.py | 53 +++++++++++++++++++++++++++--------------
2 files changed, 41 insertions(+), 19 deletions(-)
diff --git a/salt/config/__init__.py b/salt/config/__init__.py
index feda0abac1..59df7e1cba 100644
index 6b74b90ce0..5d0c18b5d1 100644
--- a/salt/config/__init__.py
+++ b/salt/config/__init__.py
@@ -1186,6 +1186,16 @@ VALID_OPTS = {
@@ -1212,6 +1212,10 @@ VALID_OPTS = {
# Enable calling ssh minions from the salt master
'enable_ssh_minions': bool,
+
+ # Thorium saltenv
+ 'thoriumenv': (type(None), six.string_types),
+
+ # Thorium top file location
+ 'thorium_top': six.string_types,
# Thorium top file location
'thorium_top': six.string_types,
+
+ # Use Adler32 hashing algorithm for server_id (default False until Sodium, "adler32" after)
+ # Possible values are: False, adler32, crc32
@ -40,7 +34,7 @@ index feda0abac1..59df7e1cba 100644
}
# default configurations
@@ -1486,7 +1496,8 @@ DEFAULT_MINION_OPTS = {
@@ -1520,7 +1524,8 @@ DEFAULT_MINION_OPTS = {
},
'discovery': False,
'schedule': {},
@ -51,7 +45,7 @@ index feda0abac1..59df7e1cba 100644
DEFAULT_MASTER_OPTS = {
diff --git a/salt/grains/core.py b/salt/grains/core.py
index a5c3a6a8cf..6aaf38096d 100644
index 85a929a485..378d3cb786 100644
--- a/salt/grains/core.py
+++ b/salt/grains/core.py
@@ -20,6 +20,7 @@ import platform
@ -61,17 +55,16 @@ index a5c3a6a8cf..6aaf38096d 100644
+import zlib
from errno import EACCES, EPERM
import datetime
@@ -46,6 +47,8 @@ import salt.utils.files
import salt.utils.network
import salt.utils.path
import warnings
@@ -61,6 +62,7 @@ import salt.utils.path
import salt.utils.pkg.rpm
import salt.utils.platform
+import salt.utils.stringutils
import salt.utils.stringutils
+import salt.utils.versions
from salt.ext import six
from salt.ext.six.moves import range
@@ -2420,40 +2423,55 @@ def _hw_data(osdata):
@@ -2730,40 +2732,55 @@ def _hw_data(osdata):
return grains
@ -146,6 +139,6 @@ index a5c3a6a8cf..6aaf38096d 100644
def get_master():
--
2.19.0
2.20.1

View File

@ -1,3 +0,0 @@
version https://git-lfs.github.com/spec/v1
oid sha256:515df2eed05b1a31101dc8d7cfb52f554ced6db52417a3e9c2096f055807235b
size 13024996

3
v2019.2.0.tar.gz Normal file
View File

@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:263b7f6fd915eb8876795714cc4d2b6bb8ccb97423858b479eefb1c5429424d5
size 15065369

View File

@ -1,4 +1,4 @@
From 053d97afcc7486f7300e339bc56cb3c850cc523b Mon Sep 17 00:00:00 2001
From c5adc0c126e593d12c9b18bcf60f96336c75e4a8 Mon Sep 17 00:00:00 2001
From: Florian Bergmann <bergmannf@users.noreply.github.com>
Date: Fri, 14 Sep 2018 10:30:39 +0200
Subject: [PATCH] X509 fixes (#111)
@ -33,17 +33,16 @@ PEP8: line too long
* Fix unit tests
---
salt/modules/publish.py | 8 +-
salt/modules/x509.py | 132 ++++++++++++--------------------
salt/states/x509.py | 22 ++++--
tests/unit/modules/test_x509.py | 9 ++-
4 files changed, 74 insertions(+), 97 deletions(-)
salt/modules/publish.py | 8 +--
salt/modules/x509.py | 132 ++++++++++++++++------------------------
salt/states/x509.py | 22 ++++---
3 files changed, 69 insertions(+), 93 deletions(-)
diff --git a/salt/modules/publish.py b/salt/modules/publish.py
index 2de99583f4..ac31b4b65f 100644
index 62e3e98f2f..fda848d1ec 100644
--- a/salt/modules/publish.py
+++ b/salt/modules/publish.py
@@ -83,10 +83,8 @@ def _publish(
@@ -82,10 +82,8 @@ def _publish(
in minion configuration but `via_master` was specified.')
else:
# Find the master in the list of master_uris generated by the minion base class
@ -56,9 +55,9 @@ index 2de99583f4..ac31b4b65f 100644
if not matching_master_uris:
raise SaltInvocationError('Could not find match for {0} in \
@@ -176,6 +174,8 @@ def _publish(
else:
return ret
@@ -178,6 +176,8 @@ def _publish(
finally:
channel.close()
+ return {}
+
@ -66,10 +65,10 @@ index 2de99583f4..ac31b4b65f 100644
def publish(tgt,
fun,
diff --git a/salt/modules/x509.py b/salt/modules/x509.py
index 9901bc5bd9..45afcccd99 100644
index 8689bfad35..4126f34960 100644
--- a/salt/modules/x509.py
+++ b/salt/modules/x509.py
@@ -36,14 +36,13 @@ from salt.state import STATE_INTERNAL_KEYWORDS as _STATE_INTERNAL_KEYWORDS
@@ -38,14 +38,13 @@ from salt.state import STATE_INTERNAL_KEYWORDS as _STATE_INTERNAL_KEYWORDS
# Import 3rd Party Libs
try:
import M2Crypto
@ -87,7 +86,7 @@ index 9901bc5bd9..45afcccd99 100644
__virtualname__ = 'x509'
@@ -81,10 +80,7 @@ def __virtual__():
@@ -83,10 +82,7 @@ def __virtual__():
'''
only load this module if m2crypto is available
'''
@ -99,7 +98,7 @@ index 9901bc5bd9..45afcccd99 100644
class _Ctx(ctypes.Structure):
@@ -127,10 +123,8 @@ def _new_extension(name, value, critical=0, issuer=None, _pyfree=1):
@@ -129,10 +125,8 @@ def _new_extension(name, value, critical=0, issuer=None, _pyfree=1):
doesn't support getting the publickeyidentifier from the issuer
to create the authoritykeyidentifier extension.
'''
@ -112,7 +111,7 @@ index 9901bc5bd9..45afcccd99 100644
# ensure name and value are bytes
name = salt.utils.stringutils.to_str(name)
@@ -145,7 +139,7 @@ def _new_extension(name, value, critical=0, issuer=None, _pyfree=1):
@@ -147,7 +141,7 @@ def _new_extension(name, value, critical=0, issuer=None, _pyfree=1):
x509_ext_ptr = M2Crypto.m2.x509v3_ext_conf(None, ctx, name, value)
lhash = None
except AttributeError:
@ -121,7 +120,7 @@ index 9901bc5bd9..45afcccd99 100644
ctx = M2Crypto.m2.x509v3_set_conf_lhash(
lhash) # pylint: disable=no-member
# ctx not zeroed
@@ -196,10 +190,8 @@ def _get_csr_extensions(csr):
@@ -198,10 +192,8 @@ def _get_csr_extensions(csr):
csrtempfile.flush()
csryaml = _parse_openssl_req(csrtempfile.name)
csrtempfile.close()
@ -134,7 +133,7 @@ index 9901bc5bd9..45afcccd99 100644
if not csrexts:
return ret
@@ -294,7 +286,7 @@ def _get_signing_policy(name):
@@ -296,7 +288,7 @@ def _get_signing_policy(name):
signing_policy = policies.get(name)
if signing_policy:
return signing_policy
@ -143,9 +142,9 @@ index 9901bc5bd9..45afcccd99 100644
def _pretty_hex(hex_str):
@@ -321,9 +313,11 @@ def _text_or_file(input_):
@@ -335,9 +327,11 @@ def _text_or_file(input_):
'''
if os.path.isfile(input_):
if _isfile(input_):
with salt.utils.files.fopen(input_) as fp_:
- return salt.utils.stringutils.to_str(fp_.read())
+ out = salt.utils.stringutils.to_str(fp_.read())
@ -157,7 +156,7 @@ index 9901bc5bd9..45afcccd99 100644
def _parse_subject(subject):
@@ -341,7 +335,7 @@ def _parse_subject(subject):
@@ -355,7 +349,7 @@ def _parse_subject(subject):
ret[nid_name] = val
nids.append(nid_num)
except TypeError as err:
@ -166,7 +165,7 @@ index 9901bc5bd9..45afcccd99 100644
return ret
@@ -520,8 +514,8 @@ def get_pem_entries(glob_path):
@@ -533,8 +527,8 @@ def get_pem_entries(glob_path):
if os.path.isfile(path):
try:
ret[path] = get_pem_entry(text=path)
@ -177,7 +176,7 @@ index 9901bc5bd9..45afcccd99 100644
return ret
@@ -599,8 +593,8 @@ def read_certificates(glob_path):
@@ -612,8 +606,8 @@ def read_certificates(glob_path):
if os.path.isfile(path):
try:
ret[path] = read_certificate(certificate=path)
@ -188,7 +187,7 @@ index 9901bc5bd9..45afcccd99 100644
return ret
@@ -629,12 +623,10 @@ def read_csr(csr):
@@ -642,12 +636,10 @@ def read_csr(csr):
# Get size returns in bytes. The world thinks of key sizes in bits.
'Subject': _parse_subject(csr.get_subject()),
'Subject Hash': _dec2hex(csr.get_subject().as_hash()),
@ -203,7 +202,7 @@ index 9901bc5bd9..45afcccd99 100644
return ret
@@ -937,7 +929,7 @@ def create_crl( # pylint: disable=too-many-arguments,too-many-locals
@@ -943,7 +935,7 @@ def create_crl( # pylint: disable=too-many-arguments,too-many-locals
# pyOpenSSL Note due to current limitations in pyOpenSSL it is impossible
# to specify a digest For signing the CRL. This will hopefully be fixed
# soon: https://github.com/pyca/pyopenssl/pull/161
@ -212,7 +211,7 @@ index 9901bc5bd9..45afcccd99 100644
raise salt.exceptions.SaltInvocationError(
'Could not load OpenSSL module, OpenSSL unavailable'
)
@@ -962,8 +954,7 @@ def create_crl( # pylint: disable=too-many-arguments,too-many-locals
@@ -969,8 +961,7 @@ def create_crl( # pylint: disable=too-many-arguments,too-many-locals
continue
if 'revocation_date' not in rev_item:
@ -222,7 +221,7 @@ index 9901bc5bd9..45afcccd99 100644
rev_date = datetime.datetime.strptime(
rev_item['revocation_date'], '%Y-%m-%d %H:%M:%S')
@@ -1002,8 +993,9 @@ def create_crl( # pylint: disable=too-many-arguments,too-many-locals
@@ -1011,8 +1002,9 @@ def create_crl( # pylint: disable=too-many-arguments,too-many-locals
try:
crltext = crl.export(**export_kwargs)
except (TypeError, ValueError):
@ -234,7 +233,7 @@ index 9901bc5bd9..45afcccd99 100644
export_kwargs.pop('digest', None)
crltext = crl.export(**export_kwargs)
@@ -1042,8 +1034,7 @@ def sign_remote_certificate(argdic, **kwargs):
@@ -1050,8 +1042,7 @@ def sign_remote_certificate(argdic, **kwargs):
if 'signing_policy' in argdic:
signing_policy = _get_signing_policy(argdic['signing_policy'])
if not signing_policy:
@ -244,7 +243,7 @@ index 9901bc5bd9..45afcccd99 100644
if isinstance(signing_policy, list):
dict_ = {}
@@ -1080,6 +1071,7 @@ def get_signing_policy(signing_policy_name):
@@ -1091,6 +1082,7 @@ def get_signing_policy(signing_policy_name):
signing_policy = _get_signing_policy(signing_policy_name)
if not signing_policy:
return 'Signing policy {0} does not exist.'.format(signing_policy_name)
@ -252,7 +251,7 @@ index 9901bc5bd9..45afcccd99 100644
if isinstance(signing_policy, list):
dict_ = {}
for item in signing_policy:
@@ -1092,10 +1084,9 @@ def get_signing_policy(signing_policy_name):
@@ -1103,10 +1095,9 @@ def get_signing_policy(signing_policy_name):
pass
try:
@ -265,8 +264,8 @@ index 9901bc5bd9..45afcccd99 100644
return signing_policy
@@ -1346,8 +1337,7 @@ def create_certificate(
signing_private_key='/etc/pki/myca.key' csr='/etc/pki/myca.csr'}
@@ -1356,8 +1347,7 @@ def create_certificate(
salt '*' x509.create_certificate path=/etc/pki/myca.crt signing_private_key='/etc/pki/myca.key' csr='/etc/pki/myca.csr'}
'''
- if not path and not text and \
@ -275,7 +274,7 @@ index 9901bc5bd9..45afcccd99 100644
raise salt.exceptions.SaltInvocationError(
'Either path or text must be specified.')
if path and text:
@@ -1376,8 +1366,7 @@ def create_certificate(
@@ -1386,8 +1376,7 @@ def create_certificate(
# Including listen_in and preqreuired because they are not included
# in STATE_INTERNAL_KEYWORDS
# for salt 2014.7.2
@ -285,7 +284,7 @@ index 9901bc5bd9..45afcccd99 100644
kwargs.pop(ignore, None)
certs = __salt__['publish.publish'](
@@ -1484,8 +1473,7 @@ def create_certificate(
@@ -1500,8 +1489,7 @@ def create_certificate(
continue
# Use explicitly set values first, fall back to CSR values.
@ -295,7 +294,7 @@ index 9901bc5bd9..45afcccd99 100644
critical = False
if extval.startswith('critical '):
@@ -1608,8 +1596,8 @@ def create_csr(path=None, text=False, **kwargs):
@@ -1623,8 +1611,8 @@ def create_csr(path=None, text=False, **kwargs):
if 'private_key' not in kwargs and 'public_key' in kwargs:
kwargs['private_key'] = kwargs['public_key']
@ -306,7 +305,7 @@ index 9901bc5bd9..45afcccd99 100644
if 'private_key' not in kwargs:
raise salt.exceptions.SaltInvocationError('private_key is required')
@@ -1621,11 +1609,9 @@ def create_csr(path=None, text=False, **kwargs):
@@ -1636,11 +1624,9 @@ def create_csr(path=None, text=False, **kwargs):
kwargs['private_key_passphrase'] = None
if 'public_key_passphrase' not in kwargs:
kwargs['public_key_passphrase'] = None
@ -320,7 +319,7 @@ index 9901bc5bd9..45afcccd99 100644
kwargs['public_key_passphrase'] = kwargs['private_key_passphrase']
csr.set_pubkey(get_public_key(kwargs['public_key'],
@@ -1669,18 +1655,10 @@ def create_csr(path=None, text=False, **kwargs):
@@ -1684,18 +1670,10 @@ def create_csr(path=None, text=False, **kwargs):
extstack.push(ext)
csr.add_extensions(extstack)
@ -340,7 +339,7 @@ index 9901bc5bd9..45afcccd99 100644
def verify_private_key(private_key, public_key, passphrase=None):
@@ -1705,8 +1683,7 @@ def verify_private_key(private_key, public_key, passphrase=None):
@@ -1720,8 +1698,7 @@ def verify_private_key(private_key, public_key, passphrase=None):
salt '*' x509.verify_private_key private_key=/etc/pki/myca.key \\
public_key=/etc/pki/myca.crt
'''
@ -350,7 +349,7 @@ index 9901bc5bd9..45afcccd99 100644
def verify_signature(certificate, signing_pub_key=None,
@@ -1760,9 +1737,8 @@ def verify_crl(crl, cert):
@@ -1775,9 +1752,8 @@ def verify_crl(crl, cert):
salt '*' x509.verify_crl crl=/etc/pki/myca.crl cert=/etc/pki/myca.crt
'''
if not salt.utils.path.which('openssl'):
@ -362,7 +361,7 @@ index 9901bc5bd9..45afcccd99 100644
crltext = _text_or_file(crl)
crltext = get_pem_entry(crltext, pem_type='X509 CRL')
crltempfile = tempfile.NamedTemporaryFile()
@@ -1783,10 +1759,7 @@ def verify_crl(crl, cert):
@@ -1798,10 +1774,7 @@ def verify_crl(crl, cert):
crltempfile.close()
certtempfile.close()
@ -374,7 +373,7 @@ index 9901bc5bd9..45afcccd99 100644
def expired(certificate):
@@ -1823,8 +1796,9 @@ def expired(certificate):
@@ -1838,8 +1811,9 @@ def expired(certificate):
ret['expired'] = True
else:
ret['expired'] = False
@ -386,7 +385,7 @@ index 9901bc5bd9..45afcccd99 100644
return ret
@@ -1847,6 +1821,7 @@ def will_expire(certificate, days):
@@ -1862,6 +1836,7 @@ def will_expire(certificate, days):
salt '*' x509.will_expire "/etc/pki/mycert.crt" days=30
'''
@ -394,7 +393,7 @@ index 9901bc5bd9..45afcccd99 100644
ret = {}
if os.path.isfile(certificate):
@@ -1856,18 +1831,13 @@ def will_expire(certificate, days):
@@ -1871,18 +1846,13 @@ def will_expire(certificate, days):
cert = _get_certificate_obj(certificate)
@ -419,7 +418,7 @@ index 9901bc5bd9..45afcccd99 100644
return ret
diff --git a/salt/states/x509.py b/salt/states/x509.py
index 7bb941f393..3ba4f79c79 100644
index 209cbc6738..8c79c6d034 100644
--- a/salt/states/x509.py
+++ b/salt/states/x509.py
@@ -163,6 +163,7 @@ import copy
@ -458,7 +457,7 @@ index 7bb941f393..3ba4f79c79 100644
overwrite:
Overwrite an existing private key if the provided passphrase cannot decrypt it.
@@ -453,8 +455,10 @@ def certificate_managed(name,
@@ -459,8 +461,10 @@ def certificate_managed(name,
private_key_args['name'], pem_type='RSA PRIVATE KEY')
else:
new_private_key = True
@ -471,7 +470,7 @@ index 7bb941f393..3ba4f79c79 100644
kwargs['public_key'] = private_key
@@ -664,8 +668,10 @@ def crl_managed(name,
@@ -671,8 +675,10 @@ def crl_managed(name,
else:
current = '{0} does not exist.'.format(name)
@ -484,35 +483,15 @@ index 7bb941f393..3ba4f79c79 100644
new = __salt__['x509.read_crl'](crl=new_crl)
new_comp = new.copy()
@@ -707,6 +713,6 @@ def pem_managed(name,
Any arguments supported by :state:`file.managed <salt.states.file.managed>` are supported.
@@ -714,6 +720,6 @@ def pem_managed(name,
Any arguments supported by :py:func:`file.managed <salt.states.file.managed>` are supported.
'''
file_args, kwargs = _get_file_args(name, **kwargs)
- file_args['contents'] = __salt__['x509.get_pem_entry'](text=text)
+ file_args['contents'] = salt.utils.stringutils.to_str(__salt__['x509.get_pem_entry'](text=text))
return __states__['file.managed'](**file_args)
diff --git a/tests/unit/modules/test_x509.py b/tests/unit/modules/test_x509.py
index c300a56d64..7e00c97140 100644
--- a/tests/unit/modules/test_x509.py
+++ b/tests/unit/modules/test_x509.py
@@ -67,10 +67,11 @@ class X509TestCase(TestCase, LoaderModuleMockMixin):
subj = FakeSubject()
x509._parse_subject(subj)
- x509.log.trace.assert_called_once()
- assert x509.log.trace.call_args[0][0] == "Missing attribute '%s'. Error: %s"
- assert x509.log.trace.call_args[0][1] == list(subj.nid.keys())[0]
- assert isinstance(x509.log.trace.call_args[0][2], TypeError)
+ x509.log.debug.assert_called_once()
+
+ assert x509.log.debug.call_args[0][0] == "Missing attribute '%s'. Error: %s"
+ assert x509.log.debug.call_args[0][1] == list(subj.nid.keys())[0]
+ assert isinstance(x509.log.debug.call_args[0][2], TypeError)
@skipIf(not HAS_M2CRYPTO, 'Skipping, M2Crypt is unavailble')
def test_get_pem_entry(self):
--
2.19.0
2.17.1

View File

@ -1,80 +0,0 @@
From 6276eb2cd3f2b396c13118a111998230477cc65a Mon Sep 17 00:00:00 2001
From: Florian Bergmann <bergmannf@users.noreply.github.com>
Date: Tue, 11 Sep 2018 14:02:55 +0200
Subject: [PATCH] X509 fixes for remote signing (#106)
* Use to_str salt.utils when writing to a file.
* Assign the certificate as a string.
* Convert to string before sending via 'publish'.
Otherwise the publish call with receive a "b''" string, which can not be used
in the functions.
* Do not silently ignore errors.
At least log the occurring errors to debug and trace.
---
salt/modules/x509.py | 10 +++++-----
salt/states/x509.py | 2 +-
2 files changed, 6 insertions(+), 6 deletions(-)
diff --git a/salt/modules/x509.py b/salt/modules/x509.py
index 15de06e200..9901bc5bd9 100644
--- a/salt/modules/x509.py
+++ b/salt/modules/x509.py
@@ -658,7 +658,7 @@ def read_crl(crl):
text = get_pem_entry(text, pem_type='X509 CRL')
crltempfile = tempfile.NamedTemporaryFile()
- crltempfile.write(text)
+ crltempfile.write(salt.utils.stringutils.to_str(text))
crltempfile.flush()
crlparsed = _parse_openssl_crl(crltempfile.name)
crltempfile.close()
@@ -1368,9 +1368,9 @@ def create_certificate(
pem_type='CERTIFICATE REQUEST').replace('\n', '')
if 'public_key' in kwargs:
# Strip newlines to make passing through as cli functions easier
- kwargs['public_key'] = get_public_key(
+ kwargs['public_key'] = salt.utils.stringutils.to_str(get_public_key(
kwargs['public_key'],
- passphrase=kwargs['public_key_passphrase']).replace('\n', '')
+ passphrase=kwargs['public_key_passphrase'])).replace('\n', '')
# Remove system entries in kwargs
# Including listen_in and preqreuired because they are not included
@@ -1766,13 +1766,13 @@ def verify_crl(crl, cert):
crltext = _text_or_file(crl)
crltext = get_pem_entry(crltext, pem_type='X509 CRL')
crltempfile = tempfile.NamedTemporaryFile()
- crltempfile.write(crltext)
+ crltempfile.write(salt.utils.stringutils.to_str(crltext))
crltempfile.flush()
certtext = _text_or_file(cert)
certtext = get_pem_entry(certtext, pem_type='CERTIFICATE')
certtempfile = tempfile.NamedTemporaryFile()
- certtempfile.write(certtext)
+ certtempfile.write(salt.utils.stringutils.to_str(certtext))
certtempfile.flush()
cmd = ('openssl crl -noout -in {0} -CAfile {1}'.format(
diff --git a/salt/states/x509.py b/salt/states/x509.py
index 832f74168c..7bb941f393 100644
--- a/salt/states/x509.py
+++ b/salt/states/x509.py
@@ -545,7 +545,7 @@ def certificate_managed(name,
if not private_ret['result']:
return private_ret
- file_args['contents'] += certificate
+ file_args['contents'] += salt.utils.stringutils.to_str(certificate)
if not append_certs:
append_certs = []
--
2.19.0

File diff suppressed because it is too large Load Diff