diff --git a/_lastrevision b/_lastrevision
index 5e581a6..0d45aab 100644
--- a/_lastrevision
+++ b/_lastrevision
@@ -1 +1 @@
-d20075a576c1c9931e083798899099cde1a4109e
\ No newline at end of file
+eb0ca38e07c96eb021ac7490ac1f61a54dc9d904
diff --git a/_service b/_service
index 998165c..0a94560 100644
--- a/_service
+++ b/_service
@@ -3,7 +3,7 @@
https://github.com/openSUSE/salt-packaging.git
salt
package
- 2018.3.0
+ 2018.3.2
git
@@ -12,8 +12,8 @@
codeload.github.com
- saltstack/salt/tar.gz/v2018.3.0
- v2018.3.0.tar.gz
+ saltstack/salt/tar.gz/v2018.3.2
+ v2018.3.2.tar.gz
diff --git a/accounting-for-when-files-in-an-archive-contain-non-.patch b/accounting-for-when-files-in-an-archive-contain-non-.patch
new file mode 100644
index 0000000..3901761
--- /dev/null
+++ b/accounting-for-when-files-in-an-archive-contain-non-.patch
@@ -0,0 +1,158 @@
+From 5305ee8bf07e40dc54aefcbb92016ff868135749 Mon Sep 17 00:00:00 2001
+From: "Gareth J. Greenaway"
+Date: Wed, 9 May 2018 09:33:58 -0700
+Subject: [PATCH] Accounting for when files in an archive contain
+ non-ascii characters
+
+Updating integration/modules/test_archive to include filenames with unicode characters.
+
+only convert to bytes when using Python2
+
+Updating with requested changes.
+
+Ensure member names are decoded before adding to various lists.
+
+Adding a test to ensure archive.list returns the right results when a tar file contains a file with unicode in its name.
+---
+ salt/modules/archive.py | 13 +++---
+ salt/states/archive.py | 4 +-
+ tests/integration/modules/test_archive.py | 52 ++++++++++++++++++++++-
+ 3 files changed, 59 insertions(+), 10 deletions(-)
+
+diff --git a/salt/modules/archive.py b/salt/modules/archive.py
+index 48f0efa18e..76cd3eeb97 100644
+--- a/salt/modules/archive.py
++++ b/salt/modules/archive.py
+@@ -186,12 +186,13 @@ def list_(name,
+ else {'fileobj': cached.stdout, 'mode': 'r|'}
+ with contextlib.closing(tarfile.open(**open_kwargs)) as tar_archive:
+ for member in tar_archive.getmembers():
++ _member = salt.utils.data.decode(member.name)
+ if member.issym():
+- links.append(member.name)
++ links.append(_member)
+ elif member.isdir():
+- dirs.append(member.name + '/')
++ dirs.append(_member + '/')
+ else:
+- files.append(member.name)
++ files.append(_member)
+ return dirs, files, links
+
+ except tarfile.ReadError:
+@@ -410,9 +411,9 @@ def list_(name,
+ item.sort()
+
+ if verbose:
+- ret = {'dirs': sorted(dirs),
+- 'files': sorted(files),
+- 'links': sorted(links)}
++ ret = {'dirs': sorted(salt.utils.data.decode_list(dirs)),
++ 'files': sorted(salt.utils.data.decode_list(files)),
++ 'links': sorted(salt.utils.data.decode_list(links))}
+ ret['top_level_dirs'] = [x for x in ret['dirs']
+ if x.count('/') == 1]
+ ret['top_level_files'] = [x for x in ret['files']
+diff --git a/salt/states/archive.py b/salt/states/archive.py
+index 847c5e9914..6838b2202d 100644
+--- a/salt/states/archive.py
++++ b/salt/states/archive.py
+@@ -1090,7 +1090,7 @@ def extracted(name,
+ and not stat.S_ISDIR(x)),
+ (contents['links'], stat.S_ISLNK)):
+ for path in path_list:
+- full_path = os.path.join(name, path)
++ full_path = salt.utils.path.join(name, path)
+ try:
+ path_mode = os.lstat(full_path.rstrip(os.sep)).st_mode
+ if not func(path_mode):
+@@ -1259,7 +1259,7 @@ def extracted(name,
+ if options is None:
+ try:
+ with closing(tarfile.open(cached, 'r')) as tar:
+- tar.extractall(name)
++ tar.extractall(salt.utils.stringutils.to_str(name))
+ files = tar.getnames()
+ if trim_output:
+ files = files[:trim_output]
+diff --git a/tests/integration/modules/test_archive.py b/tests/integration/modules/test_archive.py
+index 59fe2f5f61..4301b9e3b0 100644
+--- a/tests/integration/modules/test_archive.py
++++ b/tests/integration/modules/test_archive.py
+@@ -47,7 +47,7 @@ class ArchiveTest(ModuleCase):
+ self.arch = os.path.join(self.base_path, 'archive.{0}'.format(arch_fmt))
+ self.dst = os.path.join(self.base_path, '{0}_dst_dir'.format(arch_fmt))
+
+- def _set_up(self, arch_fmt):
++ def _set_up(self, arch_fmt, unicode_filename=False):
+ '''
+ Create source file tree and destination directory
+
+@@ -62,7 +62,11 @@ class ArchiveTest(ModuleCase):
+
+ # Create source
+ os.makedirs(self.src)
+- with salt.utils.files.fopen(os.path.join(self.src, 'file'), 'w') as theorem:
++ if unicode_filename:
++ filename = 'file®'
++ else:
++ filename = 'file'
++ with salt.utils.files.fopen(os.path.join(self.src, filename), 'w') as theorem:
+ theorem.write(textwrap.dedent(salt.utils.stringutils.to_str(r'''\
+ Compression theorem of computational complexity theory:
+
+@@ -150,6 +154,50 @@ class ArchiveTest(ModuleCase):
+
+ self._tear_down()
+
++ @skipIf(not salt.utils.path.which('tar'), 'Cannot find tar executable')
++ def test_tar_pack_unicode(self):
++ '''
++ Validate using the tar function to create archives containing unicode filenames
++ '''
++ self._set_up(arch_fmt='tar', unicode_filename=True)
++
++ # Test create archive
++ ret = self.run_function('archive.tar', ['-cvf', self.arch], sources=self.src)
++ self.assertTrue(isinstance(ret, list), six.text_type(ret))
++ self._assert_artifacts_in_ret(ret)
++
++ self._tear_down()
++
++ @skipIf(not salt.utils.path.which('tar'), 'Cannot find tar executable')
++ def test_tar_unpack_unicode(self):
++ '''
++ Validate using the tar function to extract archives containing unicode filenames
++ '''
++ self._set_up(arch_fmt='tar', unicode_filename=True)
++ self.run_function('archive.tar', ['-cvf', self.arch], sources=self.src)
++
++ # Test extract archive
++ ret = self.run_function('archive.tar', ['-xvf', self.arch], dest=self.dst)
++ self.assertTrue(isinstance(ret, list), six.text_type(ret))
++ self._assert_artifacts_in_ret(ret)
++
++ self._tear_down()
++
++ @skipIf(not salt.utils.path.which('tar'), 'Cannot find tar executable')
++ def test_tar_list_unicode(self):
++ '''
++ Validate using the archive.list function on archives containing unicode filenames
++ '''
++ self._set_up(arch_fmt='tar', unicode_filename=True)
++ self.run_function('archive.tar', ['-cvf', self.arch], sources=self.src)
++
++ # Test list archive
++ ret = self.run_function('archive.list', name=self.arch)
++ self.assertTrue(isinstance(ret, list), six.text_type(ret))
++ self._assert_artifacts_in_ret(ret)
++
++ self._tear_down()
++
+ @skipIf(not salt.utils.path.which('gzip'), 'Cannot find gzip executable')
+ def test_gzip(self):
+ '''
+--
+2.17.1
+
+
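Illustrative aside (not part of the patch above; the helper name is an assumption for illustration): a minimal standard-library sketch of the behaviour the change codifies, namely that tar member names come back as byte strings on Python 2 and must be decoded before they are sorted or returned.

.. code-block:: python

    # Sketch: list tar members whose names may contain non-ASCII characters,
    # decoding byte strings on Python 2 (Python 3 already returns text).
    from __future__ import unicode_literals

    import sys
    import tarfile


    def decode_name(name, encoding='utf-8'):
        # Roughly the role salt.utils.data.decode() plays in the patch.
        if isinstance(name, bytes):
            return name.decode(encoding)
        return name


    def list_members(path):
        dirs, files, links = [], [], []
        with tarfile.open(path) as tar:
            for member in tar.getmembers():
                name = decode_name(member.name)
                if member.issym():
                    links.append(name)
                elif member.isdir():
                    dirs.append(name + '/')
                else:
                    files.append(name)
        return sorted(dirs), sorted(files), sorted(links)


    if __name__ == '__main__':
        print(list_members(sys.argv[1]))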
diff --git a/activate-all-beacons-sources-config-pillar-grains.patch b/activate-all-beacons-sources-config-pillar-grains.patch
index 39986ff..1c39061 100644
--- a/activate-all-beacons-sources-config-pillar-grains.patch
+++ b/activate-all-beacons-sources-config-pillar-grains.patch
@@ -1,4 +1,4 @@
-From 957ac8fe161db2c4b3b8fe8b84027bc15e144a49 Mon Sep 17 00:00:00 2001
+From 5b48dee2f1b9a8203490e97620581b3a04d42632 Mon Sep 17 00:00:00 2001
From: Bo Maryniuk
Date: Tue, 17 Oct 2017 16:52:33 +0200
Subject: [PATCH] Activate all beacons sources: config/pillar/grains
@@ -8,7 +8,7 @@ Subject: [PATCH] Activate all beacons sources: config/pillar/grains
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/salt/minion.py b/salt/minion.py
-index df69d3c7bd..4a30e70be5 100644
+index 9468695880..0a6771dccd 100644
--- a/salt/minion.py
+++ b/salt/minion.py
@@ -439,7 +439,7 @@ class MinionBase(object):
@@ -21,6 +21,6 @@ index df69d3c7bd..4a30e70be5 100644
return self.beacons.process(b_conf, self.opts['grains']) # pylint: disable=no-member
return []
--
-2.16.2
+2.13.7
diff --git a/add-all_versions-parameter-to-include-all-installed-.patch b/add-all_versions-parameter-to-include-all-installed-.patch
new file mode 100644
index 0000000..324744c
--- /dev/null
+++ b/add-all_versions-parameter-to-include-all-installed-.patch
@@ -0,0 +1,450 @@
+From 9de54cf6f7d8d6da4212842fef8c4c658a2a9b9c Mon Sep 17 00:00:00 2001
+From: =?UTF-8?q?Pablo=20Su=C3=A1rez=20Hern=C3=A1ndez?=
+
+Date: Mon, 14 May 2018 11:33:13 +0100
+Subject: [PATCH] Add "all_versions" parameter to include all installed
+ versions on rpm.info
+
+Enable "all_versions" parameter for zypper.info_installed
+
+Enable "all_versions" parameter for yumpkg.info_installed
+
+Prevent adding failed packages when pkg name contains the arch (on SUSE)
+
+Add 'all_versions' documentation for info_installed on yum/zypper modules
+
+Add unit tests for info_installed with all_versions
+
+Refactor: use dict.setdefault instead of an if-else statement
+
+Allow removing only specific package versions with zypper and yum
+---
+ salt/modules/rpm.py | 18 ++++++++---
+ salt/modules/yumpkg.py | 49 ++++++++++++++++++++++--------
+ salt/modules/zypper.py | 64 ++++++++++++++++++++++++++++++++-------
+ salt/states/pkg.py | 33 +++++++++++++++++++-
+ tests/unit/modules/test_yumpkg.py | 50 ++++++++++++++++++++++++++++++
+ tests/unit/modules/test_zypper.py | 50 ++++++++++++++++++++++++++++++
+ 6 files changed, 236 insertions(+), 28 deletions(-)
+
+diff --git a/salt/modules/rpm.py b/salt/modules/rpm.py
+index d065f1e2d9..3683234f59 100644
+--- a/salt/modules/rpm.py
++++ b/salt/modules/rpm.py
+@@ -453,7 +453,7 @@ def diff(package, path):
+ return res
+
+
+-def info(*packages, **attr):
++def info(*packages, **kwargs):
+ '''
+ Return a detailed package(s) summary information.
+ If no packages specified, all packages will be returned.
+@@ -467,6 +467,9 @@ def info(*packages, **attr):
+ version, vendor, release, build_date, build_date_time_t, install_date, install_date_time_t,
+ build_host, group, source_rpm, arch, epoch, size, license, signature, packager, url, summary, description.
+
++ :param all_versions:
++ Return information for all installed versions of the packages
++
+ :return:
+
+ CLI example:
+@@ -476,7 +479,9 @@ def info(*packages, **attr):
+ salt '*' lowpkg.info apache2 bash
+ salt '*' lowpkg.info apache2 bash attr=version
+ salt '*' lowpkg.info apache2 bash attr=version,build_date_iso,size
++ salt '*' lowpkg.info apache2 bash attr=version,build_date_iso,size all_versions=True
+ '''
++ all_versions = kwargs.get('all_versions', False)
+ # LONGSIZE is not a valid tag for all versions of rpm. If LONGSIZE isn't
+ # available, then we can just use SIZE for older versions. See Issue #31366.
+ rpm_tags = __salt__['cmd.run_stdout'](
+@@ -516,7 +521,7 @@ def info(*packages, **attr):
+ "edition": "edition: %|EPOCH?{%{EPOCH}:}|%{VERSION}-%{RELEASE}\\n",
+ }
+
+- attr = attr.get('attr', None) and attr['attr'].split(",") or None
++ attr = kwargs.get('attr', None) and kwargs['attr'].split(",") or None
+ query = list()
+ if attr:
+ for attr_k in attr:
+@@ -610,8 +615,13 @@ def info(*packages, **attr):
+ if pkg_name.startswith('gpg-pubkey'):
+ continue
+ if pkg_name not in ret:
+- ret[pkg_name] = pkg_data.copy()
+- del ret[pkg_name]['edition']
++ if all_versions:
++ ret[pkg_name] = [pkg_data.copy()]
++ else:
++ ret[pkg_name] = pkg_data.copy()
++ del ret[pkg_name]['edition']
++ elif all_versions:
++ ret[pkg_name].append(pkg_data.copy())
+
+ return ret
+
+diff --git a/salt/modules/yumpkg.py b/salt/modules/yumpkg.py
+index 747142264d..9ce4926790 100644
+--- a/salt/modules/yumpkg.py
++++ b/salt/modules/yumpkg.py
+@@ -994,31 +994,39 @@ def list_downloaded():
+ return ret
+
+
+-def info_installed(*names):
++def info_installed(*names, **kwargs):
+ '''
+ .. versionadded:: 2015.8.1
+
+ Return the information of the named package(s), installed on the system.
+
++ :param all_versions:
++ Include information for all versions of the packages installed on the minion.
++
+ CLI example:
+
+ .. code-block:: bash
+
+ salt '*' pkg.info_installed
+ salt '*' pkg.info_installed ...
++ salt '*' pkg.info_installed all_versions=True
+ '''
++ all_versions = kwargs.get('all_versions', False)
+ ret = dict()
+- for pkg_name, pkg_nfo in __salt__['lowpkg.info'](*names).items():
+- t_nfo = dict()
+- # Translate dpkg-specific keys to a common structure
+- for key, value in pkg_nfo.items():
+- if key == 'source_rpm':
+- t_nfo['source'] = value
++ for pkg_name, pkgs_nfo in __salt__['lowpkg.info'](*names, **kwargs).items():
++ pkg_nfo = pkgs_nfo if all_versions else [pkgs_nfo]
++ for _nfo in pkg_nfo:
++ t_nfo = dict()
++ # Translate dpkg-specific keys to a common structure
++ for key, value in _nfo.items():
++ if key == 'source_rpm':
++ t_nfo['source'] = value
++ else:
++ t_nfo[key] = value
++ if not all_versions:
++ ret[pkg_name] = t_nfo
+ else:
+- t_nfo[key] = value
+-
+- ret[pkg_name] = t_nfo
+-
++ ret.setdefault(pkg_name, []).append(t_nfo)
+ return ret
+
+
+@@ -1919,7 +1927,24 @@ def remove(name=None, pkgs=None, **kwargs): # pylint: disable=W0613
+ raise CommandExecutionError(exc)
+
+ old = list_pkgs()
+- targets = [x for x in pkg_params if x in old]
++ targets = []
++ for target in pkg_params:
++ # Check if package version set to be removed is actually installed:
++ # old[target] contains a comma-separated list of installed versions
++ if target in old and not pkg_params[target]:
++ targets.append(target)
++ elif target in old and pkg_params[target] in old[target].split(','):
++ arch = ''
++ pkgname = target
++ try:
++ namepart, archpart = target.rsplit('.', 1)
++ except ValueError:
++ pass
++ else:
++ if archpart in salt.utils.pkg.rpm.ARCHES:
++ arch = '.' + archpart
++ pkgname = namepart
++ targets.append('{0}-{1}{2}'.format(pkgname, pkg_params[target], arch))
+ if not targets:
+ return {}
+
+diff --git a/salt/modules/zypper.py b/salt/modules/zypper.py
+index 668143bdd9..06f8335c18 100644
+--- a/salt/modules/zypper.py
++++ b/salt/modules/zypper.py
+@@ -470,28 +470,37 @@ def info_installed(*names, **kwargs):
+ Valid attributes are:
+ ignore, report
+
++ :param all_versions:
++ Include information for all versions of the packages installed on the minion.
++
+ CLI example:
+
+ .. code-block:: bash
+
+ salt '*' pkg.info_installed
+ salt '*' pkg.info_installed ...
+- salt '*' pkg.info_installed attr=version,vendor
++ salt '*' pkg.info_installed all_versions=True
++ salt '*' pkg.info_installed attr=version,vendor all_versions=True
+ salt '*' pkg.info_installed ... attr=version,vendor
+ salt '*' pkg.info_installed ... attr=version,vendor errors=ignore
+ salt '*' pkg.info_installed ... attr=version,vendor errors=report
+ '''
++ all_versions = kwargs.get('all_versions', False)
+ ret = dict()
+- for pkg_name, pkg_nfo in __salt__['lowpkg.info'](*names, **kwargs).items():
+- t_nfo = dict()
+- # Translate dpkg-specific keys to a common structure
+- for key, value in six.iteritems(pkg_nfo):
+- if key == 'source_rpm':
+- t_nfo['source'] = value
++ for pkg_name, pkgs_nfo in __salt__['lowpkg.info'](*names, **kwargs).items():
++ pkg_nfo = pkgs_nfo if all_versions else [pkgs_nfo]
++ for _nfo in pkg_nfo:
++ t_nfo = dict()
++ # Translate dpkg-specific keys to a common structure
++ for key, value in six.iteritems(_nfo):
++ if key == 'source_rpm':
++ t_nfo['source'] = value
++ else:
++ t_nfo[key] = value
++ if not all_versions:
++ ret[pkg_name] = t_nfo
+ else:
+- t_nfo[key] = value
+- ret[pkg_name] = t_nfo
+-
++ ret.setdefault(pkg_name, []).append(t_nfo)
+ return ret
+
+
+@@ -1494,7 +1503,14 @@ def _uninstall(name=None, pkgs=None):
+ raise CommandExecutionError(exc)
+
+ old = list_pkgs()
+- targets = [target for target in pkg_params if target in old]
++ targets = []
++ for target in pkg_params:
++ # Check if package version set to be removed is actually installed:
++ # old[target] contains a comma-separated list of installed versions
++ if target in old and pkg_params[target] in old[target].split(','):
++ targets.append(target + "-" + pkg_params[target])
++ elif target in old and not pkg_params[target]:
++ targets.append(target)
+ if not targets:
+ return {}
+
+@@ -1517,6 +1533,32 @@ def _uninstall(name=None, pkgs=None):
+ return ret
+
+
++def normalize_name(name):
++ '''
++ Strips the architecture from the specified package name, if necessary.
++ Circumstances where this would be done include:
++
++ * If the arch is 32 bit and the package name ends in a 32-bit arch.
++ * If the arch matches the OS arch, or is ``noarch``.
++
++ CLI Example:
++
++ .. code-block:: bash
++
++ salt '*' pkg.normalize_name zsh.x86_64
++ '''
++ try:
++ arch = name.rsplit('.', 1)[-1]
++ if arch not in salt.utils.pkg.rpm.ARCHES + ('noarch',):
++ return name
++ except ValueError:
++ return name
++ if arch in (__grains__['osarch'], 'noarch') \
++ or salt.utils.pkg.rpm.check_32(arch, osarch=__grains__['osarch']):
++ return name[:-(len(arch) + 1)]
++ return name
++
++
+ def remove(name=None, pkgs=None, **kwargs): # pylint: disable=unused-argument
+ '''
+ .. versionchanged:: 2015.8.12,2016.3.3,2016.11.0
+diff --git a/salt/states/pkg.py b/salt/states/pkg.py
+index 2682ee17f9..ed405cb6b5 100644
+--- a/salt/states/pkg.py
++++ b/salt/states/pkg.py
+@@ -415,6 +415,16 @@ def _find_remove_targets(name=None,
+
+ if __grains__['os'] == 'FreeBSD' and origin:
+ cver = [k for k, v in six.iteritems(cur_pkgs) if v['origin'] == pkgname]
++ elif __grains__['os_family'] == 'Suse':
++ # On SUSE systems, zypper returns package names without the arch suffix
++ try:
++ namepart, archpart = pkgname.rsplit('.', 1)
++ except ValueError:
++ cver = cur_pkgs.get(pkgname, [])
++ else:
++ if archpart in salt.utils.pkg.rpm.ARCHES + ("noarch",):
++ pkgname = namepart
++ cver = cur_pkgs.get(pkgname, [])
+ else:
+ cver = cur_pkgs.get(pkgname, [])
+
+@@ -844,6 +854,17 @@ def _verify_install(desired, new_pkgs, ignore_epoch=False, new_caps=None):
+ cver = new_pkgs.get(pkgname.split('%')[0])
+ elif __grains__['os_family'] == 'Debian':
+ cver = new_pkgs.get(pkgname.split('=')[0])
++ elif __grains__['os_family'] == 'Suse':
++ # On SUSE systems, zypper returns package names without the arch suffix
++ try:
++ namepart, archpart = pkgname.rsplit('.', 1)
++ except ValueError:
++ cver = new_pkgs.get(pkgname)
++ else:
++ if archpart in salt.utils.pkg.rpm.ARCHES + ("noarch",):
++ cver = new_pkgs.get(namepart)
++ else:
++ cver = new_pkgs.get(pkgname)
+ else:
+ cver = new_pkgs.get(pkgname)
+ if not cver and pkgname in new_caps:
+@@ -2674,7 +2695,17 @@ def _uninstall(
+
+ changes = __salt__['pkg.{0}'.format(action)](name, pkgs=pkgs, version=version, **kwargs)
+ new = __salt__['pkg.list_pkgs'](versions_as_list=True, **kwargs)
+- failed = [x for x in pkg_params if x in new]
++ failed = []
++ for x in pkg_params:
++ if __grains__['os_family'] in ['Suse', 'RedHat']:
++ # Check if the package version set to be removed is actually removed:
++ if x in new and not pkg_params[x]:
++ failed.append(x)
++ elif x in new and pkg_params[x] in new[x]:
++ failed.append(x + "-" + pkg_params[x])
++ elif x in new:
++ failed.append(x)
++
+ if action == 'purge':
+ new_removed = __salt__['pkg.list_pkgs'](versions_as_list=True,
+ removed=True,
+diff --git a/tests/unit/modules/test_yumpkg.py b/tests/unit/modules/test_yumpkg.py
+index 28b6e1294c..c73f2582b9 100644
+--- a/tests/unit/modules/test_yumpkg.py
++++ b/tests/unit/modules/test_yumpkg.py
+@@ -601,3 +601,53 @@ class YumTestCase(TestCase, LoaderModuleMockMixin):
+ '--branch=foo', '--exclude=kernel*', 'upgrade'],
+ output_loglevel='trace',
+ python_shell=False)
++
++ def test_info_installed_with_all_versions(self):
++ '''
++ Test returning the information for all installed versions of the named package(s).
++
++ :return:
++ '''
++ run_out = {
++ 'virgo-dummy': [
++ {'build_date': '2015-07-09T10:55:19Z',
++ 'vendor': 'openSUSE Build Service',
++ 'description': 'This is the Virgo dummy package used for testing SUSE Manager',
++ 'license': 'GPL-2.0', 'build_host': 'sheep05', 'url': 'http://www.suse.com',
++ 'build_date_time_t': 1436432119, 'relocations': '(not relocatable)',
++ 'source_rpm': 'virgo-dummy-1.0-1.1.src.rpm', 'install_date': '2016-02-23T16:31:57Z',
++ 'install_date_time_t': 1456241517, 'summary': 'Virgo dummy package', 'version': '1.0',
++ 'signature': 'DSA/SHA1, Thu Jul 9 08:55:33 2015, Key ID 27fa41bd8a7c64f9',
++ 'release': '1.1', 'group': 'Applications/System', 'arch': 'i686', 'size': '17992'},
++ {'build_date': '2015-07-09T10:15:19Z',
++ 'vendor': 'openSUSE Build Service',
++ 'description': 'This is the Virgo dummy package used for testing SUSE Manager',
++ 'license': 'GPL-2.0', 'build_host': 'sheep05', 'url': 'http://www.suse.com',
++ 'build_date_time_t': 1436432119, 'relocations': '(not relocatable)',
++ 'source_rpm': 'virgo-dummy-1.0-1.1.src.rpm', 'install_date': '2016-02-23T16:31:57Z',
++ 'install_date_time_t': 14562415127, 'summary': 'Virgo dummy package', 'version': '1.0',
++ 'signature': 'DSA/SHA1, Thu Jul 9 08:55:33 2015, Key ID 27fa41bd8a7c64f9',
++ 'release': '1.1', 'group': 'Applications/System', 'arch': 'x86_64', 'size': '13124'}
++ ],
++ 'libopenssl1_0_0': [
++ {'build_date': '2015-11-04T23:20:34Z', 'vendor': 'SUSE LLC ',
++ 'description': 'The OpenSSL Project is a collaborative effort.',
++ 'license': 'OpenSSL', 'build_host': 'sheep11', 'url': 'https://www.openssl.org/',
++ 'build_date_time_t': 1446675634, 'relocations': '(not relocatable)',
++ 'source_rpm': 'openssl-1.0.1i-34.1.src.rpm', 'install_date': '2016-02-23T16:31:35Z',
++ 'install_date_time_t': 1456241495, 'summary': 'Secure Sockets and Transport Layer Security',
++ 'version': '1.0.1i', 'signature': 'RSA/SHA256, Wed Nov 4 22:21:34 2015, Key ID 70af9e8139db7c82',
++ 'release': '34.1', 'group': 'Productivity/Networking/Security', 'packager': 'https://www.suse.com/',
++ 'arch': 'x86_64', 'size': '2576912'}
++ ]
++ }
++ with patch.dict(yumpkg.__salt__, {'lowpkg.info': MagicMock(return_value=run_out)}):
++ installed = yumpkg.info_installed(all_versions=True)
++ # Test overall products length
++ self.assertEqual(len(installed), 2)
++
++ # Test multiple versions for the same package
++ for pkg_name, pkg_info_list in installed.items():
++ self.assertEqual(len(pkg_info_list), 2 if pkg_name == "virgo-dummy" else 1)
++ for info in pkg_info_list:
++ self.assertTrue(info['arch'] in ('x86_64', 'i686'))
+diff --git a/tests/unit/modules/test_zypper.py b/tests/unit/modules/test_zypper.py
+index 539a950252..6eccee568b 100644
+--- a/tests/unit/modules/test_zypper.py
++++ b/tests/unit/modules/test_zypper.py
+@@ -327,6 +327,56 @@ class ZypperTestCase(TestCase, LoaderModuleMockMixin):
+ installed = zypper.info_installed()
+ self.assertEqual(installed['vīrgô']['description'], 'vīrgô d€šçripţiǫñ')
+
++ def test_info_installed_with_all_versions(self):
++ '''
++ Test returning the information for all installed versions of the named package(s).
++
++ :return:
++ '''
++ run_out = {
++ 'virgo-dummy': [
++ {'build_date': '2015-07-09T10:55:19Z',
++ 'vendor': 'openSUSE Build Service',
++ 'description': 'This is the Virgo dummy package used for testing SUSE Manager',
++ 'license': 'GPL-2.0', 'build_host': 'sheep05', 'url': 'http://www.suse.com',
++ 'build_date_time_t': 1436432119, 'relocations': '(not relocatable)',
++ 'source_rpm': 'virgo-dummy-1.0-1.1.src.rpm', 'install_date': '2016-02-23T16:31:57Z',
++ 'install_date_time_t': 1456241517, 'summary': 'Virgo dummy package', 'version': '1.0',
++ 'signature': 'DSA/SHA1, Thu Jul 9 08:55:33 2015, Key ID 27fa41bd8a7c64f9',
++ 'release': '1.1', 'group': 'Applications/System', 'arch': 'i686', 'size': '17992'},
++ {'build_date': '2015-07-09T10:15:19Z',
++ 'vendor': 'openSUSE Build Service',
++ 'description': 'This is the Virgo dummy package used for testing SUSE Manager',
++ 'license': 'GPL-2.0', 'build_host': 'sheep05', 'url': 'http://www.suse.com',
++ 'build_date_time_t': 1436432119, 'relocations': '(not relocatable)',
++ 'source_rpm': 'virgo-dummy-1.0-1.1.src.rpm', 'install_date': '2016-02-23T16:31:57Z',
++ 'install_date_time_t': 14562415127, 'summary': 'Virgo dummy package', 'version': '1.0',
++ 'signature': 'DSA/SHA1, Thu Jul 9 08:55:33 2015, Key ID 27fa41bd8a7c64f9',
++ 'release': '1.1', 'group': 'Applications/System', 'arch': 'x86_64', 'size': '13124'}
++ ],
++ 'libopenssl1_0_0': [
++ {'build_date': '2015-11-04T23:20:34Z', 'vendor': 'SUSE LLC ',
++ 'description': 'The OpenSSL Project is a collaborative effort.',
++ 'license': 'OpenSSL', 'build_host': 'sheep11', 'url': 'https://www.openssl.org/',
++ 'build_date_time_t': 1446675634, 'relocations': '(not relocatable)',
++ 'source_rpm': 'openssl-1.0.1i-34.1.src.rpm', 'install_date': '2016-02-23T16:31:35Z',
++ 'install_date_time_t': 1456241495, 'summary': 'Secure Sockets and Transport Layer Security',
++ 'version': '1.0.1i', 'signature': 'RSA/SHA256, Wed Nov 4 22:21:34 2015, Key ID 70af9e8139db7c82',
++ 'release': '34.1', 'group': 'Productivity/Networking/Security', 'packager': 'https://www.suse.com/',
++ 'arch': 'x86_64', 'size': '2576912'}
++ ]
++ }
++ with patch.dict(zypper.__salt__, {'lowpkg.info': MagicMock(return_value=run_out)}):
++ installed = zypper.info_installed(all_versions=True)
++ # Test overall products length
++ self.assertEqual(len(installed), 2)
++
++ # Test multiple versions for the same package
++ for pkg_name, pkg_info_list in installed.items():
++ self.assertEqual(len(pkg_info_list), 2 if pkg_name == "virgo-dummy" else 1)
++ for info in pkg_info_list:
++ self.assertTrue(info['arch'] in ('x86_64', 'i686'))
++
+ def test_info_available(self):
+ '''
+ Test return the information of the named package available for the system.
+--
+2.13.7
+
+
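Illustrative aside (not part of the patch above; function and variable names are hypothetical): the aggregation pattern the patch introduces, where ``all_versions=True`` makes each package map to a list of per-version dicts accumulated via ``dict.setdefault``.

.. code-block:: python

    # Sketch: normalize lowpkg.info output into {name: info} or, with
    # all_versions=True, into {name: [info, ...]}.
    def aggregate_info(lowpkg_info, all_versions=False):
        ret = {}
        for pkg_name, pkgs_nfo in lowpkg_info.items():
            # With all_versions the backend returns a list per package,
            # otherwise a single dict; normalize to a list either way.
            for nfo in (pkgs_nfo if all_versions else [pkgs_nfo]):
                # Translate rpm-specific keys to a common structure.
                t_nfo = {('source' if key == 'source_rpm' else key): value
                         for key, value in nfo.items()}
                if all_versions:
                    ret.setdefault(pkg_name, []).append(t_nfo)
                else:
                    ret[pkg_name] = t_nfo
        return ret


    # Two installed versions of the same package end up in one list.
    sample = {'virgo-dummy': [{'version': '1.0', 'arch': 'i686'},
                              {'version': '1.0', 'arch': 'x86_64'}]}
    assert len(aggregate_info(sample, all_versions=True)['virgo-dummy']) == 2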
diff --git a/add-custom-suse-capabilities-as-grains.patch b/add-custom-suse-capabilities-as-grains.patch
new file mode 100644
index 0000000..9e21b8c
--- /dev/null
+++ b/add-custom-suse-capabilities-as-grains.patch
@@ -0,0 +1,29 @@
+From b02aee33a3aa1676cbfdf3a0ed936eef8a40adfe Mon Sep 17 00:00:00 2001
+From: =?UTF-8?q?Pablo=20Su=C3=A1rez=20Hern=C3=A1ndez?=
+
+Date: Thu, 21 Jun 2018 11:57:57 +0100
+Subject: [PATCH] Add custom SUSE capabilities as Grains
+
+---
+ salt/grains/extra.py | 7 +++++++
+ 1 file changed, 7 insertions(+)
+
+diff --git a/salt/grains/extra.py b/salt/grains/extra.py
+index fff70e9f5b..4fb58674bf 100644
+--- a/salt/grains/extra.py
++++ b/salt/grains/extra.py
+@@ -75,3 +75,10 @@ def config():
+ log.warning("Bad syntax in grains file! Skipping.")
+ return {}
+ return {}
++
++
++def suse_backported_capabilities():
++ return {
++ '__suse_reserved_pkg_all_versions_support': True,
++ '__suse_reserved_pkg_patches_support': True
++ }
+--
+2.13.7
+
+
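Illustrative aside (assumed usage, not part of the patch above): once the loader merges the returned dict into the minion grains, other code can feature-gate on these capability flags.

.. code-block:: python

    # Sketch: branch on the backported-capability grains added above.
    # In a real execution module the grains come from the __grains__ dunder;
    # a plain dict stands in for it here.
    def supports_all_versions(grains):
        return bool(grains.get('__suse_reserved_pkg_all_versions_support'))


    grains = {'__suse_reserved_pkg_all_versions_support': True,
              '__suse_reserved_pkg_patches_support': True}
    assert supports_all_versions(grains)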
diff --git a/add-engine-relaying-libvirt-events.patch b/add-engine-relaying-libvirt-events.patch
new file mode 100644
index 0000000..0da11aa
--- /dev/null
+++ b/add-engine-relaying-libvirt-events.patch
@@ -0,0 +1,894 @@
+From 5c41a5b8c9925bf788946e334cb3912ca9b09190 Mon Sep 17 00:00:00 2001
+From: =?UTF-8?q?C=C3=A9dric=20Bosdonnat?=
+Date: Fri, 9 Mar 2018 15:46:12 +0100
+Subject: [PATCH] Add engine relaying libvirt events
+
+Libvirt API offers clients to register callbacks for various events.
+libvirt_events engine will listen on a libvirt URI (local or remote)
+for events and send them to the salt event bus.
+
+Special thanks to @isbm for the code cleanup help
+---
+ salt/engines/libvirt_events.py | 702 ++++++++++++++++++++++
+ tests/unit/engines/test_libvirt_events.py | 159 +++++
+ 2 files changed, 861 insertions(+)
+ create mode 100644 salt/engines/libvirt_events.py
+ create mode 100644 tests/unit/engines/test_libvirt_events.py
+
+diff --git a/salt/engines/libvirt_events.py b/salt/engines/libvirt_events.py
+new file mode 100644
+index 0000000000..a1c9d09067
+--- /dev/null
++++ b/salt/engines/libvirt_events.py
+@@ -0,0 +1,702 @@
++# -*- coding: utf-8 -*-
++
++'''
++An engine that listens for libvirt events and resends them to the salt event bus.
++
++The minimal configuration is the following and will listen to all events on the
++local hypervisor and send them with a tag starting with ``salt/engines/libvirt_events``:
++
++.. code-block:: yaml
++
++ engines:
++ - libvirt_events
++
++Note that the automatically-picked libvirt connection will depend on the value
++of ``uri_default`` in ``/etc/libvirt/libvirt.conf``. To force using another
++connection like the local LXC libvirt driver, set the ``uri`` property as in the
++following example configuration.
++
++.. code-block:: yaml
++
++ engines:
++ - libvirt_events:
++ uri: lxc:///
++ tag_prefix: libvirt
++ filters:
++ - domain/lifecycle
++ - domain/reboot
++ - pool
++
++Filters is a list of event types to relay to the event bus. Items in this list
++can be either one of the main types (``domain``, ``network``, ``pool``,
++``nodedev``, ``secret``), ``all`` or a more precise filter. These can be done
++with values like ``domain/lifecycle`` (main type/subtype). The possible values are in the
++CALLBACK_DEFS constant. If the filters list contains ``all``, all
++events will be relayed.
++
++Be aware that the list of events increases with libvirt versions, for example
++network events have been added in libvirt 1.2.1.
++
++Running the engine on non-root
++------------------------------
++
++Running this engine as a non-root user requires special attention, which is
++typically the case for a master running as user `salt`. The engine is likely
++to fail to connect to libvirt with an error like this one:
++
++ [ERROR ] authentication unavailable: no polkit agent available to authenticate action 'org.libvirt.unix.monitor'
++
++
++To fix this, the user running the engine, for example the salt-master, needs
++to have the rights to connect to libvirt in the machine polkit config.
++A polkit rule like the following one will allow the `salt` user to connect to libvirt:
++
++.. code-block:: javascript
++
++ polkit.addRule(function(action, subject) {
++ if (action.id.indexOf("org.libvirt") == 0 &&
++ subject.user == "salt") {
++ return polkit.Result.YES;
++ }
++ });
++
++:depends: libvirt 1.0.0+ python binding
++
++.. versionadded:: Fluorine
++'''
++
++from __future__ import absolute_import, unicode_literals, print_function
++import logging
++
++# Import salt libs
++import salt.utils.event
++
++# pylint: disable=no-name-in-module,import-error
++from salt.ext.six.moves.urllib.parse import urlparse
++# pylint: enable=no-name-in-module,import-error
++
++log = logging.getLogger(__name__)
++
++
++try:
++ import libvirt
++except ImportError:
++ libvirt = None # pylint: disable=invalid-name
++
++
++def __virtual__():
++ '''
++ Only load if libvirt python binding is present
++ '''
++ if libvirt is None:
++ msg = 'libvirt module not found'
++ elif libvirt.getVersion() < 1000000:
++ msg = 'libvirt >= 1.0.0 required'
++ else:
++ msg = ''
++ return not bool(msg), msg
++
++
++REGISTER_FUNCTIONS = {
++ 'domain': 'domainEventRegisterAny',
++ 'network': 'networkEventRegisterAny',
++ 'pool': 'storagePoolEventRegisterAny',
++ 'nodedev': 'nodeDeviceEventRegisterAny',
++ 'secret': 'secretEventRegisterAny'
++}
++
++# Handle either BLOCK_JOB or BLOCK_JOB_2, but prefer the latter
++if hasattr(libvirt, 'VIR_DOMAIN_EVENT_ID_BLOCK_JOB_2'):
++ BLOCK_JOB_ID = 'VIR_DOMAIN_EVENT_ID_BLOCK_JOB_2'
++else:
++ BLOCK_JOB_ID = 'VIR_DOMAIN_EVENT_ID_BLOCK_JOB'
++
++CALLBACK_DEFS = {
++ 'domain': (('lifecycle', None),
++ ('reboot', None),
++ ('rtc_change', None),
++ ('watchdog', None),
++ ('graphics', None),
++ ('io_error', 'VIR_DOMAIN_EVENT_ID_IO_ERROR_REASON'),
++ ('control_error', None),
++ ('disk_change', None),
++ ('tray_change', None),
++ ('pmwakeup', None),
++ ('pmsuspend', None),
++ ('balloon_change', None),
++ ('pmsuspend_disk', None),
++ ('device_removed', None),
++ ('block_job', BLOCK_JOB_ID),
++ ('tunable', None),
++ ('agent_lifecycle', None),
++ ('device_added', None),
++ ('migration_iteration', None),
++ ('job_completed', None),
++ ('device_removal_failed', None),
++ ('metadata_change', None),
++ ('block_threshold', None)),
++ 'network': (('lifecycle', None),),
++ 'pool': (('lifecycle', None),
++ ('refresh', None)),
++ 'nodedev': (('lifecycle', None),
++ ('update', None)),
++ 'secret': (('lifecycle', None),
++ ('value_changed', None))
++}
++
++
++def _compute_subprefix(attr):
++ '''
++ Get the part before the first '_' or the end of attr including
++ the potential '_'
++ '''
++ return ''.join((attr.split('_')[0], '_' if len(attr.split('_')) > 1 else ''))
++
++
++def _get_libvirt_enum_string(prefix, value):
++ '''
++ Convert the libvirt enum integer value into a human readable string.
++
++ :param prefix: start of the libvirt attribute to look for.
++ :param value: integer to convert to string
++ '''
++ attributes = [attr[len(prefix):] for attr in libvirt.__dict__ if attr.startswith(prefix)]
++
++ # Filter out the values starting with a common base as they match another enum
++ prefixes = [_compute_subprefix(p) for p in attributes]
++ counts = {p: prefixes.count(p) for p in prefixes}
++ sub_prefixes = [p for p, count in counts.items() if count > 1]
++ filtered = [attr for attr in attributes if _compute_subprefix(attr) not in sub_prefixes]
++
++ for candidate in filtered:
++ if value == getattr(libvirt, ''.join((prefix, candidate))):
++ name = candidate.lower().replace('_', ' ')
++ return name
++ return 'unknown'
++
++
++def _get_domain_event_detail(event, detail):
++ '''
++ Convert event and detail numeric values into a tuple of human readable strings
++ '''
++ event_name = _get_libvirt_enum_string('VIR_DOMAIN_EVENT_', event)
++ if event_name == 'unknown':
++ return event_name, 'unknown'
++
++ prefix = 'VIR_DOMAIN_EVENT_{0}_'.format(event_name.upper())
++ detail_name = _get_libvirt_enum_string(prefix, detail)
++
++ return event_name, detail_name
++
++
++def _salt_send_event(opaque, conn, data):
++ '''
++ Convenience function adding common data to the event and sending it
++ on the salt event bus.
++
++ :param opaque: the opaque data that is passed to the callback.
++ This is a dict with 'prefix', 'object' and 'event' keys.
++ :param conn: libvirt connection
++ :param data: additional event data dict to send
++ '''
++ tag_prefix = opaque['prefix']
++ object_type = opaque['object']
++ event_type = opaque['event']
++
++ # Prepare the connection URI to fit in the tag
++ # qemu+ssh://user@host:1234/system -> qemu+ssh/user@host:1234/system
++ uri = urlparse(conn.getURI())
++ uri_tag = [uri.scheme]
++ if uri.netloc:
++ uri_tag.append(uri.netloc)
++ path = uri.path.strip('/')
++ if path:
++ uri_tag.append(path)
++ uri_str = "/".join(uri_tag)
++
++ # Append some common data
++ all_data = {
++ 'uri': conn.getURI()
++ }
++ all_data.update(data)
++
++ tag = '/'.join((tag_prefix, uri_str, object_type, event_type))
++
++ # Actually send the event in salt
++ if __opts__.get('__role') == 'master':
++ salt.utils.event.get_master_event(
++ __opts__,
++ __opts__['sock_dir']).fire_event(all_data, tag)
++ else:
++ __salt__['event.send'](tag, all_data)
++
++
++def _salt_send_domain_event(opaque, conn, domain, event, event_data):
++ '''
++ Helper function sending a salt event for a libvirt domain.
++
++ :param opaque: the opaque data that is passed to the callback.
++ This is a dict with 'prefix', 'object' and 'event' keys.
++ :param conn: libvirt connection
++ :param domain: name of the domain related to the event
++ :param event: name of the event
++ :param event_data: additional event data dict to send
++ '''
++ data = {
++ 'domain': {
++ 'name': domain.name(),
++ 'id': domain.ID(),
++ 'uuid': domain.UUIDString()
++ },
++ 'event': event
++ }
++ data.update(event_data)
++ _salt_send_event(opaque, conn, data)
++
++
++def _domain_event_lifecycle_cb(conn, domain, event, detail, opaque):
++ '''
++ Domain lifecycle events handler
++ '''
++ event_str, detail_str = _get_domain_event_detail(event, detail)
++
++ _salt_send_domain_event(opaque, conn, domain, opaque['event'], {
++ 'event': event_str,
++ 'detail': detail_str
++ })
++
++
++def _domain_event_reboot_cb(conn, domain, opaque):
++ '''
++ Domain reboot events handler
++ '''
++ _salt_send_domain_event(opaque, conn, domain, opaque['event'], {})
++
++
++def _domain_event_rtc_change_cb(conn, domain, utcoffset, opaque):
++ '''
++ Domain RTC change events handler
++ '''
++ _salt_send_domain_event(opaque, conn, domain, opaque['event'], {
++ 'utcoffset': utcoffset
++ })
++
++
++def _domain_event_watchdog_cb(conn, domain, action, opaque):
++ '''
++ Domain watchdog events handler
++ '''
++ _salt_send_domain_event(opaque, conn, domain, opaque['event'], {
++ 'action': _get_libvirt_enum_string('VIR_DOMAIN_EVENT_WATCHDOG_', action)
++ })
++
++
++def _domain_event_io_error_cb(conn, domain, srcpath, devalias, action, reason, opaque):
++ '''
++ Domain I/O Error events handler
++ '''
++ _salt_send_domain_event(opaque, conn, domain, opaque['event'], {
++ 'srcPath': srcpath,
++ 'dev': devalias,
++ 'action': _get_libvirt_enum_string('VIR_DOMAIN_EVENT_IO_ERROR_', action),
++ 'reason': reason
++ })
++
++
++def _domain_event_graphics_cb(conn, domain, phase, local, remote, auth, subject, opaque):
++ '''
++ Domain graphics events handler
++ '''
++ prefix = 'VIR_DOMAIN_EVENT_GRAPHICS_'
++
++ def get_address(addr):
++ '''
++ transform address structure into event data piece
++ '''
++ data = {'family': _get_libvirt_enum_string('{0}_ADDRESS_'.format(prefix), addr['family']),
++ 'node': addr['node'],
++ 'service': addr['service']}
++ return data
++
++ _salt_send_domain_event(opaque, conn, domain, opaque['event'], {
++ 'phase': _get_libvirt_enum_string(prefix, phase),
++ 'local': get_address(local),
++ 'remote': get_address(remote),
++ 'authScheme': auth,
++ 'subject': [{'type': item[0], 'name': item[1]} for item in subject]
++ })
++
++
++def _domain_event_control_error_cb(conn, domain, opaque):
++ '''
++ Domain control error events handler
++ '''
++ _salt_send_domain_event(opaque, conn, domain, opaque['event'], {})
++
++
++def _domain_event_disk_change_cb(conn, domain, old_src, new_src, dev, reason, opaque):
++ '''
++ Domain disk change events handler
++ '''
++ _salt_send_domain_event(opaque, conn, domain, opaque['event'], {
++ 'oldSrcPath': old_src,
++ 'newSrcPath': new_src,
++ 'dev': dev,
++ 'reason': _get_libvirt_enum_string('VIR_DOMAIN_EVENT_DISK_', reason)
++ })
++
++
++def _domain_event_tray_change_cb(conn, domain, dev, reason, opaque):
++ '''
++ Domain tray change events handler
++ '''
++ _salt_send_domain_event(opaque, conn, domain, opaque['event'], {
++ 'dev': dev,
++ 'reason': _get_libvirt_enum_string('VIR_DOMAIN_EVENT_TRAY_CHANGE_', reason)
++ })
++
++
++def _domain_event_pmwakeup_cb(conn, domain, reason, opaque):
++ '''
++ Domain wakeup events handler
++ '''
++ _salt_send_domain_event(opaque, conn, domain, opaque['event'], {
++ 'reason': 'unknown' # currently unused
++ })
++
++
++def _domain_event_pmsuspend_cb(conn, domain, reason, opaque):
++ '''
++ Domain suspend events handler
++ '''
++ _salt_send_domain_event(opaque, conn, domain, opaque['event'], {
++ 'reason': 'unknown' # currently unused
++ })
++
++
++def _domain_event_balloon_change_cb(conn, domain, actual, opaque):
++ '''
++ Domain balloon change events handler
++ '''
++ _salt_send_domain_event(opaque, conn, domain, opaque['event'], {
++ 'actual': actual
++ })
++
++
++def _domain_event_pmsuspend_disk_cb(conn, domain, reason, opaque):
++ '''
++ Domain disk suspend events handler
++ '''
++ _salt_send_domain_event(opaque, conn, domain, opaque['event'], {
++ 'reason': 'unknown' # currently unused
++ })
++
++
++def _domain_event_block_job_cb(conn, domain, disk, job_type, status, opaque):
++ '''
++ Domain block job events handler
++ '''
++ _salt_send_domain_event(opaque, conn, domain, opaque['event'], {
++ 'disk': disk,
++ 'type': _get_libvirt_enum_string('VIR_DOMAIN_BLOCK_JOB_TYPE_', job_type),
++ 'status': _get_libvirt_enum_string('VIR_DOMAIN_BLOCK_JOB_', status)
++ })
++
++
++def _domain_event_device_removed_cb(conn, domain, dev, opaque):
++ '''
++ Domain device removal events handler
++ '''
++ _salt_send_domain_event(opaque, conn, domain, opaque['event'], {
++ 'dev': dev
++ })
++
++
++def _domain_event_tunable_cb(conn, domain, params, opaque):
++ '''
++ Domain tunable events handler
++ '''
++ _salt_send_domain_event(opaque, conn, domain, opaque['event'], {
++ 'params': params
++ })
++
++
++# pylint: disable=invalid-name
++def _domain_event_agent_lifecycle_cb(conn, domain, state, reason, opaque):
++ '''
++ Domain agent lifecycle events handler
++ '''
++ _salt_send_domain_event(opaque, conn, domain, opaque['event'], {
++ 'state': _get_libvirt_enum_string('VIR_CONNECT_DOMAIN_EVENT_AGENT_LIFECYCLE_STATE_', state),
++ 'reason': _get_libvirt_enum_string('VIR_CONNECT_DOMAIN_EVENT_AGENT_LIFECYCLE_REASON_', reason)
++ })
++
++
++def _domain_event_device_added_cb(conn, domain, dev, opaque):
++ '''
++ Domain device addition events handler
++ '''
++ _salt_send_domain_event(opaque, conn, domain, opaque['event'], {
++ 'dev': dev
++ })
++
++
++# pylint: disable=invalid-name
++def _domain_event_migration_iteration_cb(conn, domain, iteration, opaque):
++ '''
++ Domain migration iteration events handler
++ '''
++ _salt_send_domain_event(opaque, conn, domain, opaque['event'], {
++ 'iteration': iteration
++ })
++
++
++def _domain_event_job_completed_cb(conn, domain, params, opaque):
++ '''
++ Domain job completion events handler
++ '''
++ _salt_send_domain_event(opaque, conn, domain, opaque['event'], {
++ 'params': params
++ })
++
++
++def _domain_event_device_removal_failed_cb(conn, domain, dev, opaque):
++ '''
++ Domain device removal failure events handler
++ '''
++ _salt_send_domain_event(opaque, conn, domain, opaque['event'], {
++ 'dev': dev
++ })
++
++
++def _domain_event_metadata_change_cb(conn, domain, mtype, nsuri, opaque):
++ '''
++ Domain metadata change events handler
++ '''
++ _salt_send_domain_event(opaque, conn, domain, opaque['event'], {
++ 'type': _get_libvirt_enum_string('VIR_DOMAIN_METADATA_', mtype),
++ 'nsuri': nsuri
++ })
++
++
++def _domain_event_block_threshold_cb(conn, domain, dev, path, threshold, excess, opaque):
++ '''
++ Domain block threshold events handler
++ '''
++ _salt_send_domain_event(opaque, conn, domain, opaque['event'], {
++ 'dev': dev,
++ 'path': path,
++ 'threshold': threshold,
++ 'excess': excess
++ })
++
++
++def _network_event_lifecycle_cb(conn, net, event, detail, opaque):
++ '''
++ Network lifecycle events handler
++ '''
++
++ _salt_send_event(opaque, conn, {
++ 'network': {
++ 'name': net.name(),
++ 'uuid': net.UUIDString()
++ },
++ 'event': _get_libvirt_enum_string('VIR_NETWORK_EVENT_', event),
++ 'detail': 'unknown' # currently unused
++ })
++
++
++def _pool_event_lifecycle_cb(conn, pool, event, detail, opaque):
++ '''
++ Storage pool lifecycle events handler
++ '''
++ _salt_send_event(opaque, conn, {
++ 'pool': {
++ 'name': pool.name(),
++ 'uuid': pool.UUIDString()
++ },
++ 'event': _get_libvirt_enum_string('VIR_STORAGE_POOL_EVENT_', event),
++ 'detail': 'unknown' # currently unused
++ })
++
++
++def _pool_event_refresh_cb(conn, pool, opaque):
++ '''
++ Storage pool refresh events handler
++ '''
++ _salt_send_event(opaque, conn, {
++ 'pool': {
++ 'name': pool.name(),
++ 'uuid': pool.UUIDString()
++ },
++ 'event': opaque['event']
++ })
++
++
++def _nodedev_event_lifecycle_cb(conn, dev, event, detail, opaque):
++ '''
++ Node device lifecycle events handler
++ '''
++ _salt_send_event(opaque, conn, {
++ 'nodedev': {
++ 'name': dev.name()
++ },
++ 'event': _get_libvirt_enum_string('VIR_NODE_DEVICE_EVENT_', event),
++ 'detail': 'unknown' # currently unused
++ })
++
++
++def _nodedev_event_update_cb(conn, dev, opaque):
++ '''
++ Node device update events handler
++ '''
++ _salt_send_event(opaque, conn, {
++ 'nodedev': {
++ 'name': dev.name()
++ },
++ 'event': opaque['event']
++ })
++
++
++def _secret_event_lifecycle_cb(conn, secret, event, detail, opaque):
++ '''
++ Secret lifecycle events handler
++ '''
++ _salt_send_event(opaque, conn, {
++ 'secret': {
++ 'uuid': secret.UUIDString()
++ },
++ 'event': _get_libvirt_enum_string('VIR_SECRET_EVENT_', event),
++ 'detail': 'unknown' # currently unused
++ })
++
++
++def _secret_event_value_changed_cb(conn, secret, opaque):
++ '''
++ Secret value change events handler
++ '''
++ _salt_send_event(opaque, conn, {
++ 'secret': {
++ 'uuid': secret.UUIDString()
++ },
++ 'event': opaque['event']
++ })
++
++
++def _cleanup(cnx):
++ '''
++ Close the libvirt connection
++
++ :param cnx: libvirt connection
++ '''
++ log.debug('Closing libvirt connection: %s', cnx.getURI())
++ cnx.close()
++
++
++def _callbacks_cleanup(cnx, callback_ids):
++ '''
++ Unregister all the registered callbacks
++
++ :param cnx: libvirt connection
++ :param callback_ids: dictionary mapping a libvirt object type to an ID list
++ of callbacks to deregister
++ '''
++ for obj, ids in callback_ids.items():
++ register_name = REGISTER_FUNCTIONS[obj]
++ deregister_name = register_name.replace('Reg', 'Dereg')
++ deregister = getattr(cnx, deregister_name)
++ for callback_id in ids:
++ deregister(callback_id)
++
++
++def _register_callback(cnx, tag_prefix, obj, event, real_id):
++ '''
++ Helper function registering a callback
++
++ :param cnx: libvirt connection
++ :param tag_prefix: salt event tag prefix to use
++ :param obj: the libvirt object name for the event. Needs to
++ be one of the REGISTER_FUNCTIONS keys.
++ :param event: the event type name.
++ :param real_id: the libvirt name of an alternative event id to use or None
++
++ :return: integer value needed to deregister the callback
++ '''
++ libvirt_name = real_id
++ if real_id is None:
++ libvirt_name = 'VIR_{0}_EVENT_ID_{1}'.format(obj, event).upper()
++
++ if not hasattr(libvirt, libvirt_name):
++ log.warning('Skipping "%s/%s" events: libvirt too old', obj, event)
++ return None
++
++ libvirt_id = getattr(libvirt, libvirt_name)
++ callback_name = "_{0}_event_{1}_cb".format(obj, event)
++ callback = globals().get(callback_name, None)
++ if callback is None:
++ log.error('Missing function %s in engine', callback_name)
++ return None
++
++ register = getattr(cnx, REGISTER_FUNCTIONS[obj])
++ return register(None, libvirt_id, callback,
++ {'prefix': tag_prefix,
++ 'object': obj,
++ 'event': event})
++
++
++def _append_callback_id(ids, obj, callback_id):
++ '''
++ Helper function adding a callback ID to the IDs dict.
++ The callback ids dict maps an object to event callback ids.
++
++ :param ids: dict of callback IDs to update
++ :param obj: one of the keys of REGISTER_FUNCTIONS
++ :param callback_id: the result of _register_callback
++ '''
++ if obj not in ids:
++ ids[obj] = []
++ ids[obj].append(callback_id)
++
++
++def start(uri=None,
++ tag_prefix='salt/engines/libvirt_events',
++ filters=None):
++ '''
++ Listen to libvirt events and forward them to salt.
++
++ :param uri: libvirt URI to listen on.
++ Defaults to None to pick the first available local hypervisor
++ :param tag_prefix: the beginning of the salt event tag to use.
++ Defaults to 'salt/engines/libvirt_events'
++ :param filters: the list of events to listen on. Defaults to 'all'
++ '''
++ if filters is None:
++ filters = ['all']
++ try:
++ libvirt.virEventRegisterDefaultImpl()
++
++ cnx = libvirt.openReadOnly(uri)
++ log.debug('Opened libvirt uri: %s', cnx.getURI())
++
++ callback_ids = {}
++ all_filters = "all" in filters
++
++ for obj, event_defs in CALLBACK_DEFS.items():
++ for event, real_id in event_defs:
++ event_filter = "/".join((obj, event))
++ if event_filter not in filters and obj not in filters and not all_filters:
++ continue
++ registered_id = _register_callback(cnx, tag_prefix,
++ obj, event, real_id)
++ if registered_id:
++ _append_callback_id(callback_ids, obj, registered_id)
++
++ exit_loop = False
++ while not exit_loop:
++ exit_loop = libvirt.virEventRunDefaultImpl() < 0
++
++ except Exception as err: # pylint: disable=broad-except
++ log.exception(err)
++ finally:
++ _callbacks_cleanup(cnx, callback_ids)
++ _cleanup(cnx)
+diff --git a/tests/unit/engines/test_libvirt_events.py b/tests/unit/engines/test_libvirt_events.py
+new file mode 100644
+index 0000000000..6608aaf648
+--- /dev/null
++++ b/tests/unit/engines/test_libvirt_events.py
+@@ -0,0 +1,159 @@
++# -*- coding: utf-8 -*-
++'''
++unit tests for the libvirt_events engine
++'''
++# Import Python libs
++from __future__ import absolute_import, print_function, unicode_literals
++
++# Import Salt Testing Libs
++from tests.support.mixins import LoaderModuleMockMixin
++from tests.support.unit import skipIf, TestCase
++from tests.support.mock import (
++ NO_MOCK,
++ NO_MOCK_REASON,
++ MagicMock,
++ patch)
++
++# Import Salt Libs
++import salt.engines.libvirt_events as libvirt_events
++
++
++# pylint: disable=protected-access,attribute-defined-outside-init,invalid-name,unused-argument,no-self-use
++
++
++@skipIf(NO_MOCK, NO_MOCK_REASON)
++class EngineLibvirtEventTestCase(TestCase, LoaderModuleMockMixin):
++ '''
++ Test cases for salt.engine.libvirt_events
++ '''
++
++ def setup_loader_modules(self):
++ patcher = patch('salt.engines.libvirt_events.libvirt')
++ self.mock_libvirt = patcher.start()
++ self.mock_libvirt.getVersion.return_value = 2000000
++ self.mock_libvirt.virEventRunDefaultImpl.return_value = -1 # Don't loop forever
++ self.mock_libvirt.VIR_DOMAIN_EVENT_ID_LIFECYCLE = 0
++ self.mock_libvirt.VIR_DOMAIN_EVENT_ID_REBOOT = 1
++ self.addCleanup(patcher.stop)
++ self.addCleanup(delattr, self, 'mock_libvirt')
++ return {libvirt_events: {}}
++
++ @patch('salt.engines.libvirt_events.libvirt',
++ VIR_PREFIX_NONE=0,
++ VIR_PREFIX_ONE=1,
++ VIR_PREFIX_TWO=2,
++ VIR_PREFIX_SUB_FOO=0,
++ VIR_PREFIX_SUB_BAR=1,
++ VIR_PREFIX_SUB_FOOBAR=2)
++ def test_get_libvirt_enum_string_subprefix(self, libvirt_mock):
++ '''
++ Make sure the libvirt enum value to string works reliably with
++ elements with a sub prefix, eg VIR_PREFIX_SUB_* in this case.
++ '''
++ # Test case with a sub prefix
++
++ assert libvirt_events._get_libvirt_enum_string('VIR_PREFIX_', 2) == 'two'
++
++ @patch('salt.engines.libvirt_events.libvirt',
++ VIR_PREFIX_FOO=0,
++ VIR_PREFIX_FOO_BAR=1,
++ VIR_PREFIX_BAR_FOO=2)
++ def test_get_libvirt_enum_string_underscores(self, libvirt_mock):
++ '''
++ Make sure the libvirt enum value to string works reliably and items
++ with an underscore aren't confused with sub prefixes.
++ '''
++ assert libvirt_events._get_libvirt_enum_string('VIR_PREFIX_', 1) == 'foo bar'
++
++ @patch('salt.engines.libvirt_events.libvirt',
++ VIR_DOMAIN_EVENT_DEFINED=0,
++ VIR_DOMAIN_EVENT_UNDEFINED=1,
++ VIR_DOMAIN_EVENT_DEFINED_ADDED=0,
++ VIR_DOMAIN_EVENT_DEFINED_UPDATED=1)
++ def test_get_domain_event_detail(self, mock_libvirt):
++ '''
++ Test get_domain_event_detail function
++ '''
++ assert libvirt_events._get_domain_event_detail(1, 2) == ('undefined', 'unknown')
++ assert libvirt_events._get_domain_event_detail(0, 1) == ('defined', 'updated')
++ assert libvirt_events._get_domain_event_detail(4, 2) == ('unknown', 'unknown')
++
++ @patch('salt.engines.libvirt_events.libvirt', VIR_NETWORK_EVENT_ID_LIFECYCLE=1000)
++ def test_event_register(self, mock_libvirt):
++ '''
++ Test that the libvirt_events engine actually registers the event callbacks
++ and cleans them up before leaving.
++ '''
++ mock_cnx = MagicMock()
++ mock_libvirt.openReadOnly.return_value = mock_cnx
++
++ mock_cnx.networkEventRegisterAny.return_value = 10000
++
++ libvirt_events.start('test:///', 'test/prefix')
++
++ # Check that the connection has been opened
++ mock_libvirt.openReadOnly.assert_called_once_with('test:///')
++
++ # Check that the connection has been closed
++ mock_cnx.close.assert_called_once()
++
++ # Check events registration and deregistration
++ mock_cnx.domainEventRegisterAny.assert_any_call(
++ None, mock_libvirt.VIR_DOMAIN_EVENT_ID_LIFECYCLE,
++ libvirt_events._domain_event_lifecycle_cb,
++ {'prefix': 'test/prefix', 'object': 'domain', 'event': 'lifecycle'})
++ mock_cnx.networkEventRegisterAny.assert_any_call(
++ None, mock_libvirt.VIR_NETWORK_EVENT_ID_LIFECYCLE,
++ libvirt_events._network_event_lifecycle_cb,
++ {'prefix': 'test/prefix', 'object': 'network', 'event': 'lifecycle'})
++
++ # Check that the deregister events are called with the result of register
++ mock_cnx.networkEventDeregisterAny.assert_called_with(
++ mock_cnx.networkEventRegisterAny.return_value)
++
++ # Check that the default 'all' filter actually worked
++ counts = {obj: len(callback_def) for obj, callback_def in libvirt_events.CALLBACK_DEFS.items()}
++ for obj, count in counts.items():
++ register = libvirt_events.REGISTER_FUNCTIONS[obj]
++ assert getattr(mock_cnx, register).call_count == count
++
++ def test_event_skipped(self):
++ '''
++ Test that events are skipped if their ID isn't defined in the libvirt
++ module (older libvirt)
++ '''
++ self.mock_libvirt.mock_add_spec([
++ 'openReadOnly',
++ 'virEventRegisterDefaultImpl',
++ 'virEventRunDefaultImpl',
++ 'VIR_DOMAIN_EVENT_ID_LIFECYCLE'], spec_set=True)
++
++ libvirt_events.start('test:///', 'test/prefix')
++
++ # Check events registration and deregistration
++ mock_cnx = self.mock_libvirt.openReadOnly.return_value
++
++ mock_cnx.domainEventRegisterAny.assert_any_call(
++ None, self.mock_libvirt.VIR_DOMAIN_EVENT_ID_LIFECYCLE,
++ libvirt_events._domain_event_lifecycle_cb,
++ {'prefix': 'test/prefix', 'object': 'domain', 'event': 'lifecycle'})
++
++ # Network events should have been skipped
++ mock_cnx.networkEventRegisterAny.assert_not_called()
++
++ def test_event_filtered(self):
++ '''
++ Test that only the events matching the configured filters are
++ registered.
++ '''
++ libvirt_events.start('test', 'test/prefix', 'domain/lifecycle')
++
++ # Check events registration and deregistration
++ mock_cnx = self.mock_libvirt.openReadOnly.return_value
++
++ mock_cnx.domainEventRegisterAny.assert_any_call(
++ None, 0, libvirt_events._domain_event_lifecycle_cb,
++ {'prefix': 'test/prefix', 'object': 'domain', 'event': 'lifecycle'})
++
++ # Network events should have been filtered out
++ mock_cnx.networkEventRegisterAny.assert_not_called()
+--
+2.17.1
+
+
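Illustrative aside (not part of the patch above): the engine's ``_salt_send_event`` helper flattens the libvirt connection URI into the event tag, between the configured prefix and the object/event parts. A standalone sketch of the same tag layout:

.. code-block:: python

    # Sketch: rebuild the tag layout used by _salt_send_event().
    from urllib.parse import urlparse  # the engine imports this via salt.ext.six


    def build_tag(prefix, conn_uri, object_type, event_type):
        # qemu+ssh://user@host:1234/system -> qemu+ssh/user@host:1234/system
        uri = urlparse(conn_uri)
        parts = [uri.scheme]
        if uri.netloc:
            parts.append(uri.netloc)
        path = uri.path.strip('/')
        if path:
            parts.append(path)
        return '/'.join((prefix, '/'.join(parts), object_type, event_type))


    assert build_tag('salt/engines/libvirt_events',
                     'qemu+ssh://user@host:1234/system',
                     'domain', 'lifecycle') == \
        'salt/engines/libvirt_events/qemu+ssh/user@host:1234/system/domain/lifecycle'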
diff --git a/add-environment-variable-to-know-if-yum-is-invoked-f.patch b/add-environment-variable-to-know-if-yum-is-invoked-f.patch
new file mode 100644
index 0000000..4cdb448
--- /dev/null
+++ b/add-environment-variable-to-know-if-yum-is-invoked-f.patch
@@ -0,0 +1,214 @@
+From 39d9d9fb26f9aff83fce4ce67d5b2a6bd4f60b95 Mon Sep 17 00:00:00 2001
+From: Marcelo Chiaradia
+Date: Thu, 7 Jun 2018 10:29:41 +0200
+Subject: [PATCH] Add environment variable to know if yum is invoked from
+ Salt(bsc#1057635)
+
+---
+ salt/modules/yumpkg.py | 59 +++++++++++++++++++++++++++++++++-----------------
+ 1 file changed, 39 insertions(+), 20 deletions(-)
+
+diff --git a/salt/modules/yumpkg.py b/salt/modules/yumpkg.py
+index 9ce4926790..51832bf883 100644
+--- a/salt/modules/yumpkg.py
++++ b/salt/modules/yumpkg.py
+@@ -452,7 +452,8 @@ def latest_version(*names, **kwargs):
+ out = __salt__['cmd.run_all'](cmd,
+ output_loglevel='trace',
+ ignore_retcode=True,
+- python_shell=False)
++ python_shell=False,
++ env={"SALT_RUNNING": '1'})
+ if out['retcode'] != 0:
+ if out['stderr']:
+ # Check first if this is just a matter of the packages being
+@@ -850,7 +851,8 @@ def list_repo_pkgs(*args, **kwargs):
+ yum_version = None if _yum() != 'yum' else _LooseVersion(
+ __salt__['cmd.run'](
+ ['yum', '--version'],
+- python_shell=False
++ python_shell=False,
++ env={"SALT_RUNNING": '1'}
+ ).splitlines()[0].strip()
+ )
+ # Really old version of yum; does not even have --showduplicates option
+@@ -865,7 +867,8 @@ def list_repo_pkgs(*args, **kwargs):
+ cmd_prefix + [pkg_src],
+ output_loglevel='trace',
+ ignore_retcode=True,
+- python_shell=False
++ python_shell=False,
++ env={"SALT_RUNNING": '1'}
+ )
+ if out['retcode'] == 0:
+ _parse_output(out['stdout'], strict=True)
+@@ -882,7 +885,8 @@ def list_repo_pkgs(*args, **kwargs):
+ cmd_prefix + [pkg_src],
+ output_loglevel='trace',
+ ignore_retcode=True,
+- python_shell=False
++ python_shell=False,
++ env={"SALT_RUNNING": '1'}
+ )
+ if out['retcode'] == 0:
+ _parse_output(out['stdout'], strict=True)
+@@ -898,7 +902,8 @@ def list_repo_pkgs(*args, **kwargs):
+ out = __salt__['cmd.run_all'](cmd,
+ output_loglevel='trace',
+ ignore_retcode=True,
+- python_shell=False)
++ python_shell=False,
++ env={"SALT_RUNNING": '1'})
+ if out['retcode'] != 0 and 'Error:' in out['stdout']:
+ continue
+ _parse_output(out['stdout'])
+@@ -955,7 +960,8 @@ def list_upgrades(refresh=True, **kwargs):
+ out = __salt__['cmd.run_all'](cmd,
+ output_loglevel='trace',
+ ignore_retcode=True,
+- python_shell=False)
++ python_shell=False,
++ env={"SALT_RUNNING": '1'})
+ if out['retcode'] != 0 and 'Error:' in out:
+ return {}
+
+@@ -1090,12 +1096,13 @@ def refresh_db(**kwargs):
+ clean_cmd.extend(options)
+ update_cmd.extend(options)
+
+- __salt__['cmd.run'](clean_cmd, python_shell=False)
++ __salt__['cmd.run'](clean_cmd, python_shell=False, env={"SALT_RUNNING": '1'})
+ if check_update_:
+ result = __salt__['cmd.retcode'](update_cmd,
+ output_loglevel='trace',
+ ignore_retcode=True,
+- python_shell=False)
++ python_shell=False,
++ env={"SALT_RUNNING": '1'})
+ return retcodes.get(result, False)
+ return True
+
+@@ -1634,7 +1641,8 @@ def install(name=None,
+ cmd,
+ output_loglevel='trace',
+ python_shell=False,
+- redirect_stderr=True
++ redirect_stderr=True,
++ env={"SALT_RUNNING": '1'}
+ )
+ if out['retcode'] != 0:
+ errors.append(out['stdout'])
+@@ -1654,7 +1662,8 @@ def install(name=None,
+ cmd,
+ output_loglevel='trace',
+ python_shell=False,
+- redirect_stderr=True
++ redirect_stderr=True,
++ env={"SALT_RUNNING": '1'}
+ )
+ if out['retcode'] != 0:
+ errors.append(out['stdout'])
+@@ -1674,7 +1683,8 @@ def install(name=None,
+ cmd,
+ output_loglevel='trace',
+ python_shell=False,
+- redirect_stderr=True
++ redirect_stderr=True,
++ env={"SALT_RUNNING": '1'}
+ )
+ if out['retcode'] != 0:
+ errors.append(out['stdout'])
+@@ -1866,7 +1876,8 @@ def upgrade(name=None,
+
+ result = __salt__['cmd.run_all'](cmd,
+ output_loglevel='trace',
+- python_shell=False)
++ python_shell=False,
++ env={"SALT_RUNNING": '1'})
+ __context__.pop('pkg.list_pkgs', None)
+ new = list_pkgs()
+ ret = salt.utils.data.compare_dicts(old, new)
+@@ -1957,7 +1968,8 @@ def remove(name=None, pkgs=None, **kwargs): # pylint: disable=W0613
+ out = __salt__['cmd.run_all'](
+ [_yum(), '-y', 'remove'] + targets,
+ output_loglevel='trace',
+- python_shell=False
++ python_shell=False,
++ env={"SALT_RUNNING": '1'}
+ )
+
+ if out['retcode'] != 0 and out['stderr']:
+@@ -2094,7 +2106,8 @@ def hold(name=None, pkgs=None, sources=None, normalize=True, **kwargs): # pylin
+ else:
+ out = __salt__['cmd.run_all'](
+ [_yum(), 'versionlock', target],
+- python_shell=False
++ python_shell=False,
++ env={"SALT_RUNNING": '1'}
+ )
+
+ if out['retcode'] == 0:
+@@ -2203,7 +2216,8 @@ def unhold(name=None, pkgs=None, sources=None, **kwargs): # pylint: disable=W06
+ else:
+ out = __salt__['cmd.run_all'](
+ [_yum(), 'versionlock', 'delete'] + search_locks,
+- python_shell=False
++ python_shell=False,
++ env={"SALT_RUNNING": '1'}
+ )
+
+ if out['retcode'] == 0:
+@@ -2254,7 +2268,8 @@ def list_holds(pattern=__HOLD_PATTERN, full=True):
+ _check_versionlock()
+
+ out = __salt__['cmd.run']([_yum(), 'versionlock', 'list'],
+- python_shell=False)
++ python_shell=False,
++ env={"SALT_RUNNING": '1'})
+ ret = []
+ for line in salt.utils.itertools.split(out, '\n'):
+ match = _get_hold(line, pattern=pattern, full=full)
+@@ -2319,7 +2334,8 @@ def group_list():
+ out = __salt__['cmd.run_stdout'](
+ [_yum(), 'grouplist', 'hidden'],
+ output_loglevel='trace',
+- python_shell=False
++ python_shell=False,
++ env={"SALT_RUNNING": '1'}
+ )
+ key = None
+ for line in salt.utils.itertools.split(out, '\n'):
+@@ -2386,7 +2402,8 @@ def group_info(name, expand=False):
+ out = __salt__['cmd.run_stdout'](
+ cmd,
+ output_loglevel='trace',
+- python_shell=False
++ python_shell=False,
++ env={"SALT_RUNNING": '1'}
+ )
+
+ g_info = {}
+@@ -3055,7 +3072,8 @@ def download(*packages):
+ __salt__['cmd.run'](
+ cmd,
+ output_loglevel='trace',
+- python_shell=False
++ python_shell=False,
++ env={"SALT_RUNNING": '1'}
+ )
+ ret = {}
+ for dld_result in os.listdir(CACHE_DIR):
+@@ -3130,7 +3148,8 @@ def _get_patches(installed_only=False):
+ cmd = [_yum(), '--quiet', 'updateinfo', 'list', 'all']
+ ret = __salt__['cmd.run_stdout'](
+ cmd,
+- python_shell=False
++ python_shell=False,
++ env={"SALT_RUNNING": '1'}
+ )
+ for line in salt.utils.itertools.split(ret, os.linesep):
+ inst, advisory_id, sev, pkg = re.match(r'([i|\s]) ([^\s]+) +([^\s]+) +([^\s]+)',
+--
+2.13.7
+
+
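A note on the change above: exporting SALT_RUNNING=1 into every yum invocation lets anything running inside that package-manager process (a vendor plugin, a wrapper script) detect that Salt started it. A minimal sketch of such a check, assuming nothing beyond the standard library and the variable name set by this patch:

    import os

    def invoked_from_salt():
        # the patched yumpkg calls export SALT_RUNNING='1' into the child environment
        return os.environ.get('SALT_RUNNING') == '1'

    if invoked_from_salt():
        # hypothetical hook: skip interactive prompts, add extra logging, etc.
        pass
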
diff --git a/add-other-attribute-to-gecos-fields-to-avoid-inconsi.patch b/add-other-attribute-to-gecos-fields-to-avoid-inconsi.patch
new file mode 100644
index 0000000..c99ca59
--- /dev/null
+++ b/add-other-attribute-to-gecos-fields-to-avoid-inconsi.patch
@@ -0,0 +1,294 @@
+From cc8d6eaddf59973a94512779853558789b56ca3e Mon Sep 17 00:00:00 2001
+From: =?UTF-8?q?Pablo=20Su=C3=A1rez=20Hern=C3=A1ndez?=
+
+Date: Wed, 25 Apr 2018 12:55:36 +0100
+Subject: [PATCH] Add 'other' attribute to GECOS fields to avoid
+ inconsistencies with chfn
+
+Fix unsupported chars checking on GECOS fields
+
+Add unit test for new method 'user.chother'
+
+Do make comparisons in a single line
+
+Add 'other' as valid kwargs for 'user.add' method
+---
+ salt/modules/useradd.py | 41 ++++++++++++++++++++++++++++----------
+ salt/states/user.py | 28 ++++++++++++++++++--------
+ tests/unit/modules/test_useradd.py | 36 +++++++++++++++++++++++++++++++--
+ 3 files changed, 84 insertions(+), 21 deletions(-)
+
+diff --git a/salt/modules/useradd.py b/salt/modules/useradd.py
+index a61ba0e960..fc3c82a8bc 100644
+--- a/salt/modules/useradd.py
++++ b/salt/modules/useradd.py
+@@ -60,17 +60,18 @@ def _get_gecos(name):
+ Retrieve GECOS field info and return it in dictionary form
+ '''
+ gecos_field = salt.utils.stringutils.to_unicode(
+- pwd.getpwnam(_quote_username(name)).pw_gecos).split(',', 3)
++ pwd.getpwnam(_quote_username(name)).pw_gecos).split(',', 4)
+ if not gecos_field:
+ return {}
+ else:
+ # Assign empty strings for any unspecified trailing GECOS fields
+- while len(gecos_field) < 4:
++ while len(gecos_field) < 5:
+ gecos_field.append('')
+ return {'fullname': salt.utils.locales.sdecode(gecos_field[0]),
+ 'roomnumber': salt.utils.locales.sdecode(gecos_field[1]),
+ 'workphone': salt.utils.locales.sdecode(gecos_field[2]),
+- 'homephone': salt.utils.locales.sdecode(gecos_field[3])}
++ 'homephone': salt.utils.locales.sdecode(gecos_field[3]),
++ 'other': salt.utils.locales.sdecode(gecos_field[4])}
+
+
+ def _build_gecos(gecos_dict):
+@@ -78,10 +79,11 @@ def _build_gecos(gecos_dict):
+ Accepts a dictionary entry containing GECOS field names and their values,
+ and returns a full GECOS comment string, to be used with usermod.
+ '''
+- return '{0},{1},{2},{3}'.format(gecos_dict.get('fullname', ''),
+- gecos_dict.get('roomnumber', ''),
+- gecos_dict.get('workphone', ''),
+- gecos_dict.get('homephone', '')).rstrip(',')
++ return '{0},{1},{2},{3},{4}'.format(gecos_dict.get('fullname', ''),
++ gecos_dict.get('roomnumber', ''),
++ gecos_dict.get('workphone', ''),
++ gecos_dict.get('homephone', ''),
++ gecos_dict.get('other', ''),).rstrip(',')
+
+
+ def _update_gecos(name, key, value, root=None):
+@@ -124,6 +126,7 @@ def add(name,
+ roomnumber='',
+ workphone='',
+ homephone='',
++ other='',
+ createhome=True,
+ loginclass=None,
+ root=None,
+@@ -237,6 +240,8 @@ def add(name,
+ chworkphone(name, workphone)
+ if homephone:
+ chhomephone(name, homephone)
++ if other:
++ chother(name, other)
+ return True
+
+
+@@ -507,6 +512,19 @@ def chhomephone(name, homephone):
+ return _update_gecos(name, 'homephone', homephone)
+
+
++def chother(name, other):
++ '''
++ Change the user's other GECOS attribute
++
++ CLI Example:
++
++ .. code-block:: bash
++
++ salt '*' user.chother foobar
++ '''
++ return _update_gecos(name, 'other', other)
++
++
+ def chloginclass(name, loginclass, root=None):
+ '''
+ Change the default login class of the user
+@@ -588,9 +606,9 @@ def _format_info(data):
+ Return user information in a pretty way
+ '''
+ # Put GECOS info into a list
+- gecos_field = salt.utils.stringutils.to_unicode(data.pw_gecos).split(',', 3)
+- # Make sure our list has at least four elements
+- while len(gecos_field) < 4:
++ gecos_field = salt.utils.stringutils.to_unicode(data.pw_gecos).split(',', 4)
++ # Make sure our list has at least five elements
++ while len(gecos_field) < 5:
+ gecos_field.append('')
+
+ return {'gid': data.pw_gid,
+@@ -603,7 +621,8 @@ def _format_info(data):
+ 'fullname': gecos_field[0],
+ 'roomnumber': gecos_field[1],
+ 'workphone': gecos_field[2],
+- 'homephone': gecos_field[3]}
++ 'homephone': gecos_field[3],
++ 'other': gecos_field[4]}
+
+
+ @salt.utils.decorators.path.which('id')
+diff --git a/salt/states/user.py b/salt/states/user.py
+index f4ae81dd31..34f5a9d541 100644
+--- a/salt/states/user.py
++++ b/salt/states/user.py
+@@ -68,6 +68,7 @@ def _changes(name,
+ roomnumber='',
+ workphone='',
+ homephone='',
++ other='',
+ loginclass=None,
+ date=None,
+ mindays=0,
+@@ -170,24 +171,26 @@ def _changes(name,
+
+ # MacOS doesn't have full GECOS support, so check for the "ch" functions
+ # and ignore these parameters if these functions do not exist.
+- if 'user.chroomnumber' in __salt__ \
+- and roomnumber is not None:
++ if 'user.chroomnumber' in __salt__ and roomnumber is not None:
+ roomnumber = sdecode_if_string(roomnumber)
+ lusr['roomnumber'] = sdecode_if_string(lusr['roomnumber'])
+ if lusr['roomnumber'] != roomnumber:
+ change['roomnumber'] = roomnumber
+- if 'user.chworkphone' in __salt__ \
+- and workphone is not None:
++ if 'user.chworkphone' in __salt__ and workphone is not None:
+ workphone = sdecode_if_string(workphone)
+ lusr['workphone'] = sdecode_if_string(lusr['workphone'])
+ if lusr['workphone'] != workphone:
+ change['workphone'] = workphone
+- if 'user.chhomephone' in __salt__ \
+- and homephone is not None:
++ if 'user.chhomephone' in __salt__ and homephone is not None:
+ homephone = sdecode_if_string(homephone)
+ lusr['homephone'] = sdecode_if_string(lusr['homephone'])
+ if lusr['homephone'] != homephone:
+ change['homephone'] = homephone
++ if 'user.chother' in __salt__ and other is not None:
++ other = sdecode_if_string(other)
++ lusr['other'] = sdecode_if_string(lusr['other'])
++ if lusr['other'] != other:
++ change['other'] = other
+ # OpenBSD/FreeBSD login class
+ if __grains__['kernel'] in ('OpenBSD', 'FreeBSD'):
+ if loginclass:
+@@ -236,6 +239,7 @@ def present(name,
+ roomnumber=None,
+ workphone=None,
+ homephone=None,
++ other=None,
+ loginclass=None,
+ date=None,
+ mindays=None,
+@@ -377,7 +381,10 @@ def present(name,
+
+ homephone
+ The user's home phone number (not supported in MacOS)
+- If GECOS field contains more than 3 commas, this field will have the rest of 'em
++
++ other
++ The user's other attribute (not supported in MacOS)
++ If GECOS field contains more than 4 commas, this field will have the rest of 'em
+
+ .. versionchanged:: 2014.7.0
+ Shadow attribute support added.
+@@ -448,6 +455,8 @@ def present(name,
+ workphone = sdecode(workphone)
+ if homephone is not None:
+ homephone = sdecode(homephone)
++ if other is not None:
++ other = sdecode(other)
+
+ # createhome not supported on Windows or Mac
+ if __grains__['kernel'] in ('Darwin', 'Windows'):
+@@ -460,7 +469,7 @@ def present(name,
+
+ # the comma is used to separate field in GECOS, thus resulting into
+ # salt adding the end of fullname each time this function is called
+- for gecos_field in ['fullname', 'roomnumber', 'workphone']:
++ for gecos_field in [fullname, roomnumber, workphone]:
+ if isinstance(gecos_field, string_types) and ',' in gecos_field:
+ ret['comment'] = "Unsupported char ',' in {0}".format(gecos_field)
+ ret['result'] = False
+@@ -519,6 +528,7 @@ def present(name,
+ roomnumber,
+ workphone,
+ homephone,
++ other,
+ loginclass,
+ date,
+ mindays,
+@@ -654,6 +664,7 @@ def present(name,
+ roomnumber,
+ workphone,
+ homephone,
++ other,
+ loginclass,
+ date,
+ mindays,
+@@ -705,6 +716,7 @@ def present(name,
+ 'roomnumber': roomnumber,
+ 'workphone': workphone,
+ 'homephone': homephone,
++ 'other': other,
+ 'createhome': createhome,
+ 'nologinit': nologinit,
+ 'loginclass': loginclass}
+diff --git a/tests/unit/modules/test_useradd.py b/tests/unit/modules/test_useradd.py
+index fa30a0df71..e79c78c663 100644
+--- a/tests/unit/modules/test_useradd.py
++++ b/tests/unit/modules/test_useradd.py
+@@ -46,7 +46,8 @@ class UserAddTestCase(TestCase, LoaderModuleMockMixin):
+ 'fullname': 'root',
+ 'roomnumber': '',
+ 'workphone': '',
+- 'homephone': ''}
++ 'homephone': '',
++ 'other': ''}
+
+ @classmethod
+ def tearDownClass(cls):
+@@ -96,7 +97,8 @@ class UserAddTestCase(TestCase, LoaderModuleMockMixin):
+ 'fullname': 'root',
+ 'roomnumber': '',
+ 'workphone': '',
+- 'homephone': ''}]
++ 'homephone': '',
++ 'other': ''}]
+ with patch('salt.modules.useradd._format_info', MagicMock(return_value=self.mock_pwall)):
+ self.assertEqual(useradd.getent(), ret)
+
+@@ -330,6 +332,36 @@ class UserAddTestCase(TestCase, LoaderModuleMockMixin):
+ with patch.object(useradd, 'info', mock):
+ self.assertFalse(useradd.chhomephone('salt', 1))
+
++ # 'chother' function tests: 1
++
++ def test_chother(self):
++ '''
++ Test if the user's other GECOS attribute is changed
++ '''
++ mock = MagicMock(return_value=False)
++ with patch.object(useradd, '_get_gecos', mock):
++ self.assertFalse(useradd.chother('salt', 1))
++
++ mock = MagicMock(return_value={'other': 'foobar'})
++ with patch.object(useradd, '_get_gecos', mock):
++ self.assertTrue(useradd.chother('salt', 'foobar'))
++
++ mock = MagicMock(return_value={'other': 'foobar2'})
++ with patch.object(useradd, '_get_gecos', mock):
++ mock = MagicMock(return_value=None)
++ with patch.dict(useradd.__salt__, {'cmd.run': mock}):
++ mock = MagicMock(return_value={'other': 'foobar3'})
++ with patch.object(useradd, 'info', mock):
++ self.assertFalse(useradd.chother('salt', 'foobar'))
++
++ mock = MagicMock(return_value={'other': 'foobar3'})
++ with patch.object(useradd, '_get_gecos', mock):
++ mock = MagicMock(return_value=None)
++ with patch.dict(useradd.__salt__, {'cmd.run': mock}):
++ mock = MagicMock(return_value={'other': 'foobar3'})
++ with patch.object(useradd, 'info', mock):
++ self.assertFalse(useradd.chother('salt', 'foobar'))
++
+ # 'info' function tests: 1
+
+ @skipIf(HAS_PWD is False, 'The pwd module is not available')
+--
+2.13.7
+
+
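For reference, the GECOS handling introduced above amounts to splitting the comment field into five comma-separated parts instead of four and joining them back the same way. A standalone round-trip sketch in plain Python (no Salt helpers; field names as in the patch):

    def parse_gecos(gecos):
        # fullname,roomnumber,workphone,homephone,other -- pad missing fields
        fields = gecos.split(',', 4)
        fields += [''] * (5 - len(fields))
        keys = ('fullname', 'roomnumber', 'workphone', 'homephone', 'other')
        return dict(zip(keys, fields))

    def build_gecos(info):
        return '{0},{1},{2},{3},{4}'.format(
            info.get('fullname', ''), info.get('roomnumber', ''),
            info.get('workphone', ''), info.get('homephone', ''),
            info.get('other', '')).rstrip(',')

    parse_gecos('Jane Doe,42,,555-0100,on-call')
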
diff --git a/add-saltssh-multi-version-support-across-python-inte.patch b/add-saltssh-multi-version-support-across-python-inte.patch
index 43652c9..4538647 100644
--- a/add-saltssh-multi-version-support-across-python-inte.patch
+++ b/add-saltssh-multi-version-support-across-python-inte.patch
@@ -1,4 +1,4 @@
-From 36bc22560e050b7afe3d872aed99c0cdb9fde282 Mon Sep 17 00:00:00 2001
+From 23aba97ccbdf9952f6a8107a8d90b40d0d2c41ee Mon Sep 17 00:00:00 2001
From: Bo Maryniuk
Date: Mon, 12 Mar 2018 12:01:39 +0100
Subject: [PATCH] Add SaltSSH multi-version support across Python
@@ -255,17 +255,15 @@ Lintfix
Set master_top_first to False by default
---
doc/topics/releases/fluorine.rst | 178 +++++++++++
- salt/client/ssh/__init__.py | 66 ++--
- salt/client/ssh/ssh_py_shim.py | 95 ++++--
+ salt/client/ssh/__init__.py | 60 ++--
+ salt/client/ssh/ssh_py_shim.py | 93 ++++--
salt/client/ssh/wrapper/__init__.py | 2 +-
salt/config/__init__.py | 1 +
- salt/modules/zfs.py | 4 +-
- salt/modules/zpool.py | 4 +-
salt/state.py | 2 +-
salt/utils/hashutils.py | 37 +++
salt/utils/thin.py | 450 +++++++++++++++++++-------
tests/unit/utils/test_thin.py | 612 ++++++++++++++++++++++++++++++++++++
- 11 files changed, 1265 insertions(+), 186 deletions(-)
+ 9 files changed, 1258 insertions(+), 177 deletions(-)
create mode 100644 doc/topics/releases/fluorine.rst
create mode 100644 tests/unit/utils/test_thin.py
@@ -454,10 +452,10 @@ index 0000000000..40c69e25cc
+Salt version is also available on the Master machine, although does not need to be directly
+installed together with the older Python interpreter.
diff --git a/salt/client/ssh/__init__.py b/salt/client/ssh/__init__.py
-index f1c1ad9a22..399facf5c8 100644
+index 141e1c6850..f1300b5698 100644
--- a/salt/client/ssh/__init__.py
+++ b/salt/client/ssh/__init__.py
-@@ -150,14 +150,10 @@ EX_PYTHON_INVALID={EX_THIN_PYTHON_INVALID}
+@@ -150,9 +150,7 @@ EX_PYTHON_INVALID={EX_THIN_PYTHON_INVALID}
PYTHON_CMDS="python3 python27 python2.7 python26 python2.6 python2 python"
for py_cmd in $PYTHON_CMDS
do
@@ -466,16 +464,9 @@ index f1c1ad9a22..399facf5c8 100644
- and sys.version_info[0] == {{HOST_PY_MAJOR}}));"
+ if command -v "$py_cmd" >/dev/null 2>&1 && "$py_cmd" -c "import sys; sys.exit(not (sys.version_info >= (2, 6)));"
then
-- py_cmd_path=`"$py_cmd" -c \
-- 'from __future__ import print_function;
-- import sys; print(sys.executable);'`
-- cmdpath=$(command -v $py_cmd 2>/dev/null || which $py_cmd 2>/dev/null)
-+ py_cmd_path=`"$py_cmd" -c 'from __future__ import print_function;import sys; print(sys.executable);'`
-+ cmdpath=`command -v $py_cmd 2>/dev/null || which $py_cmd 2>/dev/null`
- if file $cmdpath | grep "shell script" > /dev/null
- then
- ex_vars="'PATH', 'LD_LIBRARY_PATH', 'MANPATH', \
-@@ -323,7 +319,8 @@ class SSH(object):
+ py_cmd_path=`"$py_cmd" -c \
+ 'from __future__ import print_function;
+@@ -323,7 +321,8 @@ class SSH(object):
extra_mods=self.opts.get('thin_extra_mods'),
overwrite=self.opts['regen_thin'],
python2_bin=self.opts['python2_bin'],
@@ -485,7 +476,7 @@ index f1c1ad9a22..399facf5c8 100644
self.mods = mod_data(self.fsclient)
def _get_roster(self):
-@@ -834,10 +831,10 @@ class Single(object):
+@@ -850,10 +849,10 @@ class Single(object):
self.opts = opts
self.tty = tty
@@ -499,7 +490,7 @@ index f1c1ad9a22..399facf5c8 100644
if kwargs.get('thin_dir'):
self.thin_dir = kwargs['thin_dir']
elif self.winrm:
-@@ -1161,38 +1158,39 @@ class Single(object):
+@@ -1178,38 +1177,39 @@ class Single(object):
cachedir = self.opts['_caller_cachedir']
else:
cachedir = self.opts['cachedir']
@@ -563,7 +554,7 @@ index f1c1ad9a22..399facf5c8 100644
if six.PY2:
py_code_enc = py_code.encode('base64')
diff --git a/salt/client/ssh/ssh_py_shim.py b/salt/client/ssh/ssh_py_shim.py
-index e46220fc80..21d03343b9 100644
+index 5e5dbdc55e..92ede14930 100644
--- a/salt/client/ssh/ssh_py_shim.py
+++ b/salt/client/ssh/ssh_py_shim.py
@@ -16,11 +16,13 @@ import sys
@@ -597,7 +588,7 @@ index e46220fc80..21d03343b9 100644
ARGS = None
# The below line is where OPTIONS can be redefined with internal options
# (rather than cli arguments) when the shim is bundled by
-@@ -128,7 +129,7 @@ def need_deployment():
+@@ -130,7 +131,7 @@ def need_deployment():
os.chmod(OPTIONS.saltdir, stt.st_mode | stat.S_IWGRP | stat.S_IRGRP | stat.S_IXGRP)
except OSError:
sys.stdout.write('\n\nUnable to set permissions on thin directory.\nIf sudo_user is set '
@@ -606,15 +597,14 @@ index e46220fc80..21d03343b9 100644
sys.exit(1)
# Delimiter emitted on stdout *only* to indicate shim message to master.
-@@ -161,11 +162,15 @@ def unpack_thin(thin_path):
- old_umask = os.umask(0o077)
+@@ -163,11 +164,15 @@ def unpack_thin(thin_path):
+ old_umask = os.umask(0o077) # pylint: disable=blacklisted-function
tfile.extractall(path=OPTIONS.saltdir)
tfile.close()
-- os.umask(old_umask)
+ checksum_path = os.path.normpath(os.path.join(OPTIONS.saltdir, "thin_checksum"))
+ with open(checksum_path, 'w') as chk:
+ chk.write(OPTIONS.checksum + '\n')
-+ os.umask(old_umask) # pylint: disable=blacklisted-function
+ os.umask(old_umask) # pylint: disable=blacklisted-function
try:
os.unlink(thin_path)
except OSError:
@@ -623,7 +613,7 @@ index e46220fc80..21d03343b9 100644
def need_ext():
-@@ -199,6 +204,47 @@ def unpack_ext(ext_path):
+@@ -201,6 +206,47 @@ def unpack_ext(ext_path):
shutil.move(ver_path, ver_dst)
@@ -671,7 +661,7 @@ index e46220fc80..21d03343b9 100644
def main(argv): # pylint: disable=W0613
'''
Main program body
-@@ -215,32 +261,25 @@ def main(argv): # pylint: disable=W0613
+@@ -217,32 +263,25 @@ def main(argv): # pylint: disable=W0613
if scpstat != 0:
sys.exit(EX_SCP_NOT_FOUND)
@@ -716,7 +706,7 @@ index e46220fc80..21d03343b9 100644
need_deployment()
# Salt thin exists and is up-to-date - fall through and use it
-@@ -270,7 +309,7 @@ def main(argv): # pylint: disable=W0613
+@@ -272,7 +311,7 @@ def main(argv): # pylint: disable=W0613
argv_prepared = ARGS
salt_argv = [
@@ -725,7 +715,7 @@ index e46220fc80..21d03343b9 100644
salt_call_path,
'--retcode-passthrough',
'--local',
-@@ -303,7 +342,10 @@ def main(argv): # pylint: disable=W0613
+@@ -305,7 +344,10 @@ def main(argv): # pylint: disable=W0613
if OPTIONS.tty:
# Returns bytes instead of string on python 3
stdout, _ = subprocess.Popen(salt_argv, stdout=subprocess.PIPE, stderr=subprocess.PIPE).communicate()
@@ -737,9 +727,9 @@ index e46220fc80..21d03343b9 100644
sys.stdout.flush()
if OPTIONS.wipe:
shutil.rmtree(OPTIONS.saltdir)
-@@ -315,5 +357,6 @@ def main(argv): # pylint: disable=W0613
+@@ -317,5 +359,6 @@ def main(argv): # pylint: disable=W0613
if OPTIONS.cmd_umask is not None:
- os.umask(old_umask)
+ os.umask(old_umask) # pylint: disable=blacklisted-function
+
if __name__ == '__main__':
@@ -758,10 +748,10 @@ index 04d751b51a..09f9344642 100644
minion_opts=self.minion_opts,
**self.kwargs
diff --git a/salt/config/__init__.py b/salt/config/__init__.py
-index df0e1388b7..b3de3820b0 100644
+index 289991771d..432364b201 100644
--- a/salt/config/__init__.py
+++ b/salt/config/__init__.py
-@@ -1652,6 +1652,7 @@ DEFAULT_MASTER_OPTS = {
+@@ -1663,6 +1663,7 @@ DEFAULT_MASTER_OPTS = {
'state_top': 'top.sls',
'state_top_saltenv': None,
'master_tops': {},
@@ -769,45 +759,11 @@ index df0e1388b7..b3de3820b0 100644
'order_masters': False,
'job_cache': True,
'ext_job_cache': '',
-diff --git a/salt/modules/zfs.py b/salt/modules/zfs.py
-index bc54044b5c..d8fbfc76be 100644
---- a/salt/modules/zfs.py
-+++ b/salt/modules/zfs.py
-@@ -37,10 +37,10 @@ def __virtual__():
- '''
- Only load when the platform has zfs support
- '''
-- if __grains__['zfs_support']:
-+ if __grains__.get('zfs_support'):
- return __virtualname__
- else:
-- return (False, "The zfs module cannot be loaded: zfs not supported")
-+ return False, "The zfs module cannot be loaded: zfs not supported"
-
-
- @decorators.memoize
-diff --git a/salt/modules/zpool.py b/salt/modules/zpool.py
-index f955175664..5e03418919 100644
---- a/salt/modules/zpool.py
-+++ b/salt/modules/zpool.py
-@@ -31,10 +31,10 @@ def __virtual__():
- '''
- Only load when the platform has zfs support
- '''
-- if __grains__['zfs_support']:
-+ if __grains__.get('zfs_support'):
- return __virtualname__
- else:
-- return (False, "The zpool module cannot be loaded: zfs not supported")
-+ return False, "The zpool module cannot be loaded: zfs not supported"
-
-
- @salt.utils.decorators.memoize
diff --git a/salt/state.py b/salt/state.py
-index 49d68d2edf..8c0b90545c 100644
+index 09709347b1..e7288bce2e 100644
--- a/salt/state.py
+++ b/salt/state.py
-@@ -3332,7 +3332,7 @@ class BaseHighState(object):
+@@ -3383,7 +3383,7 @@ class BaseHighState(object):
ext_matches = self._master_tops()
for saltenv in ext_matches:
top_file_matches = matches.get(saltenv, [])
@@ -817,7 +773,7 @@ index 49d68d2edf..8c0b90545c 100644
second = top_file_matches
else:
diff --git a/salt/utils/hashutils.py b/salt/utils/hashutils.py
-index 4c9cb4a50c..18f7459d3c 100644
+index b42a60d222..ee01be7377 100644
--- a/salt/utils/hashutils.py
+++ b/salt/utils/hashutils.py
@@ -9,6 +9,7 @@ import base64
@@ -828,7 +784,7 @@ index 4c9cb4a50c..18f7459d3c 100644
# Import Salt libs
from salt.ext import six
-@@ -163,3 +164,39 @@ def get_hash(path, form='sha256', chunk_size=65536):
+@@ -150,3 +151,39 @@ def get_hash(path, form='sha256', chunk_size=65536):
for chunk in iter(lambda: ifile.read(chunk_size), b''):
hash_obj.update(chunk)
return hash_obj.hexdigest()
@@ -2137,6 +2093,6 @@ index 0000000000..549d48a703
+ for t_line in ['second-system-effect:2:7', 'solar-interference:2:6']:
+ assert t_line in out
--
-2.15.1
+2.13.7
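The shim hunks above write the expected thin checksum into a thin_checksum file inside the deployed salt directory so that later runs can decide whether the bundle must be redeployed. Roughly what that comparison looks like on the target side (file name taken from the hunk; everything else is illustrative):

    import os

    def thin_is_current(saltdir, expected_checksum):
        path = os.path.normpath(os.path.join(saltdir, 'thin_checksum'))
        if not os.path.isfile(path):
            return False                 # never deployed: request a new thin tarball
        with open(path) as handle:
            return handle.read().strip() == expected_checksum
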
diff --git a/align-suse-salt-master.service-limitnofiles-limit-wi.patch b/align-suse-salt-master.service-limitnofiles-limit-wi.patch
new file mode 100644
index 0000000..292dee4
--- /dev/null
+++ b/align-suse-salt-master.service-limitnofiles-limit-wi.patch
@@ -0,0 +1,28 @@
+From 816c7ec3b72510346deef17deb2990a09ddab03a Mon Sep 17 00:00:00 2001
+From: =?UTF-8?q?Pablo=20Su=C3=A1rez=20Hern=C3=A1ndez?=
+
+Date: Thu, 31 May 2018 10:58:16 +0100
+Subject: [PATCH] Align SUSE salt-master.service 'LimitNOFILES' limit
+ with upstream Salt
+
+---
+ pkg/suse/salt-master.service | 2 +-
+ 1 file changed, 1 insertion(+), 1 deletion(-)
+
+diff --git a/pkg/suse/salt-master.service b/pkg/suse/salt-master.service
+index c0ea4606d8..b31c1a1373 100644
+--- a/pkg/suse/salt-master.service
++++ b/pkg/suse/salt-master.service
+@@ -4,7 +4,7 @@ Documentation=man:salt-master(1) file:///usr/share/doc/salt/html/contents.html h
+ After=network.target
+
+ [Service]
+-LimitNOFILE=16384
++LimitNOFILE=100000
+ Type=simple
+ ExecStart=/usr/bin/salt-master
+ TasksMax=infinity
+--
+2.13.7
+
+
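To verify the raised limit is actually applied, any Python process started under the updated unit (salt-master itself or a child of it) can read it back with the standard library; this is only a verification aid, not part of the patch:

    import resource

    soft, hard = resource.getrlimit(resource.RLIMIT_NOFILE)
    print(soft, hard)   # expect 100000 once the new salt-master.service is in effect
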
diff --git a/avoid-excessive-syslogging-by-watchdog-cronjob-58.patch b/avoid-excessive-syslogging-by-watchdog-cronjob-58.patch
index 8f33ff3..6c54333 100644
--- a/avoid-excessive-syslogging-by-watchdog-cronjob-58.patch
+++ b/avoid-excessive-syslogging-by-watchdog-cronjob-58.patch
@@ -1,4 +1,4 @@
-From edb1c95fa06b8bb1d7e6d91beaaddec6d22c966b Mon Sep 17 00:00:00 2001
+From 310f8eb22db6010ba48ab371a7223c1345cfbcf0 Mon Sep 17 00:00:00 2001
From: Hubert Mantel
Date: Mon, 27 Nov 2017 13:55:13 +0100
Subject: [PATCH] avoid excessive syslogging by watchdog cronjob (#58)
@@ -21,6 +21,6 @@ index 2e418094ed..73a91ebd62 100755
/usr/bin/salt-daemon-watcher --with-init & disown
fi
--
-2.16.2
+2.13.7
diff --git a/avoid-incomprehensive-message-if-crashes.patch b/avoid-incomprehensive-message-if-crashes.patch
new file mode 100644
index 0000000..01679d8
--- /dev/null
+++ b/avoid-incomprehensive-message-if-crashes.patch
@@ -0,0 +1,57 @@
+From c4d9227b6da4407348e181f092445f17e3c14b51 Mon Sep 17 00:00:00 2001
+From: =?UTF-8?q?Pablo=20Su=C3=A1rez=20Hern=C3=A1ndez?=
+
+Date: Thu, 26 Jul 2018 16:42:10 +0100
+Subject: [PATCH] Avoid incomprehensive message if crashes
+Subject: [PATCH] Avoid incomprehensible message if it crashes
+
+Check dmidecoder executable on each call to avoid crashing
+
+Fix pylint issues
+---
+ salt/modules/smbios.py | 11 +++++++++++
+ 1 file changed, 11 insertions(+)
+
+diff --git a/salt/modules/smbios.py b/salt/modules/smbios.py
+index c8a0e54a5c..c0b94c2a65 100644
+--- a/salt/modules/smbios.py
++++ b/salt/modules/smbios.py
+@@ -19,6 +19,7 @@ import re
+
+ # Import salt libs
+ import salt.utils.path
++from salt.exceptions import CommandExecutionError
+
+ # Solve the Chicken and egg problem where grains need to run before any
+ # of the modules are loaded and are generally available for any usage.
+@@ -32,10 +33,16 @@ log = logging.getLogger(__name__)
+ DMIDECODER = salt.utils.path.which_bin(['dmidecode', 'smbios'])
+
+
++def _refresh_dmidecoder():
++ global DMIDECODER
++ DMIDECODER = salt.utils.path.which_bin(['dmidecode', 'smbios'])
++
++
+ def __virtual__():
+ '''
+ Only work when dmidecode is installed.
+ '''
++ _refresh_dmidecoder()
+ if DMIDECODER is None:
+ log.debug('SMBIOS: neither dmidecode nor smbios found!')
+ return (False, 'The smbios execution module failed to load: neither dmidecode nor smbios in the path.')
+@@ -327,6 +334,10 @@ def _dmidecoder(args=None):
+ '''
+ Call DMIdecode
+ '''
++ _refresh_dmidecoder()
++ if DMIDECODER is None:
++ raise CommandExecutionError('SMBIOS: neither dmidecode nor smbios found!')
++
+ if args is None:
+ return salt.modules.cmdmod._run_quiet(DMIDECODER)
+ else:
+--
+2.17.1
+
+
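The pattern the patch applies is simply to re-resolve the binary at call time instead of trusting the module-level constant computed at import. The same idea using only the standard library (shutil.which standing in for salt.utils.path.which_bin):

    import shutil

    def dmidecoder_path():
        for candidate in ('dmidecode', 'smbios'):
            path = shutil.which(candidate)
            if path:
                return path
        raise RuntimeError('SMBIOS: neither dmidecode nor smbios found!')
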
diff --git a/do-not-override-jid-on-returners-only-sending-back-t.patch b/do-not-override-jid-on-returners-only-sending-back-t.patch
index 0566ba6..b80bab9 100644
--- a/do-not-override-jid-on-returners-only-sending-back-t.patch
+++ b/do-not-override-jid-on-returners-only-sending-back-t.patch
@@ -1,4 +1,4 @@
-From c49a9aca3519d1baef2f9d82963a6e80eabb26d4 Mon Sep 17 00:00:00 2001
+From 911d61d1479d89ed31b23b038874505b731c6d86 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Pablo=20Su=C3=A1rez=20Hern=C3=A1ndez?=
Date: Fri, 4 May 2018 09:34:13 +0100
@@ -10,10 +10,10 @@ Subject: [PATCH] Do not override jid on returners, only sending back to
1 file changed, 7 insertions(+), 5 deletions(-)
diff --git a/salt/utils/schedule.py b/salt/utils/schedule.py
-index 6cb3ce0ef8..e81ae143bd 100644
+index 65c2e3fbda..32fdae9786 100644
--- a/salt/utils/schedule.py
+++ b/salt/utils/schedule.py
-@@ -753,11 +753,13 @@ class Schedule(object):
+@@ -755,11 +755,13 @@ class Schedule(object):
else:
# Send back to master so the job is included in the job list
mret = ret.copy()
@@ -33,6 +33,6 @@ index 6cb3ce0ef8..e81ae143bd 100644
for key, value in six.iteritems(mret):
load[key] = value
--
-2.15.1
+2.13.7
diff --git a/enable-passing-a-unix_socket-for-mysql-returners-bsc.patch b/enable-passing-a-unix_socket-for-mysql-returners-bsc.patch
index e1fef6a..a9d6b69 100644
--- a/enable-passing-a-unix_socket-for-mysql-returners-bsc.patch
+++ b/enable-passing-a-unix_socket-for-mysql-returners-bsc.patch
@@ -1,4 +1,4 @@
-From cdfb19b6c2801a7d5a883492a0ccc57c803dcd7f Mon Sep 17 00:00:00 2001
+From d937d1edb837bc084c1eaa320e8433382135e2d9 Mon Sep 17 00:00:00 2001
From: Maximilian Meister
Date: Thu, 3 May 2018 15:52:23 +0200
Subject: [PATCH] enable passing a unix_socket for mysql returners
@@ -69,6 +69,6 @@ index 85892cb06c..a286731d5c 100644
try:
__context__['mysql_returner_conn'] = conn
--
-2.13.6
+2.13.7
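At the driver level, passing a unix_socket just means handing the socket path to the connect call instead of a host/port pair; both MySQLdb and PyMySQL accept the keyword. Illustrative values only:

    import pymysql   # or MySQLdb; the connect keyword is the same

    conn = pymysql.connect(user='salt', password='secret', db='salt',
                           unix_socket='/var/run/mysql/mysql.sock')
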
diff --git a/explore-module.run-response-to-catch-the-result-in-d.patch b/explore-module.run-response-to-catch-the-result-in-d.patch
deleted file mode 100644
index 0b23ccd..0000000
--- a/explore-module.run-response-to-catch-the-result-in-d.patch
+++ /dev/null
@@ -1,131 +0,0 @@
-From 8c6b77bfd913b3b47d3d4206ec0a9e08754b6f93 Mon Sep 17 00:00:00 2001
-From: =?UTF-8?q?Pablo=20Su=C3=A1rez=20Hern=C3=A1ndez?=
-
-Date: Wed, 7 Mar 2018 09:42:46 +0000
-Subject: [PATCH] Explore 'module.run' response to catch the 'result' in
- depth
-
-Fix Python3 and pylint issue
-
-Rename and fix recursive method
-
-Add new unit test to check state.apply within module.run
----
- salt/states/module.py | 18 ++++++++++++
- tests/unit/states/test_module.py | 62 ++++++++++++++++++++++++++++++++++++++++
- 2 files changed, 80 insertions(+)
-
-diff --git a/salt/states/module.py b/salt/states/module.py
-index fda8bdf17a..2190ffa3d2 100644
---- a/salt/states/module.py
-+++ b/salt/states/module.py
-@@ -531,7 +531,25 @@ def _get_result(func_ret, changes):
- res = changes_ret.get('result', {})
- elif changes_ret.get('retcode', 0) != 0:
- res = False
-+ # Explore dict in depth to determine if there is a
-+ # 'result' key set to False which sets the global
-+ # state result.
-+ else:
-+ res = _get_dict_result(changes_ret)
-
- return res
-
-+
-+def _get_dict_result(node):
-+ ret = True
-+ for key, val in six.iteritems(node):
-+ if key == 'result' and val is False:
-+ ret = False
-+ break
-+ elif isinstance(val, dict):
-+ ret = _get_dict_result(val)
-+ if ret is False:
-+ break
-+ return ret
-+
- mod_watch = salt.utils.functools.alias_function(run, 'mod_watch')
-diff --git a/tests/unit/states/test_module.py b/tests/unit/states/test_module.py
-index 12ad54f979..bf4ddcc5b4 100644
---- a/tests/unit/states/test_module.py
-+++ b/tests/unit/states/test_module.py
-@@ -25,6 +25,57 @@ log = logging.getLogger(__name__)
-
- CMD = 'foo.bar'
-
-+STATE_APPLY_RET = {
-+ 'module_|-test2_|-state.apply_|-run': {
-+ 'comment': 'Module function state.apply executed',
-+ 'name': 'state.apply',
-+ 'start_time': '16:11:48.818932',
-+ 'result': False,
-+ 'duration': 179.439,
-+ '__run_num__': 0,
-+ 'changes': {
-+ 'ret': {
-+ 'module_|-test3_|-state.apply_|-run': {
-+ 'comment': 'Module function state.apply executed',
-+ 'name': 'state.apply',
-+ 'start_time': '16:11:48.904796',
-+ 'result': True,
-+ 'duration': 89.522,
-+ '__run_num__': 0,
-+ 'changes': {
-+ 'ret': {
-+ 'module_|-test4_|-cmd.run_|-run': {
-+ 'comment': 'Module function cmd.run executed',
-+ 'name': 'cmd.run',
-+ 'start_time': '16:11:48.988574',
-+ 'result': True,
-+ 'duration': 4.543,
-+ '__run_num__': 0,
-+ 'changes': {
-+ 'ret': 'Wed Mar 7 16:11:48 CET 2018'
-+ },
-+ '__id__': 'test4'
-+ }
-+ }
-+ },
-+ '__id__': 'test3'
-+ },
-+ 'module_|-test3_fail_|-test3_fail_|-run': {
-+ 'comment': 'Module function test3_fail is not available',
-+ 'name': 'test3_fail',
-+ 'start_time': '16:11:48.994607',
-+ 'result': False,
-+ 'duration': 0.466,
-+ '__run_num__': 1,
-+ 'changes': {},
-+ '__id__': 'test3_fail'
-+ }
-+ }
-+ },
-+ '__id__': 'test2'
-+ }
-+}
-+
-
- def _mocked_func_named(name, names=('Fred', 'Swen',)):
- '''
-@@ -140,6 +191,17 @@ class ModuleStateTest(TestCase, LoaderModuleMockMixin):
- if ret['comment'] != '{0}: Success'.format(CMD) or not ret['result']:
- self.fail('module.run failed: {0}'.format(ret))
-
-+ def test_run_state_apply_result_false(self):
-+ '''
-+ Tests the 'result' of module.run that calls state.apply execution module
-+ :return:
-+ '''
-+ with patch.dict(module.__salt__, {"state.apply": MagicMock(return_value=STATE_APPLY_RET)}):
-+ with patch.dict(module.__opts__, {'use_deprecated': ['module.run']}):
-+ ret = module.run(**{"name": "state.apply", 'mods': 'test2'})
-+ if ret['result']:
-+ self.fail('module.run did not report false result: {0}'.format(ret))
-+
- def test_run_unexpected_keywords(self):
- with patch.dict(module.__salt__, {CMD: _mocked_func_args}):
- with patch.dict(module.__opts__, {'use_superseded': ['module.run']}):
---
-2.16.2
-
-
diff --git a/extra-filerefs-include-files-even-if-no-refs-in-stat.patch b/extra-filerefs-include-files-even-if-no-refs-in-stat.patch
deleted file mode 100644
index 38c772f..0000000
--- a/extra-filerefs-include-files-even-if-no-refs-in-stat.patch
+++ /dev/null
@@ -1,32 +0,0 @@
-From 544dfd7dbaa1c837b75976f15ad67159b1bdedbb Mon Sep 17 00:00:00 2001
-From: Matei Albu
-Date: Sun, 6 May 2018 11:56:18 +0200
-Subject: [PATCH] --extra-filerefs include files even if no refs in
- states to apply
-
-Fixes #47496
-(cherry picked from commit d67239a)
----
- salt/client/ssh/state.py | 4 ++--
- 1 file changed, 2 insertions(+), 2 deletions(-)
-
-diff --git a/salt/client/ssh/state.py b/salt/client/ssh/state.py
-index 8fa11d031e..08d4846bb6 100644
---- a/salt/client/ssh/state.py
-+++ b/salt/client/ssh/state.py
-@@ -135,9 +135,9 @@ def lowstate_file_refs(chunks, extras=''):
- elif state.startswith('__'):
- continue
- crefs.extend(salt_refs(chunk[state]))
-+ if saltenv not in refs:
-+ refs[saltenv] = []
- if crefs:
-- if saltenv not in refs:
-- refs[saltenv] = []
- refs[saltenv].append(crefs)
- if extras:
- extra_refs = extras.split(',')
---
-2.13.6
-
-
diff --git a/fall-back-to-pymysql.patch b/fall-back-to-pymysql.patch
index 067ac2e..c135d4e 100644
--- a/fall-back-to-pymysql.patch
+++ b/fall-back-to-pymysql.patch
@@ -1,4 +1,4 @@
-From f7ba683153e11be401a5971ba029d0a3964b1ecb Mon Sep 17 00:00:00 2001
+From 9e0c0bbc1b48fa7065a9d0f50bd7111789712e2d Mon Sep 17 00:00:00 2001
From: Maximilian Meister
Date: Thu, 5 Apr 2018 13:23:23 +0200
Subject: [PATCH] fall back to PyMySQL
@@ -123,7 +123,7 @@ index 9d6aa17987..8b0a942310 100644
raise
# reconnect creating new client
diff --git a/salt/modules/mysql.py b/salt/modules/mysql.py
-index 0625b02a96..8b17e461ea 100644
+index 833a766a97..a5965f3a25 100644
--- a/salt/modules/mysql.py
+++ b/salt/modules/mysql.py
@@ -51,13 +51,14 @@ import salt.utils.stringutils
@@ -299,7 +299,7 @@ index af6698142b..85892cb06c 100644
cursor = conn.cursor()
diff --git a/tests/unit/pillar/test_mysql.py b/tests/unit/pillar/test_mysql.py
-index 8d49ac24e2..b72988673d 100644
+index a242eac1a1..f6a2d0a44b 100644
--- a/tests/unit/pillar/test_mysql.py
+++ b/tests/unit/pillar/test_mysql.py
@@ -12,7 +12,7 @@ import salt.pillar.mysql as mysql
@@ -312,6 +312,6 @@ index 8d49ac24e2..b72988673d 100644
maxDiff = None
--
-2.13.6
+2.13.7
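The import dance this patch builds on is the usual driver fallback: prefer the C MySQLdb binding, otherwise let PyMySQL masquerade as it. A sketch of the idiom in isolation (not Salt's exact module-level code):

    try:
        import MySQLdb
    except ImportError:
        try:
            import pymysql
            pymysql.install_as_MySQLdb()   # makes later MySQLdb imports resolve to PyMySQL
            import MySQLdb
        except ImportError:
            MySQLdb = None                 # callers must handle the missing driver
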
diff --git a/feat-add-grain-for-all-fqdns.patch b/feat-add-grain-for-all-fqdns.patch
index b9315b0..f604e76 100644
--- a/feat-add-grain-for-all-fqdns.patch
+++ b/feat-add-grain-for-all-fqdns.patch
@@ -1,4 +1,4 @@
-From 0449bead92ff763d186f5e524556f82c618d652c Mon Sep 17 00:00:00 2001
+From 6e5f0fbbe3c232c7d5212d4fddfe52b5a5a71597 Mon Sep 17 00:00:00 2001
From: Michele Bologna
Date: Thu, 14 Dec 2017 18:20:02 +0100
Subject: [PATCH] Feat: add grain for all FQDNs
@@ -21,10 +21,10 @@ https://github.com/saltstack/salt/pull/45060
3 files changed, 29 insertions(+)
diff --git a/salt/grains/core.py b/salt/grains/core.py
-index b7d446676e..96b7ce2cf2 100644
+index 8545d4368c..24de3cff6b 100644
--- a/salt/grains/core.py
+++ b/salt/grains/core.py
-@@ -1888,6 +1888,33 @@ def append_domain():
+@@ -1886,6 +1886,33 @@ def append_domain():
return grain
@@ -59,7 +59,7 @@ index b7d446676e..96b7ce2cf2 100644
'''
Return ip address and FQDN grains
diff --git a/tests/integration/modules/test_grains.py b/tests/integration/modules/test_grains.py
-index 709f882b45..aa7bd44202 100644
+index 616e07d455..dfa70afa03 100644
--- a/tests/integration/modules/test_grains.py
+++ b/tests/integration/modules/test_grains.py
@@ -51,6 +51,7 @@ class TestModulesGrains(ModuleCase):
@@ -71,7 +71,7 @@ index 709f882b45..aa7bd44202 100644
'groupname',
'host',
diff --git a/tests/unit/grains/test_core.py b/tests/unit/grains/test_core.py
-index 50babe3ed3..47c9cdd35b 100644
+index 54c8293dcf..616c62e658 100644
--- a/tests/unit/grains/test_core.py
+++ b/tests/unit/grains/test_core.py
@@ -7,6 +7,7 @@
@@ -83,6 +83,6 @@ index 50babe3ed3..47c9cdd35b 100644
# Import Salt Testing Libs
try:
--
-2.16.2
+2.13.7
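Conceptually the new grain is a reverse lookup over every non-loopback address, collecting whatever names resolve. Stripped of the Salt plumbing it is roughly:

    import socket

    def fqdns_for(addresses):
        names = set()
        for ip in addresses:
            try:
                names.add(socket.gethostbyaddr(ip)[0])
            except (socket.error, socket.herror, socket.gaierror, socket.timeout):
                pass   # unresolvable addresses are simply skipped
        return sorted(names)
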
diff --git a/fix-bsc-1065792.patch b/fix-bsc-1065792.patch
index b4184ee..1c71aa1 100644
--- a/fix-bsc-1065792.patch
+++ b/fix-bsc-1065792.patch
@@ -1,4 +1,4 @@
-From 27d0e8b7e7c1eae68ef6dc972ea0f091d18cd92e Mon Sep 17 00:00:00 2001
+From 30a4053231cf67f486ca1f430dce563f7247d963 Mon Sep 17 00:00:00 2001
From: Bo Maryniuk
Date: Thu, 14 Dec 2017 16:21:40 +0100
Subject: [PATCH] Fix bsc#1065792
@@ -20,6 +20,6 @@ index c5bf3f2d54..a5ec426ec4 100644
return __virtualname__
else:
--
-2.16.2
+2.13.7
diff --git a/fix-cp.push-empty-file.patch b/fix-cp.push-empty-file.patch
deleted file mode 100644
index ea3a799..0000000
--- a/fix-cp.push-empty-file.patch
+++ /dev/null
@@ -1,27 +0,0 @@
-From 74ca7c3fd6a42f95f9d702ef2847a1f76399db5f Mon Sep 17 00:00:00 2001
-From: Mihai Dinca
-Date: Wed, 7 Mar 2018 13:11:16 +0100
-Subject: [PATCH] Fix cp.push empty file
-
-Co-authored-by: Jochen Breuer
----
- salt/master.py | 2 +-
- 1 file changed, 1 insertion(+), 1 deletion(-)
-
-diff --git a/salt/master.py b/salt/master.py
-index fb704909c8..6fb37ece1a 100644
---- a/salt/master.py
-+++ b/salt/master.py
-@@ -1451,7 +1451,7 @@ class AESFuncs(object):
- if load['loc']:
- fp_.seek(load['loc'])
-
-- fp_.write(load['data'])
-+ fp_.write(salt.utils.stringutils.to_bytes(load['data']))
- return True
-
- def _pillar(self, load):
---
-2.16.2
-
-
diff --git a/fix-decrease-loglevel-when-unable-to-resolve-addr.patch b/fix-decrease-loglevel-when-unable-to-resolve-addr.patch
index 717b109..114e1f3 100644
--- a/fix-decrease-loglevel-when-unable-to-resolve-addr.patch
+++ b/fix-decrease-loglevel-when-unable-to-resolve-addr.patch
@@ -1,4 +1,4 @@
-From 14128fc65bf007bbb5b27b3eedec30b7f729bfbd Mon Sep 17 00:00:00 2001
+From 5d12b612b1f7b05a13e7b8da02e50ec471a72187 Mon Sep 17 00:00:00 2001
From: Michele Bologna
Date: Tue, 20 Mar 2018 19:27:36 +0100
Subject: [PATCH] Fix: decrease loglevel when unable to resolve addr
@@ -54,10 +54,10 @@ This patch changes the log level of the exception to INFO, since the resolve-una
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/salt/grains/core.py b/salt/grains/core.py
-index 96b7ce2cf2..17a7d9819a 100644
+index 24de3cff6b..c166a43d7c 100644
--- a/salt/grains/core.py
+++ b/salt/grains/core.py
-@@ -1909,7 +1909,7 @@ def fqdns():
+@@ -1907,7 +1907,7 @@ def fqdns():
fqdns.add(socket.gethostbyaddr(ip)[0])
except (socket.error, socket.herror,
socket.gaierror, socket.timeout) as e:
@@ -67,6 +67,6 @@ index 96b7ce2cf2..17a7d9819a 100644
grains['fqdns'] = list(fqdns)
return grains
--
-2.16.2
+2.13.7
diff --git a/fix-deprecation-warning-bsc-1095507.patch b/fix-deprecation-warning-bsc-1095507.patch
new file mode 100644
index 0000000..4ba68ee
--- /dev/null
+++ b/fix-deprecation-warning-bsc-1095507.patch
@@ -0,0 +1,26 @@
+From 9289e1607ebf6f397c027d4a6edcf35c59bd600c Mon Sep 17 00:00:00 2001
+From: Mihai Dinca
+Date: Wed, 6 Jun 2018 15:47:45 +0200
+Subject: [PATCH] Fix deprecation warning (bsc#1095507)
+
+---
+ salt/utils/thin.py | 2 +-
+ 1 file changed, 1 insertion(+), 1 deletion(-)
+
+diff --git a/salt/utils/thin.py b/salt/utils/thin.py
+index e4b878eb19..b99e407583 100644
+--- a/salt/utils/thin.py
++++ b/salt/utils/thin.py
+@@ -546,7 +546,7 @@ def thin_sum(cachedir, form='sha1'):
+ thintar = gen_thin(cachedir)
+ code_checksum_path = os.path.join(cachedir, 'thin', 'code-checksum')
+ if os.path.isfile(code_checksum_path):
+- with salt.utils.fopen(code_checksum_path, 'r') as fh:
++ with salt.utils.files.fopen(code_checksum_path, 'r') as fh:
+ code_checksum = "'{0}'".format(fh.read().strip())
+ else:
+ code_checksum = "'0'"
+--
+2.13.7
+
+
diff --git a/fix-diffing-binary-files-in-file.get_diff-bsc-109839.patch b/fix-diffing-binary-files-in-file.get_diff-bsc-109839.patch
new file mode 100644
index 0000000..4eb2a40
--- /dev/null
+++ b/fix-diffing-binary-files-in-file.get_diff-bsc-109839.patch
@@ -0,0 +1,27 @@
+From 7bda1dcd4f14da55abe38b1739b1e46ad0f5213c Mon Sep 17 00:00:00 2001
+From: Erik Johnson
+Date: Fri, 13 Apr 2018 11:25:24 -0500
+Subject: [PATCH] Fix diffing binary files in file.get_diff (bsc#1098394)
+
+---
+ salt/modules/file.py | 3 +--
+ 1 file changed, 1 insertion(+), 2 deletions(-)
+
+diff --git a/salt/modules/file.py b/salt/modules/file.py
+index 1b4b7e0e46..95bca7fb1b 100644
+--- a/salt/modules/file.py
++++ b/salt/modules/file.py
+@@ -5008,8 +5008,7 @@ def get_diff(file1,
+ *salt.utils.data.decode(args)
+ )
+ )
+- return ret
+- return ''
++ return ret
+
+
+ def manage_file(name,
+--
+2.13.7
+
+
diff --git a/fix-for-ec2-rate-limit-failures.patch b/fix-for-ec2-rate-limit-failures.patch
index c8fd3db..e736067 100644
--- a/fix-for-ec2-rate-limit-failures.patch
+++ b/fix-for-ec2-rate-limit-failures.patch
@@ -1,4 +1,4 @@
-From b0273e150b29fb44d72246918792117b485441de Mon Sep 17 00:00:00 2001
+From 88a99b5beeaa51eaf646eb92d8f546f65f654008 Mon Sep 17 00:00:00 2001
From: Daniel Wallace
Date: Wed, 25 Apr 2018 11:13:15 -0500
Subject: [PATCH] Fix for EC2 Rate Limit Failures
@@ -61,6 +61,6 @@ index 059450e7ca..912f1466ba 100644
log.error(
--
-2.13.6
+2.13.7
diff --git a/fix-for-errno-0-resolver-error-0-no-error-bsc-108758.patch b/fix-for-errno-0-resolver-error-0-no-error-bsc-108758.patch
index 87b161e..9fce2ae 100644
--- a/fix-for-errno-0-resolver-error-0-no-error-bsc-108758.patch
+++ b/fix-for-errno-0-resolver-error-0-no-error-bsc-108758.patch
@@ -1,4 +1,4 @@
-From b9cc71639d4e918ef14635124f6991917150de46 Mon Sep 17 00:00:00 2001
+From 826194be2a036fee80d3ca546822023416ac3a7d Mon Sep 17 00:00:00 2001
From: Bo Maryniuk
Date: Wed, 21 Mar 2018 11:10:23 +0100
Subject: [PATCH] Fix for [Errno 0] Resolver Error 0 (no error)
@@ -9,14 +9,14 @@ Subject: [PATCH] Fix for [Errno 0] Resolver Error 0 (no error)
* Fix unit test for reversed fqdns return data
---
salt/grains/core.py | 19 ++++++++++++-------
- tests/unit/grains/test_core.py | 32 ++++++++++++++++++++++++++++++++
- 2 files changed, 44 insertions(+), 7 deletions(-)
+ tests/unit/grains/test_core.py | 22 ++++++++++++++++++++++
+ 2 files changed, 34 insertions(+), 7 deletions(-)
diff --git a/salt/grains/core.py b/salt/grains/core.py
-index 17a7d9819a..cd9ba1f29c 100644
+index c166a43d7c..dc472a6c0a 100644
--- a/salt/grains/core.py
+++ b/salt/grains/core.py
-@@ -1900,16 +1900,21 @@ def fqdns():
+@@ -1898,16 +1898,21 @@ def fqdns():
fqdns = set()
addresses = salt.utils.network.ip_addrs(include_loopback=False,
@@ -46,22 +46,13 @@ index 17a7d9819a..cd9ba1f29c 100644
grains['fqdns'] = list(fqdns)
return grains
diff --git a/tests/unit/grains/test_core.py b/tests/unit/grains/test_core.py
-index 47c9cdd35b..c604df6c57 100644
+index 616c62e658..dd7d5b06f8 100644
--- a/tests/unit/grains/test_core.py
+++ b/tests/unit/grains/test_core.py
-@@ -784,3 +784,35 @@ SwapTotal: 4789244 kB'''
- []}}
- with patch.object(salt.utils.dns, 'parse_resolv', MagicMock(return_value=resolv_mock)):
- assert core.dns() == ret
-+
-+ def _run_dns_test(self, resolv_mock, ret):
-+ with patch.object(salt.utils, 'is_windows',
-+ MagicMock(return_value=False)):
-+ with patch.dict(core.__opts__, {'ipv6': False}):
-+ with patch.object(salt.utils.dns, 'parse_resolv',
-+ MagicMock(return_value=resolv_mock)):
-+ get_dns = core.dns()
-+ self.assertEqual(get_dns, ret)
+@@ -877,3 +877,25 @@ SwapTotal: 4789244 kB'''
+ osdata = {'kernel': 'test', }
+ ret = core._virtual(osdata)
+ self.assertEqual(ret['virtual'], virt)
+
+ @skipIf(not salt.utils.platform.is_linux(), 'System is not Linux')
+ @patch.object(salt.utils, 'is_windows', MagicMock(return_value=False))
@@ -84,8 +75,7 @@ index 47c9cdd35b..c604df6c57 100644
+ self.assertIn('fqdns', fqdns)
+ self.assertEqual(len(fqdns['fqdns']), len(ret['fqdns']))
+ self.assertEqual(set(fqdns['fqdns']), set(ret['fqdns']))
-+
--
-2.13.6
+2.13.7
diff --git a/fix-openscap-push.patch b/fix-openscap-push.patch
deleted file mode 100644
index 69b69cb..0000000
--- a/fix-openscap-push.patch
+++ /dev/null
@@ -1,72 +0,0 @@
-From 589d90117783a126dce695cf76a3b8fc2953f8b6 Mon Sep 17 00:00:00 2001
-From: Mihai Dinca
-Date: Fri, 2 Mar 2018 17:17:58 +0100
-Subject: [PATCH] Fix openscap push
-
----
- salt/modules/openscap.py | 4 +---
- tests/unit/modules/test_openscap.py | 10 +++++-----
- 2 files changed, 6 insertions(+), 8 deletions(-)
-
-diff --git a/salt/modules/openscap.py b/salt/modules/openscap.py
-index c5b51a1846..e3190e1e11 100644
---- a/salt/modules/openscap.py
-+++ b/salt/modules/openscap.py
-@@ -13,7 +13,6 @@ from subprocess import Popen, PIPE
-
- # Import Salt libs
- from salt.ext import six
--from salt.client import Caller
-
-
- ArgumentParser = object
-@@ -105,8 +104,7 @@ def xccdf(params):
- success = _OSCAP_EXIT_CODES_MAP[proc.returncode]
- returncode = proc.returncode
- if success:
-- caller = Caller()
-- caller.cmd('cp.push_dir', tempdir)
-+ __salt__['cp.push_dir'](tempdir)
- shutil.rmtree(tempdir, ignore_errors=True)
- upload_dir = tempdir
-
-diff --git a/tests/unit/modules/test_openscap.py b/tests/unit/modules/test_openscap.py
-index eb8ad1225b..6e17148de1 100644
---- a/tests/unit/modules/test_openscap.py
-+++ b/tests/unit/modules/test_openscap.py
-@@ -28,8 +28,10 @@ class OpenscapTestCase(TestCase):
- policy_file = '/usr/share/openscap/policy-file-xccdf.xml'
-
- def setUp(self):
-+ import salt.modules.openscap
-+ salt.modules.openscap.__salt__ = MagicMock()
- patchers = [
-- patch('salt.modules.openscap.Caller', MagicMock()),
-+ patch('salt.modules.openscap.__salt__', MagicMock()),
- patch('salt.modules.openscap.shutil.rmtree', Mock()),
- patch(
- 'salt.modules.openscap.tempfile.mkdtemp',
-@@ -68,8 +70,7 @@ class OpenscapTestCase(TestCase):
- cwd=openscap.tempfile.mkdtemp.return_value,
- stderr=PIPE,
- stdout=PIPE)
-- openscap.Caller().cmd.assert_called_once_with(
-- 'cp.push_dir', self.random_temp_dir)
-+ openscap.__salt__['cp.push_dir'].assert_called_once_with(self.random_temp_dir)
- self.assertEqual(openscap.shutil.rmtree.call_count, 1)
- self.assertEqual(
- response,
-@@ -106,8 +107,7 @@ class OpenscapTestCase(TestCase):
- cwd=openscap.tempfile.mkdtemp.return_value,
- stderr=PIPE,
- stdout=PIPE)
-- openscap.Caller().cmd.assert_called_once_with(
-- 'cp.push_dir', self.random_temp_dir)
-+ openscap.__salt__['cp.push_dir'].assert_called_once_with(self.random_temp_dir)
- self.assertEqual(openscap.shutil.rmtree.call_count, 1)
- self.assertEqual(
- response,
---
-2.16.2
-
-
diff --git a/fix-unboundlocalerror-in-file.get_diff.patch b/fix-unboundlocalerror-in-file.get_diff.patch
new file mode 100644
index 0000000..5720bcf
--- /dev/null
+++ b/fix-unboundlocalerror-in-file.get_diff.patch
@@ -0,0 +1,32 @@
+From 854ffed98894b8aa8b51973c0ba13fb75093e715 Mon Sep 17 00:00:00 2001
+From: Erik Johnson
+Date: Mon, 30 Apr 2018 10:25:40 -0500
+Subject: [PATCH] Fix UnboundLocalError in file.get_diff
+
+This was only in 2018.3 head and not part of a release
+
+Add unit test for file.get_diff
+
+Use a lambda instead of defining a one-line function
+---
+ salt/modules/file.py | 3 ++-
+ 1 file changed, 2 insertions(+), 1 deletion(-)
+
+diff --git a/salt/modules/file.py b/salt/modules/file.py
+index 95bca7fb1b..1b4b7e0e46 100644
+--- a/salt/modules/file.py
++++ b/salt/modules/file.py
+@@ -5008,7 +5008,8 @@ def get_diff(file1,
+ *salt.utils.data.decode(args)
+ )
+ )
+- return ret
++ return ret
++ return ''
+
+
+ def manage_file(name,
+--
+2.13.7
+
+
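The failure mode the two file.get_diff patches revolve around is the classic one: if the branch that assigns a local variable never runs, a trailing "return ret" raises UnboundLocalError. A minimal reproduction of the pattern (not the actual file.py control flow):

    def get_diff_sketch(produce_diff):
        if produce_diff:
            ret = '--- old\n+++ new\n'
            return ret
        # without this explicit fallback, a bare "return ret" down here
        # would raise UnboundLocalError whenever produce_diff is falsey
        return ''
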
diff --git a/fix-zypper.list_pkgs-to-be-aligned-with-pkg-state.patch b/fix-zypper.list_pkgs-to-be-aligned-with-pkg-state.patch
new file mode 100644
index 0000000..3aa3ba5
--- /dev/null
+++ b/fix-zypper.list_pkgs-to-be-aligned-with-pkg-state.patch
@@ -0,0 +1,311 @@
+From 997edb18b81d73933324b299da36474c24630b42 Mon Sep 17 00:00:00 2001
+From: =?UTF-8?q?Pablo=20Su=C3=A1rez=20Hern=C3=A1ndez?=
+
+Date: Mon, 25 Jun 2018 13:06:40 +0100
+Subject: [PATCH] Fix zypper.list_pkgs to be aligned with pkg state
+
+Handle packages with multiple version properly with zypper
+
+Add unit test coverage for multiple version packages on Zypper
+
+Fix '_find_remove_targets' after aligning Zypper with pkg state
+---
+ salt/modules/zypper.py | 57 +++++++++++++---------
+ salt/states/pkg.py | 21 --------
+ tests/unit/modules/test_zypper.py | 100 +++++++++++++++++++++++++-------------
+ 3 files changed, 102 insertions(+), 76 deletions(-)
+
+diff --git a/salt/modules/zypper.py b/salt/modules/zypper.py
+index 05ba3d86c9..4689f84926 100644
+--- a/salt/modules/zypper.py
++++ b/salt/modules/zypper.py
+@@ -38,6 +38,7 @@ import salt.utils.files
+ import salt.utils.functools
+ import salt.utils.path
+ import salt.utils.pkg
++import salt.utils.pkg.rpm
+ import salt.utils.stringutils
+ import salt.utils.systemd
+ from salt.utils.versions import LooseVersion
+@@ -714,24 +715,44 @@ def list_pkgs(versions_as_list=False, **kwargs):
+ contextkey = 'pkg.list_pkgs'
+
+ if contextkey not in __context__:
+-
+- cmd = ['rpm', '-qa', '--queryformat', (
+- "%{NAME}_|-%{VERSION}_|-%{RELEASE}_|-%{ARCH}_|-"
+- "%|EPOCH?{%{EPOCH}}:{}|_|-%{INSTALLTIME}\\n")]
+ ret = {}
+- for line in __salt__['cmd.run'](cmd, output_loglevel='trace', python_shell=False).splitlines():
+- name, pkgver, rel, arch, epoch, install_time = line.split('_|-')
+- install_date = datetime.datetime.utcfromtimestamp(int(install_time)).isoformat() + "Z"
+- install_date_time_t = int(install_time)
+-
+- all_attr = {'epoch': epoch, 'version': pkgver, 'release': rel, 'arch': arch,
+- 'install_date': install_date, 'install_date_time_t': install_date_time_t}
+- __salt__['pkg_resource.add_pkg'](ret, name, all_attr)
++ cmd = ['rpm', '-qa', '--queryformat',
++ salt.utils.pkg.rpm.QUERYFORMAT.replace('%{REPOID}', '(none)') + '\n']
++ output = __salt__['cmd.run'](cmd,
++ python_shell=False,
++ output_loglevel='trace')
++ for line in output.splitlines():
++ pkginfo = salt.utils.pkg.rpm.parse_pkginfo(
++ line,
++ osarch=__grains__['osarch']
++ )
++ if pkginfo is not None:
++ # see rpm version string rules available at https://goo.gl/UGKPNd
++ pkgver = pkginfo.version
++ epoch = ''
++ release = ''
++ if ':' in pkgver:
++ epoch, pkgver = pkgver.split(":", 1)
++ if '-' in pkgver:
++ pkgver, release = pkgver.split("-", 1)
++ all_attr = {
++ 'epoch': epoch,
++ 'version': pkgver,
++ 'release': release,
++ 'arch': pkginfo.arch,
++ 'install_date': pkginfo.install_date,
++ 'install_date_time_t': pkginfo.install_date_time_t
++ }
++ __salt__['pkg_resource.add_pkg'](ret, pkginfo.name, all_attr)
+
++ _ret = {}
+ for pkgname in ret:
+- ret[pkgname] = sorted(ret[pkgname], key=lambda d: d['version'])
++ # Filter out GPG public keys packages
++ if pkgname.startswith('gpg-pubkey'):
++ continue
++ _ret[pkgname] = sorted(ret[pkgname], key=lambda d: d['version'])
+
+- __context__[contextkey] = ret
++ __context__[contextkey] = _ret
+
+ return __salt__['pkg_resource.format_pkg_list'](
+ __context__[contextkey],
+@@ -1350,14 +1371,6 @@ def install(name=None,
+
+ _clean_cache()
+ new = list_pkgs(attr=diff_attr) if not downloadonly else list_downloaded()
+-
+- # Handle packages which report multiple new versions
+- # (affects only kernel packages at this point)
+- for pkg_name in new:
+- pkg_data = new[pkg_name]
+- if isinstance(pkg_data, six.string_types):
+- new[pkg_name] = pkg_data.split(',')[-1]
+-
+ ret = salt.utils.data.compare_dicts(old, new)
+
+ if errors:
+diff --git a/salt/states/pkg.py b/salt/states/pkg.py
+index ed405cb6b5..aad87e3278 100644
+--- a/salt/states/pkg.py
++++ b/salt/states/pkg.py
+@@ -415,16 +415,6 @@ def _find_remove_targets(name=None,
+
+ if __grains__['os'] == 'FreeBSD' and origin:
+ cver = [k for k, v in six.iteritems(cur_pkgs) if v['origin'] == pkgname]
+- elif __grains__['os_family'] == 'Suse':
+- # On SUSE systems. Zypper returns packages without "arch" in name
+- try:
+- namepart, archpart = pkgname.rsplit('.', 1)
+- except ValueError:
+- cver = cur_pkgs.get(pkgname, [])
+- else:
+- if archpart in salt.utils.pkg.rpm.ARCHES + ("noarch",):
+- pkgname = namepart
+- cver = cur_pkgs.get(pkgname, [])
+ else:
+ cver = cur_pkgs.get(pkgname, [])
+
+@@ -854,17 +844,6 @@ def _verify_install(desired, new_pkgs, ignore_epoch=False, new_caps=None):
+ cver = new_pkgs.get(pkgname.split('%')[0])
+ elif __grains__['os_family'] == 'Debian':
+ cver = new_pkgs.get(pkgname.split('=')[0])
+- elif __grains__['os_family'] == 'Suse':
+- # On SUSE systems. Zypper returns packages without "arch" in name
+- try:
+- namepart, archpart = pkgname.rsplit('.', 1)
+- except ValueError:
+- cver = new_pkgs.get(pkgname)
+- else:
+- if archpart in salt.utils.pkg.rpm.ARCHES + ("noarch",):
+- cver = new_pkgs.get(namepart)
+- else:
+- cver = new_pkgs.get(pkgname)
+ else:
+ cver = new_pkgs.get(pkgname)
+ if not cver and pkgname in new_caps:
+diff --git a/tests/unit/modules/test_zypper.py b/tests/unit/modules/test_zypper.py
+index 6eccee568b..bb15aca11a 100644
+--- a/tests/unit/modules/test_zypper.py
++++ b/tests/unit/modules/test_zypper.py
+@@ -475,7 +475,7 @@ class ZypperTestCase(TestCase, LoaderModuleMockMixin):
+ with patch('salt.modules.zypper.list_pkgs', MagicMock(side_effect=[
+ {"kernel-default": "3.12.49-11.1"}, {"kernel-default": "3.12.49-11.1,3.12.51-60.20.2"}])):
+ ret = zypper.install('kernel-default', '--auto-agree-with-licenses')
+- self.assertDictEqual(ret, {"kernel-default": {"old": "3.12.49-11.1", "new": "3.12.51-60.20.2"}})
++ self.assertDictEqual(ret, {"kernel-default": {"old": "3.12.49-11.1", "new": "3.12.49-11.1,3.12.51-60.20.2"}})
+
+ def test_upgrade_failure(self):
+ '''
+@@ -540,27 +540,36 @@ Repository 'DUMMY' not found by its alias, number, or URI.
+ data.setdefault(key, []).append(value)
+
+ rpm_out = [
+- 'protobuf-java_|-2.6.1_|-3.1.develHead_|-noarch_|-_|-1499257756',
+- 'yast2-ftp-server_|-3.1.8_|-8.1_|-x86_64_|-_|-1499257798',
+- 'jose4j_|-0.4.4_|-2.1.develHead_|-noarch_|-_|-1499257756',
+- 'apache-commons-cli_|-1.2_|-1.233_|-noarch_|-_|-1498636510',
+- 'jakarta-commons-discovery_|-0.4_|-129.686_|-noarch_|-_|-1498636511',
+- 'susemanager-build-keys-web_|-12.0_|-5.1.develHead_|-noarch_|-_|-1498636510',
++ 'protobuf-java_|-(none)_|-2.6.1_|-3.1.develHead_|-noarch_|-(none)_|-1499257756',
++ 'yast2-ftp-server_|-(none)_|-3.1.8_|-8.1_|-x86_64_|-(none)_|-1499257798',
++ 'jose4j_|-(none)_|-0.4.4_|-2.1.develHead_|-noarch_|-(none)_|-1499257756',
++ 'apache-commons-cli_|-(none)_|-1.2_|-1.233_|-noarch_|-(none)_|-1498636510',
++ 'jakarta-commons-discovery_|-(none)_|-0.4_|-129.686_|-noarch_|-(none)_|-1498636511',
++ 'susemanager-build-keys-web_|-(none)_|-12.0_|-5.1.develHead_|-noarch_|-(none)_|-1498636510',
++ 'gpg-pubkey_|-(none)_|-39db7c82_|-5847eb1f_|-(none)_|-(none)_|-1519203802',
++ 'gpg-pubkey_|-(none)_|-8a7c64f9_|-5aaa93ca_|-(none)_|-(none)_|-1529925595',
++ 'kernel-default_|-(none)_|-4.4.138_|-94.39.1_|-x86_64_|-(none)_|-1529936067',
++ 'kernel-default_|-(none)_|-4.4.73_|-5.1_|-x86_64_|-(none)_|-1503572639',
++ 'perseus-dummy_|-(none)_|-1.1_|-1.1_|-i586_|-(none)_|-1529936062',
+ ]
+- with patch.dict(zypper.__salt__, {'cmd.run': MagicMock(return_value=os.linesep.join(rpm_out))}), \
++ with patch.dict(zypper.__grains__, {'osarch': 'x86_64'}), \
++ patch.dict(zypper.__salt__, {'cmd.run': MagicMock(return_value=os.linesep.join(rpm_out))}), \
+ patch.dict(zypper.__salt__, {'pkg_resource.add_pkg': _add_data}), \
+ patch.dict(zypper.__salt__, {'pkg_resource.format_pkg_list': pkg_resource.format_pkg_list}), \
+ patch.dict(zypper.__salt__, {'pkg_resource.stringify': MagicMock()}):
+ pkgs = zypper.list_pkgs(versions_as_list=True)
++ self.assertFalse(pkgs.get('gpg-pubkey', False))
+ for pkg_name, pkg_version in {
+- 'jakarta-commons-discovery': '0.4-129.686',
+- 'yast2-ftp-server': '3.1.8-8.1',
+- 'protobuf-java': '2.6.1-3.1.develHead',
+- 'susemanager-build-keys-web': '12.0-5.1.develHead',
+- 'apache-commons-cli': '1.2-1.233',
+- 'jose4j': '0.4.4-2.1.develHead'}.items():
++ 'jakarta-commons-discovery': ['0.4-129.686'],
++ 'yast2-ftp-server': ['3.1.8-8.1'],
++ 'protobuf-java': ['2.6.1-3.1.develHead'],
++ 'susemanager-build-keys-web': ['12.0-5.1.develHead'],
++ 'apache-commons-cli': ['1.2-1.233'],
++ 'kernel-default': ['4.4.138-94.39.1', '4.4.73-5.1'],
++ 'perseus-dummy.i586': ['1.1-1.1'],
++ 'jose4j': ['0.4.4-2.1.develHead']}.items():
+ self.assertTrue(pkgs.get(pkg_name))
+- self.assertEqual(pkgs[pkg_name], [pkg_version])
++ self.assertEqual(pkgs[pkg_name], pkg_version)
+
+ def test_list_pkgs_with_attr(self):
+ '''
+@@ -572,57 +581,82 @@ Repository 'DUMMY' not found by its alias, number, or URI.
+ data.setdefault(key, []).append(value)
+
+ rpm_out = [
+- 'protobuf-java_|-2.6.1_|-3.1.develHead_|-noarch_|-_|-1499257756',
+- 'yast2-ftp-server_|-3.1.8_|-8.1_|-x86_64_|-_|-1499257798',
+- 'jose4j_|-0.4.4_|-2.1.develHead_|-noarch_|-_|-1499257756',
+- 'apache-commons-cli_|-1.2_|-1.233_|-noarch_|-_|-1498636510',
+- 'jakarta-commons-discovery_|-0.4_|-129.686_|-noarch_|-_|-1498636511',
+- 'susemanager-build-keys-web_|-12.0_|-5.1.develHead_|-noarch_|-_|-1498636510',
++ 'protobuf-java_|-(none)_|-2.6.1_|-3.1.develHead_|-noarch_|-(none)_|-1499257756',
++ 'yast2-ftp-server_|-(none)_|-3.1.8_|-8.1_|-x86_64_|-(none)_|-1499257798',
++ 'jose4j_|-(none)_|-0.4.4_|-2.1.develHead_|-noarch_|-(none)_|-1499257756',
++ 'apache-commons-cli_|-(none)_|-1.2_|-1.233_|-noarch_|-(none)_|-1498636510',
++ 'jakarta-commons-discovery_|-(none)_|-0.4_|-129.686_|-noarch_|-(none)_|-1498636511',
++ 'susemanager-build-keys-web_|-(none)_|-12.0_|-5.1.develHead_|-noarch_|-(none)_|-1498636510',
++ 'gpg-pubkey_|-(none)_|-39db7c82_|-5847eb1f_|-(none)_|-(none)_|-1519203802',
++ 'gpg-pubkey_|-(none)_|-8a7c64f9_|-5aaa93ca_|-(none)_|-(none)_|-1529925595',
++ 'kernel-default_|-(none)_|-4.4.138_|-94.39.1_|-x86_64_|-(none)_|-1529936067',
++ 'kernel-default_|-(none)_|-4.4.73_|-5.1_|-x86_64_|-(none)_|-1503572639',
++ 'perseus-dummy_|-(none)_|-1.1_|-1.1_|-i586_|-(none)_|-1529936062',
+ ]
+ with patch.dict(zypper.__salt__, {'cmd.run': MagicMock(return_value=os.linesep.join(rpm_out))}), \
++ patch.dict(zypper.__grains__, {'osarch': 'x86_64'}), \
+ patch.dict(zypper.__salt__, {'pkg_resource.add_pkg': _add_data}), \
+ patch.dict(zypper.__salt__, {'pkg_resource.format_pkg_list': pkg_resource.format_pkg_list}), \
+ patch.dict(zypper.__salt__, {'pkg_resource.stringify': MagicMock()}):
+ pkgs = zypper.list_pkgs(attr=['epoch', 'release', 'arch', 'install_date_time_t'])
++ self.assertFalse(pkgs.get('gpg-pubkey', False))
+ for pkg_name, pkg_attr in {
+- 'jakarta-commons-discovery': {
++ 'jakarta-commons-discovery': [{
+ 'version': '0.4',
+ 'release': '129.686',
+ 'arch': 'noarch',
+ 'install_date_time_t': 1498636511,
+- },
+- 'yast2-ftp-server': {
++ }],
++ 'yast2-ftp-server': [{
+ 'version': '3.1.8',
+ 'release': '8.1',
+ 'arch': 'x86_64',
+ 'install_date_time_t': 1499257798,
+- },
+- 'protobuf-java': {
++ }],
++ 'protobuf-java': [{
+ 'version': '2.6.1',
+ 'release': '3.1.develHead',
+ 'install_date_time_t': 1499257756,
+ 'arch': 'noarch',
+- },
+- 'susemanager-build-keys-web': {
++ }],
++ 'susemanager-build-keys-web': [{
+ 'version': '12.0',
+ 'release': '5.1.develHead',
+ 'arch': 'noarch',
+ 'install_date_time_t': 1498636510,
+- },
+- 'apache-commons-cli': {
++ }],
++ 'apache-commons-cli': [{
+ 'version': '1.2',
+ 'release': '1.233',
+ 'arch': 'noarch',
+ 'install_date_time_t': 1498636510,
++ }],
++ 'kernel-default': [{
++ 'version': '4.4.138',
++ 'release': '94.39.1',
++ 'arch': 'x86_64',
++ 'install_date_time_t': 1529936067
+ },
+- 'jose4j': {
++ {
++ 'version': '4.4.73',
++ 'release': '5.1',
++ 'arch': 'x86_64',
++ 'install_date_time_t': 1503572639,
++ }],
++ 'perseus-dummy.i586': [{
++ 'version': '1.1',
++ 'release': '1.1',
++ 'arch': 'i586',
++ 'install_date_time_t': 1529936062,
++ }],
++ 'jose4j': [{
+ 'arch': 'noarch',
+ 'version': '0.4.4',
+ 'release': '2.1.develHead',
+ 'install_date_time_t': 1499257756,
+- }}.items():
++ }]}.items():
+ self.assertTrue(pkgs.get(pkg_name))
+- self.assertEqual(pkgs[pkg_name], [pkg_attr])
++ self.assertEqual(pkgs[pkg_name], pkg_attr)
+
+ def test_list_patches(self):
+ '''
+--
+2.13.7
+
+
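The updated fixtures above feed list_pkgs rpm output that now carries an explicit epoch field ('(none)' when unset), gpg-pubkey pseudo-packages, and two installed kernel-default versions. A minimal sketch, assuming a name_|-epoch_|-version_|-release_|-arch_|-?_|-install_time field layout, of how such '_|-'-delimited lines map onto the name -> list-of-versions structure the new assertions expect; the helper below is illustrative only, not salt.modules.zypper.list_pkgs.

# Hypothetical parser for the rpm query lines used in the test fixture above.
# Assumed field layout: name_|-epoch_|-version_|-release_|-arch_|-(unused)_|-install_time
from collections import OrderedDict

def parse_rpm_lines(lines, osarch='x86_64'):
    pkgs = OrderedDict()
    for line in lines:
        name, epoch, version, release, arch, _, itime = line.split('_|-')
        if name == 'gpg-pubkey':
            # pseudo-packages are dropped, matching assertFalse(pkgs.get('gpg-pubkey'))
            continue
        if arch not in ('noarch', osarch, '(none)'):
            name = '{0}.{1}'.format(name, arch)   # e.g. perseus-dummy.i586
        pkgs.setdefault(name, []).append('{0}-{1}'.format(version, release))
    return pkgs

sample = [
    'kernel-default_|-(none)_|-4.4.138_|-94.39.1_|-x86_64_|-(none)_|-1529936067',
    'kernel-default_|-(none)_|-4.4.73_|-5.1_|-x86_64_|-(none)_|-1503572639',
    'gpg-pubkey_|-(none)_|-39db7c82_|-5847eb1f_|-(none)_|-(none)_|-1519203802',
    'perseus-dummy_|-(none)_|-1.1_|-1.1_|-i586_|-(none)_|-1529936062',
]
assert parse_rpm_lines(sample) == {
    'kernel-default': ['4.4.138-94.39.1', '4.4.73-5.1'],
    'perseus-dummy.i586': ['1.1-1.1'],
}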
diff --git a/fixed-usage-of-ipaddress.patch b/fixed-usage-of-ipaddress.patch
index b89affc..1e71632 100644
--- a/fixed-usage-of-ipaddress.patch
+++ b/fixed-usage-of-ipaddress.patch
@@ -1,4 +1,4 @@
-From 205e031f6e5552ac860120f7ac852e24c5da73e5 Mon Sep 17 00:00:00 2001
+From 8ae052fbb07d7506492b430579308e4611e51754 Mon Sep 17 00:00:00 2001
From: Jochen Breuer
Date: Sun, 22 Apr 2018 23:11:11 +0200
Subject: [PATCH] Fixed usage of ipaddress
@@ -23,6 +23,6 @@ index f188fd7954..92893572a6 100644
ret['netmask'] = six.text_type(network_info.netmask)
return ret
--
-2.13.6
+2.13.7
diff --git a/initialize-__context__-retcode-for-functions-handled.patch b/initialize-__context__-retcode-for-functions-handled.patch
deleted file mode 100644
index b326f74..0000000
--- a/initialize-__context__-retcode-for-functions-handled.patch
+++ /dev/null
@@ -1,27 +0,0 @@
-From c374feb62af75dfe18e8c81fb9cb556d678487ce Mon Sep 17 00:00:00 2001
-From: =?UTF-8?q?Pablo=20Su=C3=A1rez=20Hern=C3=A1ndez?=
-
-Date: Tue, 24 Apr 2018 13:50:49 +0100
-Subject: [PATCH] Initialize __context__ retcode for functions handled
- via schedule util module
-
----
- salt/utils/schedule.py | 1 +
- 1 file changed, 1 insertion(+)
-
-diff --git a/salt/utils/schedule.py b/salt/utils/schedule.py
-index de057477a3..6cb3ce0ef8 100644
---- a/salt/utils/schedule.py
-+++ b/salt/utils/schedule.py
-@@ -701,6 +701,7 @@ class Schedule(object):
- for global_key, value in six.iteritems(func_globals):
- self.functions[mod_name].__globals__[global_key] = value
-
-+ self.functions.pack['__context__']['retcode'] = 0
- ret['return'] = self.functions[func](*args, **kwargs)
-
- if not self.standalone:
---
-2.15.1
-
-
diff --git a/make-it-possible-to-use-login-pull-and-push-from-mod.patch b/make-it-possible-to-use-login-pull-and-push-from-mod.patch
deleted file mode 100644
index 8d5d401..0000000
--- a/make-it-possible-to-use-login-pull-and-push-from-mod.patch
+++ /dev/null
@@ -1,119 +0,0 @@
-From d0b7808f63a32c15249a8adbed048859dfac21a8 Mon Sep 17 00:00:00 2001
-From: Michael Calmer
-Date: Thu, 22 Mar 2018 08:56:58 +0100
-Subject: [PATCH] make it possible to use login, pull and push from
- module.run and detect errors
-
-when using state.apply module.run doing docker operations retcode
-is tracked to find out if the call was successful or not.
-
-add unit test for failed login
----
- salt/modules/dockermod.py | 14 ++++++++++----
- tests/unit/modules/test_dockermod.py | 20 ++++++++++++++++++++
- 2 files changed, 30 insertions(+), 4 deletions(-)
-
-diff --git a/salt/modules/dockermod.py b/salt/modules/dockermod.py
-index 23cff8806b..c20b73452b 100644
---- a/salt/modules/dockermod.py
-+++ b/salt/modules/dockermod.py
-@@ -1354,7 +1354,7 @@ def login(*registries):
- # information is added to the config.json, since docker-py isn't designed
- # to do so.
- registry_auth = __pillar__.get('docker-registries', {})
-- ret = {}
-+ ret = {'retcode': 0}
- errors = ret.setdefault('Errors', [])
- if not isinstance(registry_auth, dict):
- errors.append('\'docker-registries\' Pillar value must be a dictionary')
-@@ -1412,6 +1412,8 @@ def login(*registries):
- errors.append(login_cmd['stderr'])
- elif login_cmd['stdout']:
- errors.append(login_cmd['stdout'])
-+ if errors:
-+ ret['retcode'] = 1
- return ret
-
-
-@@ -4490,7 +4492,7 @@ def pull(image,
-
- time_started = time.time()
- response = _client_wrapper('pull', image, **kwargs)
-- ret = {'Time_Elapsed': time.time() - time_started}
-+ ret = {'Time_Elapsed': time.time() - time_started, 'retcode': 0}
- _clear_context()
-
- if not response:
-@@ -4523,6 +4525,7 @@ def pull(image,
-
- if errors:
- ret['Errors'] = errors
-+ ret['retcode'] = 1
- return ret
-
-
-@@ -4585,7 +4588,7 @@ def push(image,
-
- time_started = time.time()
- response = _client_wrapper('push', image, **kwargs)
-- ret = {'Time_Elapsed': time.time() - time_started}
-+ ret = {'Time_Elapsed': time.time() - time_started, 'retcode': 0}
- _clear_context()
-
- if not response:
-@@ -4617,6 +4620,7 @@ def push(image,
-
- if errors:
- ret['Errors'] = errors
-+ ret['retcode'] = 1
- return ret
-
-
-@@ -4688,9 +4692,11 @@ def rmi(*names, **kwargs):
-
- _clear_context()
- ret = {'Layers': [x for x in pre_images if x not in images(all=True)],
-- 'Tags': [x for x in pre_tags if x not in list_tags()]}
-+ 'Tags': [x for x in pre_tags if x not in list_tags()],
-+ 'retcode': 0}
- if errors:
- ret['Errors'] = errors
-+ ret['retcode'] = 1
- return ret
-
-
-diff --git a/tests/unit/modules/test_dockermod.py b/tests/unit/modules/test_dockermod.py
-index 4e061ce369..77c4bcfb85 100644
---- a/tests/unit/modules/test_dockermod.py
-+++ b/tests/unit/modules/test_dockermod.py
-@@ -64,6 +64,26 @@ class DockerTestCase(TestCase, LoaderModuleMockMixin):
- '''
- docker_mod.__context__.pop('docker.client', None)
-
-+ def test_failed_login(self):
-+ '''
-+ Check that when docker.login fails a retcode other than 0
-+ is part of the return.
-+ '''
-+ client = Mock()
-+ get_client_mock = MagicMock(return_value=client)
-+ ref_out = {
-+ 'stdout': '',
-+ 'stderr': 'login failed',
-+ 'retcode': 1
-+ }
-+ with patch.dict(docker_mod.__pillar__, {'docker-registries': {'portus.example.com:5000':
-+ {'username': 'admin', 'password': 'linux12345', 'email': 'tux@example.com'}}}):
-+ with patch.object(docker_mod, '_get_client', get_client_mock):
-+ with patch.dict(docker_mod.__salt__, {'cmd.run_all': MagicMock(return_value=ref_out)}):
-+ ret = docker_mod.login('portus.example.com:5000')
-+ self.assertTrue('retcode' in ret)
-+ self.assertTrue(ret['retcode'] > 0)
-+
- def test_ps_with_host_true(self):
- '''
- Check that docker.ps called with host is ``True``,
---
-2.16.2
-
-
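The dropped patch above (superseded by the 2018.3.2 rebase) relies on a simple convention: each docker module function seeds its return dict with retcode 0 and flips it to 1 once any error has been collected, so a state doing module.run can tell success from failure. A standalone sketch of that convention, not the dockermod code itself; do_work and tasks are made-up names:

# Illustration of the retcode convention the deleted patch applied to
# docker.login/pull/push.
def do_work(tasks):
    ret = {'retcode': 0}
    errors = ret.setdefault('Errors', [])
    for task in tasks:
        try:
            task()
        except Exception as exc:
            errors.append(str(exc))   # collect errors instead of raising
    if errors:
        ret['retcode'] = 1            # non-zero retcode marks the call as failed
    return ret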
diff --git a/move-log_file-option-to-changeable-defaults.patch b/move-log_file-option-to-changeable-defaults.patch
deleted file mode 100644
index 849089c..0000000
--- a/move-log_file-option-to-changeable-defaults.patch
+++ /dev/null
@@ -1,33 +0,0 @@
-From f77ae8d0426e551d6249b097850da0ed4ff7276d Mon Sep 17 00:00:00 2001
-From: Michael Calmer
-Date: Sun, 11 Feb 2018 19:15:27 +0100
-Subject: [PATCH] move log_file option to changeable defaults
-
----
- salt/client/ssh/__init__.py | 2 +-
- 1 file changed, 1 insertion(+), 1 deletion(-)
-
-diff --git a/salt/client/ssh/__init__.py b/salt/client/ssh/__init__.py
-index 3dd46899e3..f1c1ad9a22 100644
---- a/salt/client/ssh/__init__.py
-+++ b/salt/client/ssh/__init__.py
-@@ -884,6 +884,7 @@ class Single(object):
- # Pre apply changeable defaults
- self.minion_opts = {
- 'grains_cache': True,
-+ 'log_file': 'salt-call.log',
- }
- self.minion_opts.update(opts.get('ssh_minion_opts', {}))
- if minion_opts is not None:
-@@ -893,7 +894,6 @@ class Single(object):
- 'root_dir': os.path.join(self.thin_dir, 'running_data'),
- 'id': self.id,
- 'sock_dir': '/',
-- 'log_file': 'salt-call.log',
- 'fileserver_list_cache_time': 3,
- })
- self.minion_config = salt.serializers.yaml.serialize(self.minion_opts)
---
-2.16.2
-
-
diff --git a/option-to-merge-current-pillar-with-opts-pillar-duri.patch b/option-to-merge-current-pillar-with-opts-pillar-duri.patch
index 49f64d7..cef690c 100644
--- a/option-to-merge-current-pillar-with-opts-pillar-duri.patch
+++ b/option-to-merge-current-pillar-with-opts-pillar-duri.patch
@@ -1,4 +1,4 @@
-From 0cfa4f2a1cf559f87286069691a2766cb24f6076 Mon Sep 17 00:00:00 2001
+From e8c1b2c5a8af5cc6f4551918f695d1463a6eb584 Mon Sep 17 00:00:00 2001
From: Matei Albu
Date: Sun, 6 May 2018 21:15:58 +0200
Subject: [PATCH] Option to merge current pillar with opts['pillar']
@@ -13,7 +13,7 @@ Fixes #47501
3 files changed, 38 insertions(+), 1 deletion(-)
diff --git a/doc/ref/configuration/minion.rst b/doc/ref/configuration/minion.rst
-index 9683a0a20a..75ad26c723 100644
+index c9010a702b..d9823b78d8 100644
--- a/doc/ref/configuration/minion.rst
+++ b/doc/ref/configuration/minion.rst
@@ -3219,3 +3219,31 @@ URL of the repository:
@@ -49,10 +49,10 @@ index 9683a0a20a..75ad26c723 100644
+If set to ``False`` only the overriding pillar data will be available
+to the ``showpillar`` state.
diff --git a/salt/config/__init__.py b/salt/config/__init__.py
-index b3de3820b0..82d3dfa07f 100644
+index 432364b201..feda0abac1 100644
--- a/salt/config/__init__.py
+++ b/salt/config/__init__.py
-@@ -983,6 +983,7 @@ VALID_OPTS = {
+@@ -989,6 +989,7 @@ VALID_OPTS = {
'ssh_identities_only': bool,
'ssh_log_file': six.string_types,
'ssh_config_file': six.string_types,
@@ -60,7 +60,7 @@ index b3de3820b0..82d3dfa07f 100644
# Enable ioflo verbose logging. Warning! Very verbose!
'ioflo_verbose': int,
-@@ -1476,6 +1477,7 @@ DEFAULT_MINION_OPTS = {
+@@ -1485,6 +1486,7 @@ DEFAULT_MINION_OPTS = {
},
'discovery': False,
'schedule': {},
@@ -68,7 +68,7 @@ index b3de3820b0..82d3dfa07f 100644
}
DEFAULT_MASTER_OPTS = {
-@@ -2078,7 +2080,7 @@ def _validate_ssh_minion_opts(opts):
+@@ -2089,7 +2091,7 @@ def _validate_ssh_minion_opts(opts):
for opt_name in list(ssh_minion_opts):
if re.match('^[a-z0-9]+fs_', opt_name, flags=re.IGNORECASE) \
@@ -78,10 +78,10 @@ index b3de3820b0..82d3dfa07f 100644
log.warning(
'\'%s\' is not a valid ssh_minion_opts parameter, ignoring',
diff --git a/salt/pillar/__init__.py b/salt/pillar/__init__.py
-index 388b774434..5940b7c105 100644
+index fc1e34f75d..fc3ce0a5c0 100644
--- a/salt/pillar/__init__.py
+++ b/salt/pillar/__init__.py
-@@ -993,6 +993,13 @@ class Pillar(object):
+@@ -1014,6 +1014,13 @@ class Pillar(object):
mopts['file_roots'] = self.actual_file_roots
mopts['saltversion'] = __version__
pillar['master'] = mopts
@@ -96,6 +96,6 @@ index 388b774434..5940b7c105 100644
for error in errors:
log.critical('Pillar render error: %s', error)
--
-2.13.6
+2.13.7
diff --git a/prevent-zypper-from-parsing-repo-configuration-from-.patch b/prevent-zypper-from-parsing-repo-configuration-from-.patch
new file mode 100644
index 0000000..e511dc7
--- /dev/null
+++ b/prevent-zypper-from-parsing-repo-configuration-from-.patch
@@ -0,0 +1,28 @@
+From d282de5c59e27c17bd5afb207c4eeaa754993368 Mon Sep 17 00:00:00 2001
+From: =?UTF-8?q?Pablo=20Su=C3=A1rez=20Hern=C3=A1ndez?=
+
+Date: Tue, 22 May 2018 12:04:48 +0100
+Subject: [PATCH] Prevent zypper from parsing repo configuration from not
+ .repo files
+
+---
+ salt/modules/zypper.py | 2 +-
+ 1 file changed, 1 insertion(+), 1 deletion(-)
+
+diff --git a/salt/modules/zypper.py b/salt/modules/zypper.py
+index 06f8335c18..05ba3d86c9 100644
+--- a/salt/modules/zypper.py
++++ b/salt/modules/zypper.py
+@@ -862,7 +862,7 @@ def _get_configured_repos():
+ '''
+
+ repos_cfg = configparser.ConfigParser()
+- repos_cfg.read([REPOS + '/' + fname for fname in os.listdir(REPOS)])
++ repos_cfg.read([REPOS + '/' + fname for fname in os.listdir(REPOS) if fname.endswith(".repo")])
+
+ return repos_cfg
+
+--
+2.13.7
+
+
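This one-liner matters because configparser does not silently skip files it cannot parse: read() only ignores files it fails to open, while malformed content (an editor backup, a half-written file) raises a parsing error, and stray *.rpmsave copies can inject stale repo sections. A small sketch of the same suffix filter outside Salt; REPO_DIR is assumed to be zypper's repo directory:

# Sketch of the suffix filter from the patch, independent of Salt.
import os
import configparser

REPO_DIR = '/etc/zypp/repos.d'   # assumed location of zypper .repo files

def read_repo_configs(repo_dir=REPO_DIR):
    cfg = configparser.ConfigParser()
    # Only *.repo files are read; anything else in the directory is ignored
    # instead of being fed to the INI parser.
    cfg.read([os.path.join(repo_dir, fname)
              for fname in os.listdir(repo_dir)
              if fname.endswith('.repo')])
    return cfg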
diff --git a/provide-kwargs-to-pkg_resource.parse_targets-require.patch b/provide-kwargs-to-pkg_resource.parse_targets-require.patch
deleted file mode 100644
index 139352d..0000000
--- a/provide-kwargs-to-pkg_resource.parse_targets-require.patch
+++ /dev/null
@@ -1,37 +0,0 @@
-From f7af1739a5795de6f98cfe2856372c755711e6dc Mon Sep 17 00:00:00 2001
-From: Michael Calmer
-Date: Wed, 18 Apr 2018 17:19:18 +0200
-Subject: [PATCH] provide kwargs to pkg_resource.parse_targets required
- to detect advisory type
-
-fix invalid string compare
----
- salt/modules/yumpkg.py | 4 ++--
- 1 file changed, 2 insertions(+), 2 deletions(-)
-
-diff --git a/salt/modules/yumpkg.py b/salt/modules/yumpkg.py
-index 39abb77fbc..9eb27e7701 100644
---- a/salt/modules/yumpkg.py
-+++ b/salt/modules/yumpkg.py
-@@ -1322,7 +1322,7 @@ def install(name=None,
-
- try:
- pkg_params, pkg_type = __salt__['pkg_resource.parse_targets'](
-- name, pkgs, sources, saltenv=saltenv, normalize=normalize
-+ name, pkgs, sources, saltenv=saltenv, normalize=normalize, **kwargs
- )
- except MinionError as exc:
- raise CommandExecutionError(exc)
-@@ -1620,7 +1620,7 @@ def install(name=None,
- if _yum() == 'dnf':
- cmd.extend(['--best', '--allowerasing'])
- _add_common_args(cmd)
-- cmd.append('install' if pkg_type is not 'advisory' else 'update')
-+ cmd.append('install' if pkg_type != 'advisory' else 'update')
- cmd.extend(targets)
- out = __salt__['cmd.run_all'](
- cmd,
---
-2.15.1
-
-
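Besides forwarding **kwargs, the deleted patch also replaced an identity check with an equality check: `pkg_type is not 'advisory'` compares object identity, and whether two equal strings are the same object is a CPython implementation detail (newer CPython versions even emit a SyntaxWarning for `is` with a literal). A quick illustration of the difference:

# Why `pkg_type is not 'advisory'` was wrong: `is` compares identity, not value.
a = 'advisory'
b = ''.join(['advis', 'ory'])   # equal value, but a distinct object in CPython
assert a == b                   # value equality holds
assert a is not b               # identity does not
# The deleted hunk switched the check to `!=`, which compares values.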
diff --git a/remove-obsolete-unicode-handling-in-pkg.info_install.patch b/remove-obsolete-unicode-handling-in-pkg.info_install.patch
deleted file mode 100644
index 47e4bee..0000000
--- a/remove-obsolete-unicode-handling-in-pkg.info_install.patch
+++ /dev/null
@@ -1,46 +0,0 @@
-From dc262b912c63ed0d3152a01c9eaaa3ec3f8e0f7e Mon Sep 17 00:00:00 2001
-From: Mihai Dinca
-Date: Tue, 13 Feb 2018 16:11:20 +0100
-Subject: [PATCH] Remove obsolete unicode handling in pkg.info_installed
-
----
- salt/modules/zypper.py | 15 +++++----------
- 1 file changed, 5 insertions(+), 10 deletions(-)
-
-diff --git a/salt/modules/zypper.py b/salt/modules/zypper.py
-index 51d01c3fc9..16fc877684 100644
---- a/salt/modules/zypper.py
-+++ b/salt/modules/zypper.py
-@@ -309,7 +309,11 @@ class _Zypper(object):
- if self.error_msg and not self.__no_raise and not self.__ignore_repo_failure:
- raise CommandExecutionError('Zypper command failure: {0}'.format(self.error_msg))
-
-- return self._is_xml_mode() and dom.parseString(self.__call_result['stdout']) or self.__call_result['stdout']
-+ return (
-+ self._is_xml_mode() and
-+ dom.parseString(salt.utils.stringutils.to_str(self.__call_result['stdout'])) or
-+ self.__call_result['stdout']
-+ )
-
-
- __zypper__ = _Zypper()
-@@ -482,15 +486,6 @@ def info_installed(*names, **kwargs):
- t_nfo = dict()
- # Translate dpkg-specific keys to a common structure
- for key, value in six.iteritems(pkg_nfo):
-- if isinstance(value, six.string_types):
-- # Check, if string is encoded in a proper UTF-8
-- if six.PY3:
-- value_ = value.encode('UTF-8', 'ignore').decode('UTF-8', 'ignore')
-- else:
-- value_ = value.decode('UTF-8', 'ignore').encode('UTF-8', 'ignore')
-- if value != value_:
-- value = kwargs.get('errors', 'ignore') == 'ignore' and value_ or 'N/A (invalid UTF-8)'
-- log.error('Package %s has bad UTF-8 code in %s: %s', pkg_name, key, value)
- if key == 'source_rpm':
- t_nfo['source'] = value
- else:
---
-2.16.2
-
-
diff --git a/remove-old-hack-when-reporting-multiversion-packages.patch b/remove-old-hack-when-reporting-multiversion-packages.patch
new file mode 100644
index 0000000..7e3e5fc
--- /dev/null
+++ b/remove-old-hack-when-reporting-multiversion-packages.patch
@@ -0,0 +1,51 @@
+From 11186ce52ae42967c49a6e238659a566e488a6b4 Mon Sep 17 00:00:00 2001
+From: =?UTF-8?q?Pablo=20Su=C3=A1rez=20Hern=C3=A1ndez?=
+
+Date: Mon, 23 Jul 2018 16:32:26 +0100
+Subject: [PATCH] Remove old hack when reporting multiversion packages
+
+Fix unit tests for zypper pkg.upgrade
+---
+ salt/modules/zypper.py | 5 -----
+ tests/unit/modules/test_zypper.py | 8 +++++++-
+ 2 files changed, 7 insertions(+), 6 deletions(-)
+
+diff --git a/salt/modules/zypper.py b/salt/modules/zypper.py
+index 4689f84926..695bce4f4e 100644
+--- a/salt/modules/zypper.py
++++ b/salt/modules/zypper.py
+@@ -1480,11 +1480,6 @@ def upgrade(refresh=True,
+ __zypper__(systemd_scope=_systemd_scope()).noraise.call(*cmd_update)
+ _clean_cache()
+ new = list_pkgs()
+-
+- # Handle packages which report multiple new versions
+- # (affects only kernel packages at this point)
+- for pkg in new:
+- new[pkg] = new[pkg].split(',')[-1]
+ ret = salt.utils.data.compare_dicts(old, new)
+
+ if __zypper__.exit_code not in __zypper__.SUCCESS_EXIT_CODES:
+diff --git a/tests/unit/modules/test_zypper.py b/tests/unit/modules/test_zypper.py
+index bb15aca11a..424438c8bf 100644
+--- a/tests/unit/modules/test_zypper.py
++++ b/tests/unit/modules/test_zypper.py
+@@ -429,7 +429,13 @@ class ZypperTestCase(TestCase, LoaderModuleMockMixin):
+ zypper_mock.assert_any_call('update', '--auto-agree-with-licenses')
+
+ with patch('salt.modules.zypper.list_pkgs',
+- MagicMock(side_effect=[{"vim": "1.1"}, {"vim": "1.1,1.2"}])):
++ MagicMock(side_effect=[{"kernel-default": "1.1"}, {"kernel-default": "1.1,1.2"}])):
++ ret = zypper.upgrade()
++ self.assertDictEqual(ret, {"kernel-default": {"old": "1.1", "new": "1.1,1.2"}})
++ zypper_mock.assert_any_call('update', '--auto-agree-with-licenses')
++
++ with patch('salt.modules.zypper.list_pkgs',
++ MagicMock(side_effect=[{"vim": "1.1"}, {"vim": "1.2"}])):
+ ret = zypper.upgrade()
+ self.assertDictEqual(ret, {"vim": {"old": "1.1", "new": "1.2"}})
+ zypper_mock.assert_any_call('update', '--auto-agree-with-licenses')
+--
+2.17.1
+
+
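With the comma-splitting hack removed, pkg.upgrade reports the full comma-joined version string for multiversion packages instead of keeping only the last entry, which is exactly what the adjusted kernel-default test asserts. A simplified stand-in for salt.utils.data.compare_dicts (not the real implementation) shows the resulting change entry:

# Simplified stand-in for salt.utils.data.compare_dicts, for illustration only.
def compare_dicts(old, new):
    changes = {}
    for key in set(old) | set(new):
        if old.get(key) != new.get(key):
            changes[key] = {'old': old.get(key, ''), 'new': new.get(key, '')}
    return changes

before = {'kernel-default': '1.1'}
after = {'kernel-default': '1.1,1.2'}   # both installed versions, comma-joined
assert compare_dicts(before, after) == {
    'kernel-default': {'old': '1.1', 'new': '1.1,1.2'},
}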
diff --git a/run-salt-api-as-user-salt-bsc-1064520.patch b/run-salt-api-as-user-salt-bsc-1064520.patch
index 9e68400..0b448df 100644
--- a/run-salt-api-as-user-salt-bsc-1064520.patch
+++ b/run-salt-api-as-user-salt-bsc-1064520.patch
@@ -1,4 +1,4 @@
-From 92f41027bc08be3e14a47bbf7f43205a60606643 Mon Sep 17 00:00:00 2001
+From e9b5c0ae02552eb9a76488da32217a0e339d86a2 Mon Sep 17 00:00:00 2001
From: Christian Lanig
Date: Mon, 27 Nov 2017 13:10:26 +0100
Subject: [PATCH] Run salt-api as user salt (bsc#1064520)
@@ -20,6 +20,6 @@ index 7ca582dfb4..bf513e4dbd 100644
ExecStart=/usr/bin/salt-api
TimeoutStopSec=3
--
-2.16.2
+2.13.7
diff --git a/run-salt-master-as-dedicated-salt-user.patch b/run-salt-master-as-dedicated-salt-user.patch
index 8319161..9266201 100644
--- a/run-salt-master-as-dedicated-salt-user.patch
+++ b/run-salt-master-as-dedicated-salt-user.patch
@@ -1,4 +1,4 @@
-From 04906c9a9c1b9fdbc6854a017e92525acd167bc7 Mon Sep 17 00:00:00 2001
+From 3d4be53c265dffdbfaf1d7d4764c361a640fd5ff Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Klaus=20K=C3=A4mpf?=
Date: Wed, 20 Jan 2016 11:01:06 +0100
Subject: [PATCH] Run salt master as dedicated salt user
@@ -10,7 +10,7 @@ Subject: [PATCH] Run salt master as dedicated salt user
2 files changed, 4 insertions(+), 1 deletion(-)
diff --git a/conf/master b/conf/master
-index 986898436a..8461101210 100644
+index 149fe8812f..d492aef6df 100644
--- a/conf/master
+++ b/conf/master
@@ -25,7 +25,8 @@
@@ -42,6 +42,6 @@ index 3cd002308e..0d99d1b801 100644
missingok
rotate 7
--
-2.16.2
+2.13.7
diff --git a/salt.changes b/salt.changes
index 688aec0..e7b7269 100644
--- a/salt.changes
+++ b/salt.changes
@@ -1,3 +1,58 @@
+-------------------------------------------------------------------
+Mon Jul 30 10:42:01 UTC 2018 - mihai.dinca@suse.com
+
+- Update to 2018.3.2
+ See https://docs.saltstack.com/en/latest/topics/releases/2018.3.2.html
+ for full changelog
+
+- Added:
+ * accounting-for-when-files-in-an-archive-contain-non-.patch
+ * add-all_versions-parameter-to-include-all-installed-.patch
+ * add-custom-suse-capabilities-as-grains.patch
+ * add-engine-relaying-libvirt-events.patch
+ * add-environment-variable-to-know-if-yum-is-invoked-f.patch
+ * add-other-attribute-to-gecos-fields-to-avoid-inconsi.patch
+ * align-suse-salt-master.service-limitnofiles-limit-wi.patch
+ * avoid-incomprehensive-message-if-crashes.patch
+ * fix-deprecation-warning-bsc-1095507.patch
+ * fix-diffing-binary-files-in-file.get_diff-bsc-109839.patch
+ * fix-unboundlocalerror-in-file.get_diff.patch
+ * fix-zypper.list_pkgs-to-be-aligned-with-pkg-state.patch
+ * prevent-zypper-from-parsing-repo-configuration-from-.patch
+ * remove-old-hack-when-reporting-multiversion-packages.patch
+ * show-recommendations-for-salt-ssh-cross-version-pyth.patch
+
+- Modified:
+ * activate-all-beacons-sources-config-pillar-grains.patch
+ * add-saltssh-multi-version-support-across-python-inte.patch
+ * avoid-excessive-syslogging-by-watchdog-cronjob-58.patch
+ * do-not-override-jid-on-returners-only-sending-back-t.patch
+ * enable-passing-a-unix_socket-for-mysql-returners-bsc.patch
+ * fall-back-to-pymysql.patch
+ * feat-add-grain-for-all-fqdns.patch
+ * fix-bsc-1065792.patch
+ * fix-decrease-loglevel-when-unable-to-resolve-addr.patch
+ * fix-for-ec2-rate-limit-failures.patch
+ * fix-for-errno-0-resolver-error-0-no-error-bsc-108758.patch
+ * fixed-usage-of-ipaddress.patch
+ * option-to-merge-current-pillar-with-opts-pillar-duri.patch
+ * run-salt-api-as-user-salt-bsc-1064520.patch
+ * run-salt-master-as-dedicated-salt-user.patch
+ * strip-trailing-commas-on-linux-user-gecos-fields.patch
+
+- Deleted:
+ * explore-module.run-response-to-catch-the-result-in-d.patch
+ * extra-filerefs-include-files-even-if-no-refs-in-stat.patch
+ * fix-cp.push-empty-file.patch
+ * fix-for-sorting-of-multi-version-packages-bsc-109717.patch
+ * fix-openscap-push.patch
+ * initialize-__context__-retcode-for-functions-handled.patch
+ * make-it-possible-to-use-login-pull-and-push-from-mod.patch
+ * move-log_file-option-to-changeable-defaults.patch
+ * provide-kwargs-to-pkg_resource.parse_targets-require.patch
+ * remove-obsolete-unicode-handling-in-pkg.info_install.patch
+
+
-------------------------------------------------------------------
Thu May 17 15:14:01 UTC 2018 - Jochen Breuer
diff --git a/salt.spec b/salt.spec
index f11d52a..086d54c 100644
--- a/salt.spec
+++ b/salt.spec
@@ -1,7 +1,7 @@
#
# spec file for package salt
#
-# Copyright (c) 2017 SUSE LINUX GmbH, Nuernberg, Germany.
+# Copyright (c) 2018 SUSE LINUX GmbH, Nuernberg, Germany.
#
# All modifications and additions to the file contributed by third parties
# remain the property of their copyright owners, unless otherwise agreed
@@ -15,6 +15,7 @@
# Please submit bugfixes or comments via http://bugs.opensuse.org/
#
+
%if 0%{?suse_version} >= 1320
# SLE15
%global build_py3 1
@@ -52,7 +53,7 @@
%bcond_with builddocs
Name: salt
-Version: 2018.3.0
+Version: 2018.3.2
Release: 0
Summary: A parallel remote execution system
License: Apache-2.0
@@ -65,49 +66,66 @@ Source3: html.tar.bz2
Source4: update-documentation.sh
Source5: travis.yml
-Patch1: run-salt-master-as-dedicated-salt-user.patch
-Patch2: run-salt-api-as-user-salt-bsc-1064520.patch
-Patch3: activate-all-beacons-sources-config-pillar-grains.patch
-Patch4: avoid-excessive-syslogging-by-watchdog-cronjob-58.patch
-Patch5: feat-add-grain-for-all-fqdns.patch
-Patch6: fix-bsc-1065792.patch
-# PATCH-FIX_UPSTREAM https://github.com/saltstack/salt/pull/46006
-Patch7: remove-obsolete-unicode-handling-in-pkg.info_install.patch
-Patch8: fix-openscap-push.patch
-# PATCH-FIX_UPSTREAM https://github.com/saltstack/salt/pull/45972
-Patch9: move-log_file-option-to-changeable-defaults.patch
-# PATCH-FIX_UPSTREAM https://github.com/saltstack/salt/pull/46416
-Patch10: fix-cp.push-empty-file.patch
+Patch1: run-salt-master-as-dedicated-salt-user.patch
+Patch2: run-salt-api-as-user-salt-bsc-1064520.patch
+Patch3: activate-all-beacons-sources-config-pillar-grains.patch
+Patch4: avoid-excessive-syslogging-by-watchdog-cronjob-58.patch
+Patch5: feat-add-grain-for-all-fqdns.patch
+Patch6: fix-bsc-1065792.patch
# PATCH-FIX_UPSTREAM https://github.com/saltstack/salt/pull/46575
-Patch11: fix-decrease-loglevel-when-unable-to-resolve-addr.patch
-# PATCH-FIX_UPSTREAM https://github.com/saltstack/salt/pull/46643
-Patch12: make-it-possible-to-use-login-pull-and-push-from-mod.patch
-# PATCH-FIX_UPSTREAM https://github.com/saltstack/salt/pull/46413
-Patch13: explore-module.run-response-to-catch-the-result-in-d.patch
+Patch7: fix-decrease-loglevel-when-unable-to-resolve-addr.patch
# PATCH-FIX_UPSTREAM https://github.com/saltstack/salt/pull/46684
-Patch14: add-saltssh-multi-version-support-across-python-inte.patch
+Patch8: add-saltssh-multi-version-support-across-python-inte.patch
# PATCH-FIX_UPSTREAM https://github.com/saltstack/salt/pull/46635
-Patch15: fix-for-errno-0-resolver-error-0-no-error-bsc-108758.patch
+Patch9: fix-for-errno-0-resolver-error-0-no-error-bsc-108758.patch
# PATCH-FIX_UPSTREAM https://github.com/saltstack/salt/pull/46890
-Patch16: fall-back-to-pymysql.patch
+Patch10: fall-back-to-pymysql.patch
# PATCH-FIX_UPSTREAM https://github.com/saltstack/salt/pull/47149
-Patch17: strip-trailing-commas-on-linux-user-gecos-fields.patch
-# PATCH-FIX_UPSTREAM https://github.com/saltstack/salt/pull/47155
-Patch18: provide-kwargs-to-pkg_resource.parse_targets-require.patch
-# PATCH-FIX_UPSTREAM https://github.com/saltstack/salt/pull/47270
-Patch19: initialize-__context__-retcode-for-functions-handled.patch
+Patch11: strip-trailing-commas-on-linux-user-gecos-fields.patch
# PATCH-FIX_UPSTREAM https://github.com/saltstack/salt/pull/47232
-Patch20: fixed-usage-of-ipaddress.patch
-# PATCH-FIX_UPSTREAM https://github.com/saltstack/salt/pull/47497
-Patch21: extra-filerefs-include-files-even-if-no-refs-in-stat.patch
+Patch12: fixed-usage-of-ipaddress.patch
# PATCH-FIX_UPSTREAM https://github.com/saltstack/salt/pull/47504
-Patch22: option-to-merge-current-pillar-with-opts-pillar-duri.patch
+Patch13: option-to-merge-current-pillar-with-opts-pillar-duri.patch
# PATCH-FIX_UPSTREAM https://github.com/saltstack/salt/pull/47471
-Patch23: do-not-override-jid-on-returners-only-sending-back-t.patch
+Patch14: do-not-override-jid-on-returners-only-sending-back-t.patch
# PATCH-FIX_OPENSUSE bsc#1091371
-Patch24: enable-passing-a-unix_socket-for-mysql-returners-bsc.patch
+Patch15: enable-passing-a-unix_socket-for-mysql-returners-bsc.patch
# PATCH-FIX_UPSTREAM https://github.com/saltstack/salt/pull/47211
-Patch25: fix-for-ec2-rate-limit-failures.patch
+Patch16: fix-for-ec2-rate-limit-failures.patch
+# PATCH-FIX_UPSTREAM https://github.com/saltstack/salt/pull/47638
+Patch17: add-all_versions-parameter-to-include-all-installed-.patch
+# PATCH-FIX_UPSTREAM https://github.com/saltstack/salt/pull/47765
+Patch18: prevent-zypper-from-parsing-repo-configuration-from-.patch
+# PATCH-FIX_UPSTREAM https://github.com/saltstack/salt/pull/47149
+Patch19: add-other-attribute-to-gecos-fields-to-avoid-inconsi.patch
+# PATCH-FIX_UPSTREAM https://github.com/saltstack/salt/pull/47908
+Patch20: align-suse-salt-master.service-limitnofiles-limit-wi.patch
+# PATCH-FIX_OPENSUSE bsc#1095507
+Patch21: fix-deprecation-warning-bsc-1095507.patch
+# PATCH-FIX_OPENSUSE bsc#1057635
+Patch22: add-environment-variable-to-know-if-yum-is-invoked-f.patch
+# PATCH-FIX_OPENSUSE
+Patch23: add-custom-suse-capabilities-as-grains.patch
+# PATCH-FIX_OPENSUSE bsc#1098394 https://github.com/saltstack/salt/pull/47061
+Patch24: fix-diffing-binary-files-in-file.get_diff-bsc-109839.patch
+# PATCH-FIX_OPENSUSE bsc#1072599
+Patch25: show-recommendations-for-salt-ssh-cross-version-pyth.patch
+# PATCH-FIX_UPSTREAM https://github.com/saltstack/salt/pull/47405
+Patch26: fix-unboundlocalerror-in-file.get_diff.patch
+# PATCH-FIX_UPSTREAM https://github.com/saltstack/salt/pull/48294
+Patch27: fix-zypper.list_pkgs-to-be-aligned-with-pkg-state.patch
+# PATCH-FIX_UPSTREAM https://github.com/saltstack/salt/pull/47572
+# PATCH-FIX_UPSTREAM https://github.com/saltstack/salt/pull/48015
+Patch28: accounting-for-when-files-in-an-archive-contain-non-.patch
+# PATCH-FIX_UPSTREAM https://github.com/saltstack/salt/pull/48712
+Patch29: remove-old-hack-when-reporting-multiversion-packages.patch
+# PATCH-FIX_UPSTREAM https://github.com/saltstack/salt/pull/46461
+# PATCH-FIX_UPSTREAM https://github.com/saltstack/salt/pull/46928
+# PATCH-FIX_UPSTREAM https://github.com/saltstack/salt/pull/46957
+# PATCH-FIX_UPSTREAM https://github.com/saltstack/salt/pull/47243
+Patch30: add-engine-relaying-libvirt-events.patch
+# PATCH-FIX_UPSTREAM https://github.com/saltstack/salt/pull/48781
+Patch31: avoid-incomprehensive-message-if-crashes.patch
# BuildRoot: %{_tmppath}/%{name}-%{version}-build
BuildRoot: %{_tmppath}/%{name}-%{version}-build
@@ -116,7 +134,7 @@ BuildRequires: logrotate
BuildRequires: fdupes
%endif
-Requires: %{pythonX}-%{name} = %{version}-%{release}
+Requires: %{pythonX}-%{name} = %{version}-%{release}
Requires(pre): %{_sbindir}/groupadd
Requires(pre): %{_sbindir}/useradd
@@ -136,12 +154,12 @@ Requires: logrotate
Requires: procps
%if 0%{?suse_version} >= 1500
-Requires: iproute2
+Requires: iproute2
%else
%if 0%{?suse_version}
-Requires: net-tools
+Requires: net-tools
%else
-Requires: iproute
+Requires: iproute
%endif
%endif
@@ -193,12 +211,12 @@ BuildRequires: python-devel >= 2.7
# requirements/base.txt
%if 0%{?rhel}
BuildRequires: python-jinja2
-BuildRequires: python-yaml
BuildRequires: python-markupsafe
+BuildRequires: python-yaml
%else
BuildRequires: python-Jinja2
-BuildRequires: python-PyYAML
BuildRequires: python-MarkupSafe
+BuildRequires: python-PyYAML
%endif
BuildRequires: python-futures >= 2.0
@@ -231,16 +249,16 @@ Requires: python-certifi
# requirements/base.txt
%if 0%{?rhel}
Requires: python-jinja2
-Requires: python-yaml
Requires: python-markupsafe
+Requires: python-yaml
Requires: yum
%if 0%{?rhel} == 6
Requires: yum-plugin-security
%endif
%else
Requires: python-Jinja2
-Requires: python-PyYAML
Requires: python-MarkupSafe
+Requires: python-PyYAML
%endif
Requires: python-futures >= 2.0
@@ -280,7 +298,7 @@ Summary: python3 library for salt
Group: System/Management
Requires: %{name} = %{version}-%{release}
BuildRequires: python-rpm-macros
-BuildRequires: python3
+BuildRequires: python3 < 3.7
BuildRequires: python3-devel
# requirements/base.txt
%if 0%{?rhel}
@@ -311,7 +329,7 @@ BuildRequires: python3-xml
%if %{with builddocs}
BuildRequires: python3-sphinx
%endif
-Requires: python3
+Requires: python3 < 3.7
#
%if ! 0%{?suse_version} > 1110
Requires: python3-certifi
@@ -591,6 +609,12 @@ cp %{S:5} ./.travis.yml
%patch23 -p1
%patch24 -p1
%patch25 -p1
+%patch26 -p1
+%patch27 -p1
+%patch28 -p1
+%patch29 -p1
+%patch30 -p1
+%patch31 -p1
%build
%if 0%{?build_py2}
@@ -1232,7 +1256,8 @@ rm -f %{_localstatedir}/cache/salt/minion/thin/version
%{_mandir}/man1/salt-call.1.gz
%{_mandir}/man1/spm.1.gz
%config(noreplace) %{_sysconfdir}/logrotate.d/salt
-%doc LICENSE AUTHORS README.rst HACKING.rst README.SUSE
+%license LICENSE
+%doc AUTHORS README.rst HACKING.rst README.SUSE
#
%dir %attr(0750, root, salt) %{_sysconfdir}/salt
%dir %attr(0750, root, salt) %{_sysconfdir}/salt/pki
@@ -1288,5 +1313,3 @@ rm -f %{_localstatedir}/cache/salt/minion/thin/version
%endif
%changelog
-
-
diff --git a/show-recommendations-for-salt-ssh-cross-version-pyth.patch b/show-recommendations-for-salt-ssh-cross-version-pyth.patch
new file mode 100644
index 0000000..9d42956
--- /dev/null
+++ b/show-recommendations-for-salt-ssh-cross-version-pyth.patch
@@ -0,0 +1,63 @@
+From 15e97fd2916176fe850850fe90983ac95a1f8e7b Mon Sep 17 00:00:00 2001
+From: Erik Johnson
+Date: Mon, 11 Jun 2018 14:46:58 -0500
+Subject: [PATCH] Show recommendations for salt-ssh cross-version python
+ errors
+
+This shows more accurate information on how to resolve version issues
+(e.g. master only has Salt deps installed for Python 3 but remote host
+has no Python 3 installed).
+
+Use parenthesis for line continuation
+---
+ salt/client/ssh/__init__.py | 26 +++++++++++++++++++++++++-
+ 1 file changed, 25 insertions(+), 1 deletion(-)
+
+diff --git a/salt/client/ssh/__init__.py b/salt/client/ssh/__init__.py
+index f1300b5698..8a85cc2480 100644
+--- a/salt/client/ssh/__init__.py
++++ b/salt/client/ssh/__init__.py
+@@ -1387,6 +1387,30 @@ ARGS = {arguments}\n'''.format(config=self.minion_config,
+ perm_error_fmt = 'Permissions problem, target user may need '\
+ 'to be root or use sudo:\n {0}'
+
++ def _version_mismatch_error():
++ messages = {
++ 2: {
++ 6: 'Install Python 2.7 / Python 3 Salt dependencies on the Salt SSH master \n'
++ 'to interact with Python 2.7 / Python 3 targets',
++ 7: 'Install Python 2.6 / Python 3 Salt dependencies on the Salt SSH master \n'
++ 'to interact with Python 2.6 / Python 3 targets',
++ },
++ 3: {
++ 'default': '- Install Python 2.6/2.7 Salt dependencies on the Salt SSH \n'
++ ' master to interact with Python 2.6/2.7 targets\n'
++ '- Install Python 3 on the target machine(s)',
++ },
++ 'default': 'Matching major/minor Python release (>=2.6) needed both on the Salt SSH \n'
++ 'master and target machine',
++ }
++ major, minor = sys.version_info[:2]
++ help_msg = (
++ messages.get(major, {}).get(minor)
++ or messages.get(major, {}).get('default')
++ or messages['default']
++ )
++ return 'Python version error. Recommendation(s) follow:\n' + help_msg
++
+ errors = [
+ (
+ (),
+@@ -1396,7 +1420,7 @@ ARGS = {arguments}\n'''.format(config=self.minion_config,
+ (
+ (salt.defaults.exitcodes.EX_THIN_PYTHON_INVALID,),
+ 'Python interpreter is too old',
+- 'salt requires python 2.6 or newer on target hosts, must have same major version as origin host'
++ _version_mismatch_error()
+ ),
+ (
+ (salt.defaults.exitcodes.EX_THIN_CHECKSUM,),
+--
+2.13.7
+
+
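The helper added above picks its recommendation from a nested table keyed by the master's Python version: an exact (major, minor) entry first, then the per-major 'default', then the global 'default'. A compact sketch of that lookup order with a trimmed, made-up message table:

# Lookup order of the patch's _version_mismatch_error helper:
# (major, minor) -> per-major 'default' -> global 'default'.
MESSAGES = {
    2: {6: 'py2.6 master advice', 7: 'py2.7 master advice'},
    3: {'default': 'py3 master advice'},
    'default': 'generic advice',
}

def pick_message(major, minor, messages=MESSAGES):
    return (messages.get(major, {}).get(minor)
            or messages.get(major, {}).get('default')
            or messages['default'])

assert pick_message(2, 7) == 'py2.7 master advice'   # exact minor match
assert pick_message(3, 6) == 'py3 master advice'     # per-major default
assert pick_message(4, 0) == 'generic advice'        # global default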
diff --git a/strip-trailing-commas-on-linux-user-gecos-fields.patch b/strip-trailing-commas-on-linux-user-gecos-fields.patch
index e82879b..150c8b4 100644
--- a/strip-trailing-commas-on-linux-user-gecos-fields.patch
+++ b/strip-trailing-commas-on-linux-user-gecos-fields.patch
@@ -1,4 +1,4 @@
-From f9fb3639bb3c44babd92d9499bdde83a0a81d6ab Mon Sep 17 00:00:00 2001
+From f515f99ee42ffaba30cee2e1941a7e9af9db7453 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Pablo=20Su=C3=A1rez=20Hern=C3=A1ndez?=
Date: Wed, 18 Apr 2018 12:05:35 +0100
@@ -50,6 +50,6 @@ index eb983685bb..fa30a0df71 100644
+ expected_gecos_fields = 'Testing'
+ self.assertEqual(useradd._build_gecos(test_gecos), expected_gecos_fields)
--
-2.15.1
+2.13.7
diff --git a/v2018.3.0.tar.gz b/v2018.3.0.tar.gz
deleted file mode 100644
index df8e8b6..0000000
--- a/v2018.3.0.tar.gz
+++ /dev/null
@@ -1,3 +0,0 @@
-version https://git-lfs.github.com/spec/v1
-oid sha256:4310936a99a330fb67d86d430189831b8b7e064357a8faabebd5e0115a7e0dfc
-size 13469511
diff --git a/v2018.3.2.tar.gz b/v2018.3.2.tar.gz
new file mode 100644
index 0000000..7fc7708
--- /dev/null
+++ b/v2018.3.2.tar.gz
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:515df2eed05b1a31101dc8d7cfb52f554ced6db52417a3e9c2096f055807235b
+size 13024996