diff --git a/_lastrevision b/_lastrevision index 047b3de..4543d1d 100644 --- a/_lastrevision +++ b/_lastrevision @@ -1 +1 @@ -2460cb78e6bda580f2567781e060a3e6c6ba25de \ No newline at end of file +65afa65b0d69f90c1cd716474cdddcdc98751274 \ No newline at end of file diff --git a/_service b/_service index 0a94560..61586c5 100644 --- a/_service +++ b/_service @@ -3,7 +3,7 @@ https://github.com/openSUSE/salt-packaging.git salt package - 2018.3.2 + 2019.2.0 git @@ -12,8 +12,8 @@ codeload.github.com - saltstack/salt/tar.gz/v2018.3.2 - v2018.3.2.tar.gz + openSUSE/salt/tar.gz/v2019.2.0-suse + v2019.2.0.tar.gz diff --git a/accounting-for-when-files-in-an-archive-contain-non-.patch b/accounting-for-when-files-in-an-archive-contain-non-.patch deleted file mode 100644 index 3901761..0000000 --- a/accounting-for-when-files-in-an-archive-contain-non-.patch +++ /dev/null @@ -1,158 +0,0 @@ -From 5305ee8bf07e40dc54aefcbb92016ff868135749 Mon Sep 17 00:00:00 2001 -From: "Gareth J. Greenaway" -Date: Wed, 9 May 2018 09:33:58 -0700 -Subject: [PATCH] Accounting for when files in an archive contain - non-ascii characters - -Updating integration/modules/test_archive to include filenames with unicode characters. - -only convert to bytes when using Python2 - -Updating with requested changes. - -Ensure member names are decoded before adding to various lists. - -Adding a test to ensure archive.list returns the right results when a tar file contains a file with unicode in it's name. ---- - salt/modules/archive.py | 13 +++--- - salt/states/archive.py | 4 +- - tests/integration/modules/test_archive.py | 52 ++++++++++++++++++++++- - 3 files changed, 59 insertions(+), 10 deletions(-) - -diff --git a/salt/modules/archive.py b/salt/modules/archive.py -index 48f0efa18e..76cd3eeb97 100644 ---- a/salt/modules/archive.py -+++ b/salt/modules/archive.py -@@ -186,12 +186,13 @@ def list_(name, - else {'fileobj': cached.stdout, 'mode': 'r|'} - with contextlib.closing(tarfile.open(**open_kwargs)) as tar_archive: - for member in tar_archive.getmembers(): -+ _member = salt.utils.data.decode(member.name) - if member.issym(): -- links.append(member.name) -+ links.append(_member) - elif member.isdir(): -- dirs.append(member.name + '/') -+ dirs.append(_member + '/') - else: -- files.append(member.name) -+ files.append(_member) - return dirs, files, links - - except tarfile.ReadError: -@@ -410,9 +411,9 @@ def list_(name, - item.sort() - - if verbose: -- ret = {'dirs': sorted(dirs), -- 'files': sorted(files), -- 'links': sorted(links)} -+ ret = {'dirs': sorted(salt.utils.data.decode_list(dirs)), -+ 'files': sorted(salt.utils.data.decode_list(files)), -+ 'links': sorted(salt.utils.data.decode_list(links))} - ret['top_level_dirs'] = [x for x in ret['dirs'] - if x.count('/') == 1] - ret['top_level_files'] = [x for x in ret['files'] -diff --git a/salt/states/archive.py b/salt/states/archive.py -index 847c5e9914..6838b2202d 100644 ---- a/salt/states/archive.py -+++ b/salt/states/archive.py -@@ -1090,7 +1090,7 @@ def extracted(name, - and not stat.S_ISDIR(x)), - (contents['links'], stat.S_ISLNK)): - for path in path_list: -- full_path = os.path.join(name, path) -+ full_path = salt.utils.path.join(name, path) - try: - path_mode = os.lstat(full_path.rstrip(os.sep)).st_mode - if not func(path_mode): -@@ -1259,7 +1259,7 @@ def extracted(name, - if options is None: - try: - with closing(tarfile.open(cached, 'r')) as tar: -- tar.extractall(name) -+ tar.extractall(salt.utils.stringutils.to_str(name)) - files = tar.getnames() - if trim_output: - files = 
files[:trim_output] -diff --git a/tests/integration/modules/test_archive.py b/tests/integration/modules/test_archive.py -index 59fe2f5f61..4301b9e3b0 100644 ---- a/tests/integration/modules/test_archive.py -+++ b/tests/integration/modules/test_archive.py -@@ -47,7 +47,7 @@ class ArchiveTest(ModuleCase): - self.arch = os.path.join(self.base_path, 'archive.{0}'.format(arch_fmt)) - self.dst = os.path.join(self.base_path, '{0}_dst_dir'.format(arch_fmt)) - -- def _set_up(self, arch_fmt): -+ def _set_up(self, arch_fmt, unicode_filename=False): - ''' - Create source file tree and destination directory - -@@ -62,7 +62,11 @@ class ArchiveTest(ModuleCase): - - # Create source - os.makedirs(self.src) -- with salt.utils.files.fopen(os.path.join(self.src, 'file'), 'w') as theorem: -+ if unicode_filename: -+ filename = 'file®' -+ else: -+ filename = 'file' -+ with salt.utils.files.fopen(os.path.join(self.src, filename), 'w') as theorem: - theorem.write(textwrap.dedent(salt.utils.stringutils.to_str(r'''\ - Compression theorem of computational complexity theory: - -@@ -150,6 +154,50 @@ class ArchiveTest(ModuleCase): - - self._tear_down() - -+ @skipIf(not salt.utils.path.which('tar'), 'Cannot find tar executable') -+ def test_tar_pack_unicode(self): -+ ''' -+ Validate using the tar function to create archives -+ ''' -+ self._set_up(arch_fmt='tar', unicode_filename=True) -+ -+ # Test create archive -+ ret = self.run_function('archive.tar', ['-cvf', self.arch], sources=self.src) -+ self.assertTrue(isinstance(ret, list), six.text_type(ret)) -+ self._assert_artifacts_in_ret(ret) -+ -+ self._tear_down() -+ -+ @skipIf(not salt.utils.path.which('tar'), 'Cannot find tar executable') -+ def test_tar_unpack_unicode(self): -+ ''' -+ Validate using the tar function to extract archives -+ ''' -+ self._set_up(arch_fmt='tar', unicode_filename=True) -+ self.run_function('archive.tar', ['-cvf', self.arch], sources=self.src) -+ -+ # Test extract archive -+ ret = self.run_function('archive.tar', ['-xvf', self.arch], dest=self.dst) -+ self.assertTrue(isinstance(ret, list), six.text_type(ret)) -+ self._assert_artifacts_in_ret(ret) -+ -+ self._tear_down() -+ -+ @skipIf(not salt.utils.path.which('tar'), 'Cannot find tar executable') -+ def test_tar_list_unicode(self): -+ ''' -+ Validate using the tar function to extract archives -+ ''' -+ self._set_up(arch_fmt='tar', unicode_filename=True) -+ self.run_function('archive.tar', ['-cvf', self.arch], sources=self.src) -+ -+ # Test list archive -+ ret = self.run_function('archive.list', name=self.arch) -+ self.assertTrue(isinstance(ret, list), six.text_type(ret)) -+ self._assert_artifacts_in_ret(ret) -+ -+ self._tear_down() -+ - @skipIf(not salt.utils.path.which('gzip'), 'Cannot find gzip executable') - def test_gzip(self): - ''' --- -2.17.1 - - diff --git a/add-all_versions-parameter-to-include-all-installed-.patch b/add-all_versions-parameter-to-include-all-installed-.patch index 324744c..24be312 100644 --- a/add-all_versions-parameter-to-include-all-installed-.patch +++ b/add-all_versions-parameter-to-include-all-installed-.patch @@ -1,4 +1,4 @@ -From 9de54cf6f7d8d6da4212842fef8c4c658a2a9b9c Mon Sep 17 00:00:00 2001 +From c059d617a77184c3bec8159d5197355f3cab8c4e Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Pablo=20Su=C3=A1rez=20Hern=C3=A1ndez?= Date: Mon, 14 May 2018 11:33:13 +0100 @@ -19,259 +19,14 @@ Refactor: use dict.setdefault instead if-else statement Allow removing only specific package versions with zypper and yum --- - salt/modules/rpm.py | 18 ++++++++--- - 
salt/modules/yumpkg.py | 49 ++++++++++++++++++++++-------- - salt/modules/zypper.py | 64 ++++++++++++++++++++++++++++++++------- - salt/states/pkg.py | 33 +++++++++++++++++++- - tests/unit/modules/test_yumpkg.py | 50 ++++++++++++++++++++++++++++++ - tests/unit/modules/test_zypper.py | 50 ++++++++++++++++++++++++++++++ - 6 files changed, 236 insertions(+), 28 deletions(-) + salt/states/pkg.py | 21 +++++++++++++++++++++ + 1 file changed, 21 insertions(+) -diff --git a/salt/modules/rpm.py b/salt/modules/rpm.py -index d065f1e2d9..3683234f59 100644 ---- a/salt/modules/rpm.py -+++ b/salt/modules/rpm.py -@@ -453,7 +453,7 @@ def diff(package, path): - return res - - --def info(*packages, **attr): -+def info(*packages, **kwargs): - ''' - Return a detailed package(s) summary information. - If no packages specified, all packages will be returned. -@@ -467,6 +467,9 @@ def info(*packages, **attr): - version, vendor, release, build_date, build_date_time_t, install_date, install_date_time_t, - build_host, group, source_rpm, arch, epoch, size, license, signature, packager, url, summary, description. - -+ :param all_versions: -+ Return information for all installed versions of the packages -+ - :return: - - CLI example: -@@ -476,7 +479,9 @@ def info(*packages, **attr): - salt '*' lowpkg.info apache2 bash - salt '*' lowpkg.info apache2 bash attr=version - salt '*' lowpkg.info apache2 bash attr=version,build_date_iso,size -+ salt '*' lowpkg.info apache2 bash attr=version,build_date_iso,size all_versions=True - ''' -+ all_versions = kwargs.get('all_versions', False) - # LONGSIZE is not a valid tag for all versions of rpm. If LONGSIZE isn't - # available, then we can just use SIZE for older versions. See Issue #31366. - rpm_tags = __salt__['cmd.run_stdout']( -@@ -516,7 +521,7 @@ def info(*packages, **attr): - "edition": "edition: %|EPOCH?{%{EPOCH}:}|%{VERSION}-%{RELEASE}\\n", - } - -- attr = attr.get('attr', None) and attr['attr'].split(",") or None -+ attr = kwargs.get('attr', None) and kwargs['attr'].split(",") or None - query = list() - if attr: - for attr_k in attr: -@@ -610,8 +615,13 @@ def info(*packages, **attr): - if pkg_name.startswith('gpg-pubkey'): - continue - if pkg_name not in ret: -- ret[pkg_name] = pkg_data.copy() -- del ret[pkg_name]['edition'] -+ if all_versions: -+ ret[pkg_name] = [pkg_data.copy()] -+ else: -+ ret[pkg_name] = pkg_data.copy() -+ del ret[pkg_name]['edition'] -+ elif all_versions: -+ ret[pkg_name].append(pkg_data.copy()) - - return ret - -diff --git a/salt/modules/yumpkg.py b/salt/modules/yumpkg.py -index 747142264d..9ce4926790 100644 ---- a/salt/modules/yumpkg.py -+++ b/salt/modules/yumpkg.py -@@ -994,31 +994,39 @@ def list_downloaded(): - return ret - - --def info_installed(*names): -+def info_installed(*names, **kwargs): - ''' - .. versionadded:: 2015.8.1 - - Return the information of the named package(s), installed on the system. - -+ :param all_versions: -+ Include information for all versions of the packages installed on the minion. -+ - CLI example: - - .. code-block:: bash - - salt '*' pkg.info_installed - salt '*' pkg.info_installed ... 
-+ salt '*' pkg.info_installed all_versions=True - ''' -+ all_versions = kwargs.get('all_versions', False) - ret = dict() -- for pkg_name, pkg_nfo in __salt__['lowpkg.info'](*names).items(): -- t_nfo = dict() -- # Translate dpkg-specific keys to a common structure -- for key, value in pkg_nfo.items(): -- if key == 'source_rpm': -- t_nfo['source'] = value -+ for pkg_name, pkgs_nfo in __salt__['lowpkg.info'](*names, **kwargs).items(): -+ pkg_nfo = pkgs_nfo if all_versions else [pkgs_nfo] -+ for _nfo in pkg_nfo: -+ t_nfo = dict() -+ # Translate dpkg-specific keys to a common structure -+ for key, value in _nfo.items(): -+ if key == 'source_rpm': -+ t_nfo['source'] = value -+ else: -+ t_nfo[key] = value -+ if not all_versions: -+ ret[pkg_name] = t_nfo - else: -- t_nfo[key] = value -- -- ret[pkg_name] = t_nfo -- -+ ret.setdefault(pkg_name, []).append(t_nfo) - return ret - - -@@ -1919,7 +1927,24 @@ def remove(name=None, pkgs=None, **kwargs): # pylint: disable=W0613 - raise CommandExecutionError(exc) - - old = list_pkgs() -- targets = [x for x in pkg_params if x in old] -+ targets = [] -+ for target in pkg_params: -+ # Check if package version set to be removed is actually installed: -+ # old[target] contains a comma-separated list of installed versions -+ if target in old and not pkg_params[target]: -+ targets.append(target) -+ elif target in old and pkg_params[target] in old[target].split(','): -+ arch = '' -+ pkgname = target -+ try: -+ namepart, archpart = target.rsplit('.', 1) -+ except ValueError: -+ pass -+ else: -+ if archpart in salt.utils.pkg.rpm.ARCHES: -+ arch = '.' + archpart -+ pkgname = namepart -+ targets.append('{0}-{1}{2}'.format(pkgname, pkg_params[target], arch)) - if not targets: - return {} - -diff --git a/salt/modules/zypper.py b/salt/modules/zypper.py -index 668143bdd9..06f8335c18 100644 ---- a/salt/modules/zypper.py -+++ b/salt/modules/zypper.py -@@ -470,28 +470,37 @@ def info_installed(*names, **kwargs): - Valid attributes are: - ignore, report - -+ :param all_versions: -+ Include information for all versions of the packages installed on the minion. -+ - CLI example: - - .. code-block:: bash - - salt '*' pkg.info_installed - salt '*' pkg.info_installed ... -- salt '*' pkg.info_installed attr=version,vendor -+ salt '*' pkg.info_installed all_versions=True -+ salt '*' pkg.info_installed attr=version,vendor all_versions=True - salt '*' pkg.info_installed ... attr=version,vendor - salt '*' pkg.info_installed ... attr=version,vendor errors=ignore - salt '*' pkg.info_installed ... 
attr=version,vendor errors=report - ''' -+ all_versions = kwargs.get('all_versions', False) - ret = dict() -- for pkg_name, pkg_nfo in __salt__['lowpkg.info'](*names, **kwargs).items(): -- t_nfo = dict() -- # Translate dpkg-specific keys to a common structure -- for key, value in six.iteritems(pkg_nfo): -- if key == 'source_rpm': -- t_nfo['source'] = value -+ for pkg_name, pkgs_nfo in __salt__['lowpkg.info'](*names, **kwargs).items(): -+ pkg_nfo = pkgs_nfo if all_versions else [pkgs_nfo] -+ for _nfo in pkg_nfo: -+ t_nfo = dict() -+ # Translate dpkg-specific keys to a common structure -+ for key, value in six.iteritems(_nfo): -+ if key == 'source_rpm': -+ t_nfo['source'] = value -+ else: -+ t_nfo[key] = value -+ if not all_versions: -+ ret[pkg_name] = t_nfo - else: -- t_nfo[key] = value -- ret[pkg_name] = t_nfo -- -+ ret.setdefault(pkg_name, []).append(t_nfo) - return ret - - -@@ -1494,7 +1503,14 @@ def _uninstall(name=None, pkgs=None): - raise CommandExecutionError(exc) - - old = list_pkgs() -- targets = [target for target in pkg_params if target in old] -+ targets = [] -+ for target in pkg_params: -+ # Check if package version set to be removed is actually installed: -+ # old[target] contains a comma-separated list of installed versions -+ if target in old and pkg_params[target] in old[target].split(','): -+ targets.append(target + "-" + pkg_params[target]) -+ elif target in old and not pkg_params[target]: -+ targets.append(target) - if not targets: - return {} - -@@ -1517,6 +1533,32 @@ def _uninstall(name=None, pkgs=None): - return ret - - -+def normalize_name(name): -+ ''' -+ Strips the architecture from the specified package name, if necessary. -+ Circumstances where this would be done include: -+ -+ * If the arch is 32 bit and the package name ends in a 32-bit arch. -+ * If the arch matches the OS arch, or is ``noarch``. -+ -+ CLI Example: -+ -+ .. code-block:: bash -+ -+ salt '*' pkg.normalize_name zsh.x86_64 -+ ''' -+ try: -+ arch = name.rsplit('.', 1)[-1] -+ if arch not in salt.utils.pkg.rpm.ARCHES + ('noarch',): -+ return name -+ except ValueError: -+ return name -+ if arch in (__grains__['osarch'], 'noarch') \ -+ or salt.utils.pkg.rpm.check_32(arch, osarch=__grains__['osarch']): -+ return name[:-(len(arch) + 1)] -+ return name -+ -+ - def remove(name=None, pkgs=None, **kwargs): # pylint: disable=unused-argument - ''' - .. 
versionchanged:: 2015.8.12,2016.3.3,2016.11.0 diff --git a/salt/states/pkg.py b/salt/states/pkg.py -index 2682ee17f9..ed405cb6b5 100644 +index 0aca1e0af8..2034262b23 100644 --- a/salt/states/pkg.py +++ b/salt/states/pkg.py -@@ -415,6 +415,16 @@ def _find_remove_targets(name=None, +@@ -455,6 +455,16 @@ def _find_remove_targets(name=None, if __grains__['os'] == 'FreeBSD' and origin: cver = [k for k, v in six.iteritems(cur_pkgs) if v['origin'] == pkgname] @@ -288,7 +43,7 @@ index 2682ee17f9..ed405cb6b5 100644 else: cver = cur_pkgs.get(pkgname, []) -@@ -844,6 +854,17 @@ def _verify_install(desired, new_pkgs, ignore_epoch=False, new_caps=None): +@@ -861,6 +871,17 @@ def _verify_install(desired, new_pkgs, ignore_epoch=False, new_caps=None): cver = new_pkgs.get(pkgname.split('%')[0]) elif __grains__['os_family'] == 'Debian': cver = new_pkgs.get(pkgname.split('=')[0]) @@ -306,145 +61,7 @@ index 2682ee17f9..ed405cb6b5 100644 else: cver = new_pkgs.get(pkgname) if not cver and pkgname in new_caps: -@@ -2674,7 +2695,17 @@ def _uninstall( - - changes = __salt__['pkg.{0}'.format(action)](name, pkgs=pkgs, version=version, **kwargs) - new = __salt__['pkg.list_pkgs'](versions_as_list=True, **kwargs) -- failed = [x for x in pkg_params if x in new] -+ failed = [] -+ for x in pkg_params: -+ if __grains__['os_family'] in ['Suse', 'RedHat']: -+ # Check if the package version set to be removed is actually removed: -+ if x in new and not pkg_params[x]: -+ failed.append(x) -+ elif x in new and pkg_params[x] in new[x]: -+ failed.append(x + "-" + pkg_params[x]) -+ elif x in new: -+ failed.append(x) -+ - if action == 'purge': - new_removed = __salt__['pkg.list_pkgs'](versions_as_list=True, - removed=True, -diff --git a/tests/unit/modules/test_yumpkg.py b/tests/unit/modules/test_yumpkg.py -index 28b6e1294c..c73f2582b9 100644 ---- a/tests/unit/modules/test_yumpkg.py -+++ b/tests/unit/modules/test_yumpkg.py -@@ -601,3 +601,53 @@ class YumTestCase(TestCase, LoaderModuleMockMixin): - '--branch=foo', '--exclude=kernel*', 'upgrade'], - output_loglevel='trace', - python_shell=False) -+ -+ def test_info_installed_with_all_versions(self): -+ ''' -+ Test the return information of all versions for the named package(s), installed on the system. 
-+ -+ :return: -+ ''' -+ run_out = { -+ 'virgo-dummy': [ -+ {'build_date': '2015-07-09T10:55:19Z', -+ 'vendor': 'openSUSE Build Service', -+ 'description': 'This is the Virgo dummy package used for testing SUSE Manager', -+ 'license': 'GPL-2.0', 'build_host': 'sheep05', 'url': 'http://www.suse.com', -+ 'build_date_time_t': 1436432119, 'relocations': '(not relocatable)', -+ 'source_rpm': 'virgo-dummy-1.0-1.1.src.rpm', 'install_date': '2016-02-23T16:31:57Z', -+ 'install_date_time_t': 1456241517, 'summary': 'Virgo dummy package', 'version': '1.0', -+ 'signature': 'DSA/SHA1, Thu Jul 9 08:55:33 2015, Key ID 27fa41bd8a7c64f9', -+ 'release': '1.1', 'group': 'Applications/System', 'arch': 'i686', 'size': '17992'}, -+ {'build_date': '2015-07-09T10:15:19Z', -+ 'vendor': 'openSUSE Build Service', -+ 'description': 'This is the Virgo dummy package used for testing SUSE Manager', -+ 'license': 'GPL-2.0', 'build_host': 'sheep05', 'url': 'http://www.suse.com', -+ 'build_date_time_t': 1436432119, 'relocations': '(not relocatable)', -+ 'source_rpm': 'virgo-dummy-1.0-1.1.src.rpm', 'install_date': '2016-02-23T16:31:57Z', -+ 'install_date_time_t': 14562415127, 'summary': 'Virgo dummy package', 'version': '1.0', -+ 'signature': 'DSA/SHA1, Thu Jul 9 08:55:33 2015, Key ID 27fa41bd8a7c64f9', -+ 'release': '1.1', 'group': 'Applications/System', 'arch': 'x86_64', 'size': '13124'} -+ ], -+ 'libopenssl1_0_0': [ -+ {'build_date': '2015-11-04T23:20:34Z', 'vendor': 'SUSE LLC ', -+ 'description': 'The OpenSSL Project is a collaborative effort.', -+ 'license': 'OpenSSL', 'build_host': 'sheep11', 'url': 'https://www.openssl.org/', -+ 'build_date_time_t': 1446675634, 'relocations': '(not relocatable)', -+ 'source_rpm': 'openssl-1.0.1i-34.1.src.rpm', 'install_date': '2016-02-23T16:31:35Z', -+ 'install_date_time_t': 1456241495, 'summary': 'Secure Sockets and Transport Layer Security', -+ 'version': '1.0.1i', 'signature': 'RSA/SHA256, Wed Nov 4 22:21:34 2015, Key ID 70af9e8139db7c82', -+ 'release': '34.1', 'group': 'Productivity/Networking/Security', 'packager': 'https://www.suse.com/', -+ 'arch': 'x86_64', 'size': '2576912'} -+ ] -+ } -+ with patch.dict(yumpkg.__salt__, {'lowpkg.info': MagicMock(return_value=run_out)}): -+ installed = yumpkg.info_installed(all_versions=True) -+ # Test overall products length -+ self.assertEqual(len(installed), 2) -+ -+ # Test multiple versions for the same package -+ for pkg_name, pkg_info_list in installed.items(): -+ self.assertEqual(len(pkg_info_list), 2 if pkg_name == "virgo-dummy" else 1) -+ for info in pkg_info_list: -+ self.assertTrue(info['arch'] in ('x86_64', 'i686')) -diff --git a/tests/unit/modules/test_zypper.py b/tests/unit/modules/test_zypper.py -index 539a950252..6eccee568b 100644 ---- a/tests/unit/modules/test_zypper.py -+++ b/tests/unit/modules/test_zypper.py -@@ -327,6 +327,56 @@ class ZypperTestCase(TestCase, LoaderModuleMockMixin): - installed = zypper.info_installed() - self.assertEqual(installed['vīrgô']['description'], 'vīrgô d€šçripţiǫñ') - -+ def test_info_installed_with_all_versions(self): -+ ''' -+ Test the return information of all versions for the named package(s), installed on the system. 
-+ -+ :return: -+ ''' -+ run_out = { -+ 'virgo-dummy': [ -+ {'build_date': '2015-07-09T10:55:19Z', -+ 'vendor': 'openSUSE Build Service', -+ 'description': 'This is the Virgo dummy package used for testing SUSE Manager', -+ 'license': 'GPL-2.0', 'build_host': 'sheep05', 'url': 'http://www.suse.com', -+ 'build_date_time_t': 1436432119, 'relocations': '(not relocatable)', -+ 'source_rpm': 'virgo-dummy-1.0-1.1.src.rpm', 'install_date': '2016-02-23T16:31:57Z', -+ 'install_date_time_t': 1456241517, 'summary': 'Virgo dummy package', 'version': '1.0', -+ 'signature': 'DSA/SHA1, Thu Jul 9 08:55:33 2015, Key ID 27fa41bd8a7c64f9', -+ 'release': '1.1', 'group': 'Applications/System', 'arch': 'i686', 'size': '17992'}, -+ {'build_date': '2015-07-09T10:15:19Z', -+ 'vendor': 'openSUSE Build Service', -+ 'description': 'This is the Virgo dummy package used for testing SUSE Manager', -+ 'license': 'GPL-2.0', 'build_host': 'sheep05', 'url': 'http://www.suse.com', -+ 'build_date_time_t': 1436432119, 'relocations': '(not relocatable)', -+ 'source_rpm': 'virgo-dummy-1.0-1.1.src.rpm', 'install_date': '2016-02-23T16:31:57Z', -+ 'install_date_time_t': 14562415127, 'summary': 'Virgo dummy package', 'version': '1.0', -+ 'signature': 'DSA/SHA1, Thu Jul 9 08:55:33 2015, Key ID 27fa41bd8a7c64f9', -+ 'release': '1.1', 'group': 'Applications/System', 'arch': 'x86_64', 'size': '13124'} -+ ], -+ 'libopenssl1_0_0': [ -+ {'build_date': '2015-11-04T23:20:34Z', 'vendor': 'SUSE LLC ', -+ 'description': 'The OpenSSL Project is a collaborative effort.', -+ 'license': 'OpenSSL', 'build_host': 'sheep11', 'url': 'https://www.openssl.org/', -+ 'build_date_time_t': 1446675634, 'relocations': '(not relocatable)', -+ 'source_rpm': 'openssl-1.0.1i-34.1.src.rpm', 'install_date': '2016-02-23T16:31:35Z', -+ 'install_date_time_t': 1456241495, 'summary': 'Secure Sockets and Transport Layer Security', -+ 'version': '1.0.1i', 'signature': 'RSA/SHA256, Wed Nov 4 22:21:34 2015, Key ID 70af9e8139db7c82', -+ 'release': '34.1', 'group': 'Productivity/Networking/Security', 'packager': 'https://www.suse.com/', -+ 'arch': 'x86_64', 'size': '2576912'} -+ ] -+ } -+ with patch.dict(zypper.__salt__, {'lowpkg.info': MagicMock(return_value=run_out)}): -+ installed = zypper.info_installed(all_versions=True) -+ # Test overall products length -+ self.assertEqual(len(installed), 2) -+ -+ # Test multiple versions for the same package -+ for pkg_name, pkg_info_list in installed.items(): -+ self.assertEqual(len(pkg_info_list), 2 if pkg_name == "virgo-dummy" else 1) -+ for info in pkg_info_list: -+ self.assertTrue(info['arch'] in ('x86_64', 'i686')) -+ - def test_info_available(self): - ''' - Test return the information of the named package available for the system. 
-- -2.13.7 +2.17.1 diff --git a/add-cpe_name-for-osversion-grain-parsing-u-49946.patch b/add-cpe_name-for-osversion-grain-parsing-u-49946.patch index 8b8826a..22cc3dd 100644 --- a/add-cpe_name-for-osversion-grain-parsing-u-49946.patch +++ b/add-cpe_name-for-osversion-grain-parsing-u-49946.patch @@ -1,4 +1,4 @@ -From 3bad9e211c2e76ddac48f7c8ff1632e32e0a256e Mon Sep 17 00:00:00 2001 +From c2c002a2b8f106388fda3c1abaf518f2d47ce1cf Mon Sep 17 00:00:00 2001 From: Bo Maryniuk Date: Tue, 9 Oct 2018 14:08:50 +0200 Subject: [PATCH] Add CPE_NAME for osversion* grain parsing (U#49946) @@ -25,16 +25,15 @@ Expand unit test to verify part name Fix proper part name in the string-bound CPE --- - salt/grains/core.py | 43 +++++++++++++++++++++--- - tests/unit/grains/test_core.py | 60 +++++++++++++++++++++++++++++----- - 2 files changed, 90 insertions(+), 13 deletions(-) + salt/grains/core.py | 28 ++++++++++++++++++++++++++++ + 1 file changed, 28 insertions(+) diff --git a/salt/grains/core.py b/salt/grains/core.py -index 80eebd1c05..e41ab4e0ae 100644 +index 29e8371c2b..d688b6c757 100644 --- a/salt/grains/core.py +++ b/salt/grains/core.py -@@ -1355,6 +1355,34 @@ def _parse_os_release(os_release_files): - return data +@@ -1571,6 +1571,34 @@ def _parse_cpe_name(cpe): + return ret +def _parse_cpe_name(cpe): @@ -68,127 +67,7 @@ index 80eebd1c05..e41ab4e0ae 100644 def os_data(): ''' Return grains pertaining to the operating system -@@ -1554,13 +1582,20 @@ def os_data(): - codename = codename_match.group(1) - grains['lsb_distrib_codename'] = codename - if 'CPE_NAME' in os_release: -- if ":suse:" in os_release['CPE_NAME'] or ":opensuse:" in os_release['CPE_NAME']: -+ cpe = _parse_cpe_name(os_release['CPE_NAME']) -+ if not cpe: -+ log.error('Broken CPE_NAME format in /etc/os-release!') -+ elif cpe.get('vendor', '').lower() in ['suse', 'opensuse']: - grains['os'] = "SUSE" - # openSUSE `osfullname` grain normalization - if os_release.get("NAME") == "openSUSE Leap": - grains['osfullname'] = "Leap" - elif os_release.get("VERSION") == "Tumbleweed": - grains['osfullname'] = os_release["VERSION"] -+ # Override VERSION_ID, if CPE_NAME around -+ if cpe.get('version') and cpe.get('vendor') == 'opensuse': # Keep VERSION_ID for SLES -+ grains['lsb_distrib_release'] = cpe['version'] -+ - elif os.path.isfile('/etc/SuSE-release'): - grains['lsb_distrib_id'] = 'SUSE' - version = '' -@@ -1666,8 +1701,7 @@ def os_data(): - # Commit introducing this comment should be reverted after the upstream bug is released. - if 'CentOS Linux 7' in grains.get('lsb_distrib_codename', ''): - grains.pop('lsb_distrib_release', None) -- grains['osrelease'] = \ -- grains.get('lsb_distrib_release', osrelease).strip() -+ grains['osrelease'] = grains.get('lsb_distrib_release', osrelease).strip() - grains['oscodename'] = grains.get('lsb_distrib_codename', '').strip() or oscodename - if 'Red Hat' in grains['oscodename']: - grains['oscodename'] = oscodename -@@ -1702,8 +1736,7 @@ def os_data(): - r'((?:Open|Oracle )?Solaris|OpenIndiana|OmniOS) (Development)?' - r'\s*(\d+\.?\d*|v\d+)\s?[A-Z]*\s?(r\d+|\d+\/\d+|oi_\S+|snv_\S+)?' - ) -- osname, development, osmajorrelease, osminorrelease = \ -- release_re.search(rel_data).groups() -+ osname, development, osmajorrelease, osminorrelease = release_re.search(rel_data).groups() - except AttributeError: - # Set a blank osrelease grain and fallback to 'Solaris' - # as the 'os' grain. 
-diff --git a/tests/unit/grains/test_core.py b/tests/unit/grains/test_core.py -index e973428add..2ab32ef41b 100644 ---- a/tests/unit/grains/test_core.py -+++ b/tests/unit/grains/test_core.py -@@ -62,10 +62,11 @@ class CoreGrainsTestCase(TestCase, LoaderModuleMockMixin): - def test_parse_etc_os_release(self, path_isfile_mock): - path_isfile_mock.side_effect = lambda x: x == "/usr/lib/os-release" - with salt.utils.files.fopen(os.path.join(OS_RELEASE_DIR, "ubuntu-17.10")) as os_release_file: -- os_release_content = os_release_file.readlines() -- with patch("salt.utils.files.fopen", mock_open()) as os_release_file: -- os_release_file.return_value.__iter__.return_value = os_release_content -- os_release = core._parse_os_release(["/etc/os-release", "/usr/lib/os-release"]) -+ os_release_content = os_release_file.read() -+ with patch("salt.utils.files.fopen", mock_open(read_data=os_release_content)): -+ os_release = core._parse_os_release( -+ '/etc/os-release', -+ '/usr/lib/os-release') - self.assertEqual(os_release, { - "NAME": "Ubuntu", - "VERSION": "17.10 (Artful Aardvark)", -@@ -81,10 +82,53 @@ class CoreGrainsTestCase(TestCase, LoaderModuleMockMixin): - "UBUNTU_CODENAME": "artful", - }) - -- @patch("os.path.isfile") -- def test_missing_os_release(self, path_isfile_mock): -- path_isfile_mock.return_value = False -- os_release = core._parse_os_release(["/etc/os-release", "/usr/lib/os-release"]) -+ def test_parse_cpe_name_wfn(self): -+ ''' -+ Parse correct CPE_NAME data WFN formatted -+ :return: -+ ''' -+ for cpe, cpe_ret in [('cpe:/o:opensuse:leap:15.0', -+ {'phase': None, 'version': '15.0', 'product': 'leap', -+ 'vendor': 'opensuse', 'part': 'operating system'}), -+ ('cpe:/o:vendor:product:42:beta', -+ {'phase': 'beta', 'version': '42', 'product': 'product', -+ 'vendor': 'vendor', 'part': 'operating system'})]: -+ ret = core._parse_cpe_name(cpe) -+ for key in cpe_ret: -+ assert key in ret -+ assert cpe_ret[key] == ret[key] -+ -+ def test_parse_cpe_name_v23(self): -+ ''' -+ Parse correct CPE_NAME data v2.3 formatted -+ :return: -+ ''' -+ for cpe, cpe_ret in [('cpe:2.3:o:microsoft:windows_xp:5.1.601:beta:*:*:*:*:*:*', -+ {'phase': 'beta', 'version': '5.1.601', 'product': 'windows_xp', -+ 'vendor': 'microsoft', 'part': 'operating system'}), -+ ('cpe:2.3:h:corellian:millenium_falcon:1.0:*:*:*:*:*:*:*', -+ {'phase': None, 'version': '1.0', 'product': 'millenium_falcon', -+ 'vendor': 'corellian', 'part': 'hardware'}), -+ ('cpe:2.3:*:dark_empire:light_saber:3.0:beta:*:*:*:*:*:*', -+ {'phase': 'beta', 'version': '3.0', 'product': 'light_saber', -+ 'vendor': 'dark_empire', 'part': None})]: -+ ret = core._parse_cpe_name(cpe) -+ for key in cpe_ret: -+ assert key in ret -+ assert cpe_ret[key] == ret[key] -+ -+ def test_parse_cpe_name_broken(self): -+ ''' -+ Parse broken CPE_NAME data -+ :return: -+ ''' -+ for cpe in ['cpe:broken', 'cpe:broken:in:all:ways:*:*:*:*', -+ 'cpe:x:still:broken:123', 'who:/knows:what:is:here']: -+ assert core._parse_cpe_name(cpe) == {} -+ -+ def test_missing_os_release(self): -+ with patch('salt.utils.files.fopen', mock_open(read_data={})): -+ os_release = core._parse_os_release('/etc/os-release', '/usr/lib/os-release') - self.assertEqual(os_release, {}) - - @skipIf(not salt.utils.platform.is_linux(), 'System is not Linux') -- -2.19.0 +2.17.1 diff --git a/add-engine-relaying-libvirt-events.patch b/add-engine-relaying-libvirt-events.patch deleted file mode 100644 index 0da11aa..0000000 --- a/add-engine-relaying-libvirt-events.patch +++ /dev/null @@ -1,894 +0,0 @@ -From 
5c41a5b8c9925bf788946e334cb3912ca9b09190 Mon Sep 17 00:00:00 2001 -From: =?UTF-8?q?C=C3=A9dric=20Bosdonnat?= -Date: Fri, 9 Mar 2018 15:46:12 +0100 -Subject: [PATCH] Add engine relaying libvirt events - -Libvirt API offers clients to register callbacks for various events. -libvirt_events engine will listen on a libvirt URI (local or remote) -for events and send them to the salt event bus. - -Special thanks to @isbm for the code cleanup help ---- - salt/engines/libvirt_events.py | 702 ++++++++++++++++++++++ - tests/unit/engines/test_libvirt_events.py | 159 +++++ - 2 files changed, 861 insertions(+) - create mode 100644 salt/engines/libvirt_events.py - create mode 100644 tests/unit/engines/test_libvirt_events.py - -diff --git a/salt/engines/libvirt_events.py b/salt/engines/libvirt_events.py -new file mode 100644 -index 0000000000..a1c9d09067 ---- /dev/null -+++ b/salt/engines/libvirt_events.py -@@ -0,0 +1,702 @@ -+# -*- coding: utf-8 -*- -+ -+''' -+An engine that listens for libvirt events and resends them to the salt event bus. -+ -+The minimal configuration is the following and will listen to all events on the -+local hypervisor and send them with a tag starting with ``salt/engines/libvirt_events``: -+ -+.. code-block:: yaml -+ -+ engines: -+ - libvirt_events -+ -+Note that the automatically-picked libvirt connection will depend on the value -+of ``uri_default`` in ``/etc/libvirt/libvirt.conf``. To force using another -+connection like the local LXC libvirt driver, set the ``uri`` property as in the -+following example configuration. -+ -+.. code-block:: yaml -+ -+ engines: -+ - libvirt_events: -+ uri: lxc:/// -+ tag_prefix: libvirt -+ filters: -+ - domain/lifecycle -+ - domain/reboot -+ - pool -+ -+Filters is a list of event types to relay to the event bus. Items in this list -+can be either one of the main types (``domain``, ``network``, ``pool``, -+``nodedev``, ``secret``), ``all`` or a more precise filter. These can be done -+with values like /. The possible values are in the -+CALLBACK_DEFS constant. If the filters list contains ``all``, all -+events will be relayed. -+ -+Be aware that the list of events increases with libvirt versions, for example -+network events have been added in libvirt 1.2.1. -+ -+Running the engine on non-root -+------------------------------ -+ -+Running this engine as non-root requires a special attention, which is surely -+the case for the master running as user `salt`. The engine is likely to fail -+to connect to libvirt with an error like this one: -+ -+ [ERROR ] authentication unavailable: no polkit agent available to authenticate action 'org.libvirt.unix.monitor' -+ -+ -+To fix this, the user running the engine, for example the salt-master, needs -+to have the rights to connect to libvirt in the machine polkit config. -+A polkit rule like the following one will allow `salt` user to connect to libvirt: -+ -+.. code-block:: javascript -+ -+ polkit.addRule(function(action, subject) { -+ if (action.id.indexOf("org.libvirt") == 0 && -+ subject.user == "salt") { -+ return polkit.Result.YES; -+ } -+ }); -+ -+:depends: libvirt 1.0.0+ python binding -+ -+.. 
versionadded:: Fluorine -+''' -+ -+from __future__ import absolute_import, unicode_literals, print_function -+import logging -+ -+# Import salt libs -+import salt.utils.event -+ -+# pylint: disable=no-name-in-module,import-error -+from salt.ext.six.moves.urllib.parse import urlparse -+# pylint: enable=no-name-in-module,import-error -+ -+log = logging.getLogger(__name__) -+ -+ -+try: -+ import libvirt -+except ImportError: -+ libvirt = None # pylint: disable=invalid-name -+ -+ -+def __virtual__(): -+ ''' -+ Only load if libvirt python binding is present -+ ''' -+ if libvirt is None: -+ msg = 'libvirt module not found' -+ elif libvirt.getVersion() < 1000000: -+ msg = 'libvirt >= 1.0.0 required' -+ else: -+ msg = '' -+ return not bool(msg), msg -+ -+ -+REGISTER_FUNCTIONS = { -+ 'domain': 'domainEventRegisterAny', -+ 'network': 'networkEventRegisterAny', -+ 'pool': 'storagePoolEventRegisterAny', -+ 'nodedev': 'nodeDeviceEventRegisterAny', -+ 'secret': 'secretEventRegisterAny' -+} -+ -+# Handle either BLOCK_JOB or BLOCK_JOB_2, but prefer the latter -+if hasattr(libvirt, 'VIR_DOMAIN_EVENT_ID_BLOCK_JOB_2'): -+ BLOCK_JOB_ID = 'VIR_DOMAIN_EVENT_ID_BLOCK_JOB_2' -+else: -+ BLOCK_JOB_ID = 'VIR_DOMAIN_EVENT_ID_BLOCK_JOB' -+ -+CALLBACK_DEFS = { -+ 'domain': (('lifecycle', None), -+ ('reboot', None), -+ ('rtc_change', None), -+ ('watchdog', None), -+ ('graphics', None), -+ ('io_error', 'VIR_DOMAIN_EVENT_ID_IO_ERROR_REASON'), -+ ('control_error', None), -+ ('disk_change', None), -+ ('tray_change', None), -+ ('pmwakeup', None), -+ ('pmsuspend', None), -+ ('balloon_change', None), -+ ('pmsuspend_disk', None), -+ ('device_removed', None), -+ ('block_job', BLOCK_JOB_ID), -+ ('tunable', None), -+ ('agent_lifecycle', None), -+ ('device_added', None), -+ ('migration_iteration', None), -+ ('job_completed', None), -+ ('device_removal_failed', None), -+ ('metadata_change', None), -+ ('block_threshold', None)), -+ 'network': (('lifecycle', None),), -+ 'pool': (('lifecycle', None), -+ ('refresh', None)), -+ 'nodedev': (('lifecycle', None), -+ ('update', None)), -+ 'secret': (('lifecycle', None), -+ ('value_changed', None)) -+} -+ -+ -+def _compute_subprefix(attr): -+ ''' -+ Get the part before the first '_' or the end of attr including -+ the potential '_' -+ ''' -+ return ''.join((attr.split('_')[0], '_' if len(attr.split('_')) > 1 else '')) -+ -+ -+def _get_libvirt_enum_string(prefix, value): -+ ''' -+ Convert the libvirt enum integer value into a human readable string. -+ -+ :param prefix: start of the libvirt attribute to look for. 
-+ :param value: integer to convert to string -+ ''' -+ attributes = [attr[len(prefix):] for attr in libvirt.__dict__ if attr.startswith(prefix)] -+ -+ # Filter out the values starting with a common base as they match another enum -+ prefixes = [_compute_subprefix(p) for p in attributes] -+ counts = {p: prefixes.count(p) for p in prefixes} -+ sub_prefixes = [p for p, count in counts.items() if count > 1] -+ filtered = [attr for attr in attributes if _compute_subprefix(attr) not in sub_prefixes] -+ -+ for candidate in filtered: -+ if value == getattr(libvirt, ''.join((prefix, candidate))): -+ name = candidate.lower().replace('_', ' ') -+ return name -+ return 'unknown' -+ -+ -+def _get_domain_event_detail(event, detail): -+ ''' -+ Convert event and detail numeric values into a tuple of human readable strings -+ ''' -+ event_name = _get_libvirt_enum_string('VIR_DOMAIN_EVENT_', event) -+ if event_name == 'unknown': -+ return event_name, 'unknown' -+ -+ prefix = 'VIR_DOMAIN_EVENT_{0}_'.format(event_name.upper()) -+ detail_name = _get_libvirt_enum_string(prefix, detail) -+ -+ return event_name, detail_name -+ -+ -+def _salt_send_event(opaque, conn, data): -+ ''' -+ Convenience function adding common data to the event and sending it -+ on the salt event bus. -+ -+ :param opaque: the opaque data that is passed to the callback. -+ This is a dict with 'prefix', 'object' and 'event' keys. -+ :param conn: libvirt connection -+ :param data: additional event data dict to send -+ ''' -+ tag_prefix = opaque['prefix'] -+ object_type = opaque['object'] -+ event_type = opaque['event'] -+ -+ # Prepare the connection URI to fit in the tag -+ # qemu+ssh://user@host:1234/system -> qemu+ssh/user@host:1234/system -+ uri = urlparse(conn.getURI()) -+ uri_tag = [uri.scheme] -+ if uri.netloc: -+ uri_tag.append(uri.netloc) -+ path = uri.path.strip('/') -+ if path: -+ uri_tag.append(path) -+ uri_str = "/".join(uri_tag) -+ -+ # Append some common data -+ all_data = { -+ 'uri': conn.getURI() -+ } -+ all_data.update(data) -+ -+ tag = '/'.join((tag_prefix, uri_str, object_type, event_type)) -+ -+ # Actually send the event in salt -+ if __opts__.get('__role') == 'master': -+ salt.utils.event.get_master_event( -+ __opts__, -+ __opts__['sock_dir']).fire_event(all_data, tag) -+ else: -+ __salt__['event.send'](tag, all_data) -+ -+ -+def _salt_send_domain_event(opaque, conn, domain, event, event_data): -+ ''' -+ Helper function send a salt event for a libvirt domain. -+ -+ :param opaque: the opaque data that is passed to the callback. -+ This is a dict with 'prefix', 'object' and 'event' keys. 
-+ :param conn: libvirt connection -+ :param domain: name of the domain related to the event -+ :param event: name of the event -+ :param event_data: additional event data dict to send -+ ''' -+ data = { -+ 'domain': { -+ 'name': domain.name(), -+ 'id': domain.ID(), -+ 'uuid': domain.UUIDString() -+ }, -+ 'event': event -+ } -+ data.update(event_data) -+ _salt_send_event(opaque, conn, data) -+ -+ -+def _domain_event_lifecycle_cb(conn, domain, event, detail, opaque): -+ ''' -+ Domain lifecycle events handler -+ ''' -+ event_str, detail_str = _get_domain_event_detail(event, detail) -+ -+ _salt_send_domain_event(opaque, conn, domain, opaque['event'], { -+ 'event': event_str, -+ 'detail': detail_str -+ }) -+ -+ -+def _domain_event_reboot_cb(conn, domain, opaque): -+ ''' -+ Domain reboot events handler -+ ''' -+ _salt_send_domain_event(opaque, conn, domain, opaque['event'], {}) -+ -+ -+def _domain_event_rtc_change_cb(conn, domain, utcoffset, opaque): -+ ''' -+ Domain RTC change events handler -+ ''' -+ _salt_send_domain_event(opaque, conn, domain, opaque['event'], { -+ 'utcoffset': utcoffset -+ }) -+ -+ -+def _domain_event_watchdog_cb(conn, domain, action, opaque): -+ ''' -+ Domain watchdog events handler -+ ''' -+ _salt_send_domain_event(opaque, conn, domain, opaque['event'], { -+ 'action': _get_libvirt_enum_string('VIR_DOMAIN_EVENT_WATCHDOG_', action) -+ }) -+ -+ -+def _domain_event_io_error_cb(conn, domain, srcpath, devalias, action, reason, opaque): -+ ''' -+ Domain I/O Error events handler -+ ''' -+ _salt_send_domain_event(opaque, conn, domain, opaque['event'], { -+ 'srcPath': srcpath, -+ 'dev': devalias, -+ 'action': _get_libvirt_enum_string('VIR_DOMAIN_EVENT_IO_ERROR_', action), -+ 'reason': reason -+ }) -+ -+ -+def _domain_event_graphics_cb(conn, domain, phase, local, remote, auth, subject, opaque): -+ ''' -+ Domain graphics events handler -+ ''' -+ prefix = 'VIR_DOMAIN_EVENT_GRAPHICS_' -+ -+ def get_address(addr): -+ ''' -+ transform address structure into event data piece -+ ''' -+ data = {'family': _get_libvirt_enum_string('{0}_ADDRESS_'.format(prefix), addr['family']), -+ 'node': addr['node'], -+ 'service': addr['service']} -+ return addr -+ -+ _salt_send_domain_event(opaque, conn, domain, opaque['event'], { -+ 'phase': _get_libvirt_enum_string(prefix, phase), -+ 'local': get_address(local), -+ 'remote': get_address(remote), -+ 'authScheme': auth, -+ 'subject': [{'type': item[0], 'name': item[1]} for item in subject] -+ }) -+ -+ -+def _domain_event_control_error_cb(conn, domain, opaque): -+ ''' -+ Domain control error events handler -+ ''' -+ _salt_send_domain_event(opaque, conn, domain, opaque['event'], {}) -+ -+ -+def _domain_event_disk_change_cb(conn, domain, old_src, new_src, dev, reason, opaque): -+ ''' -+ Domain disk change events handler -+ ''' -+ _salt_send_domain_event(opaque, conn, domain, opaque['event'], { -+ 'oldSrcPath': old_src, -+ 'newSrcPath': new_src, -+ 'dev': dev, -+ 'reason': _get_libvirt_enum_string('VIR_DOMAIN_EVENT_DISK_', reason) -+ }) -+ -+ -+def _domain_event_tray_change_cb(conn, domain, dev, reason, opaque): -+ ''' -+ Domain tray change events handler -+ ''' -+ _salt_send_domain_event(opaque, conn, domain, opaque['event'], { -+ 'dev': dev, -+ 'reason': _get_libvirt_enum_string('VIR_DOMAIN_EVENT_TRAY_CHANGE_', reason) -+ }) -+ -+ -+def _domain_event_pmwakeup_cb(conn, domain, reason, opaque): -+ ''' -+ Domain wakeup events handler -+ ''' -+ _salt_send_domain_event(opaque, conn, domain, opaque['event'], { -+ 'reason': 'unknown' # currently unused -+ }) -+ -+ 
-+def _domain_event_pmsuspend_cb(conn, domain, reason, opaque): -+ ''' -+ Domain suspend events handler -+ ''' -+ _salt_send_domain_event(opaque, conn, domain, opaque['event'], { -+ 'reason': 'unknown' # currently unused -+ }) -+ -+ -+def _domain_event_balloon_change_cb(conn, domain, actual, opaque): -+ ''' -+ Domain balloon change events handler -+ ''' -+ _salt_send_domain_event(opaque, conn, domain, opaque['event'], { -+ 'actual': actual -+ }) -+ -+ -+def _domain_event_pmsuspend_disk_cb(conn, domain, reason, opaque): -+ ''' -+ Domain disk suspend events handler -+ ''' -+ _salt_send_domain_event(opaque, conn, domain, opaque['event'], { -+ 'reason': 'unknown' # currently unused -+ }) -+ -+ -+def _domain_event_block_job_cb(conn, domain, disk, job_type, status, opaque): -+ ''' -+ Domain block job events handler -+ ''' -+ _salt_send_domain_event(opaque, conn, domain, opaque['event'], { -+ 'disk': disk, -+ 'type': _get_libvirt_enum_string('VIR_DOMAIN_BLOCK_JOB_TYPE_', job_type), -+ 'status': _get_libvirt_enum_string('VIR_DOMAIN_BLOCK_JOB_', status) -+ }) -+ -+ -+def _domain_event_device_removed_cb(conn, domain, dev, opaque): -+ ''' -+ Domain device removal events handler -+ ''' -+ _salt_send_domain_event(opaque, conn, domain, opaque['event'], { -+ 'dev': dev -+ }) -+ -+ -+def _domain_event_tunable_cb(conn, domain, params, opaque): -+ ''' -+ Domain tunable events handler -+ ''' -+ _salt_send_domain_event(opaque, conn, domain, opaque['event'], { -+ 'params': params -+ }) -+ -+ -+# pylint: disable=invalid-name -+def _domain_event_agent_lifecycle_cb(conn, domain, state, reason, opaque): -+ ''' -+ Domain agent lifecycle events handler -+ ''' -+ _salt_send_domain_event(opaque, conn, domain, opaque['event'], { -+ 'state': _get_libvirt_enum_string('VIR_CONNECT_DOMAIN_EVENT_AGENT_LIFECYCLE_STATE_', state), -+ 'reason': _get_libvirt_enum_string('VIR_CONNECT_DOMAIN_EVENT_AGENT_LIFECYCLE_REASON_', reason) -+ }) -+ -+ -+def _domain_event_device_added_cb(conn, domain, dev, opaque): -+ ''' -+ Domain device addition events handler -+ ''' -+ _salt_send_domain_event(opaque, conn, domain, opaque['event'], { -+ 'dev': dev -+ }) -+ -+ -+# pylint: disable=invalid-name -+def _domain_event_migration_iteration_cb(conn, domain, iteration, opaque): -+ ''' -+ Domain migration iteration events handler -+ ''' -+ _salt_send_domain_event(opaque, conn, domain, opaque['event'], { -+ 'iteration': iteration -+ }) -+ -+ -+def _domain_event_job_completed_cb(conn, domain, params, opaque): -+ ''' -+ Domain job completion events handler -+ ''' -+ _salt_send_domain_event(opaque, conn, domain, opaque['event'], { -+ 'params': params -+ }) -+ -+ -+def _domain_event_device_removal_failed_cb(conn, domain, dev, opaque): -+ ''' -+ Domain device removal failure events handler -+ ''' -+ _salt_send_domain_event(opaque, conn, domain, opaque['event'], { -+ 'dev': dev -+ }) -+ -+ -+def _domain_event_metadata_change_cb(conn, domain, mtype, nsuri, opaque): -+ ''' -+ Domain metadata change events handler -+ ''' -+ _salt_send_domain_event(opaque, conn, domain, opaque['event'], { -+ 'type': _get_libvirt_enum_string('VIR_DOMAIN_METADATA_', mtype), -+ 'nsuri': nsuri -+ }) -+ -+ -+def _domain_event_block_threshold_cb(conn, domain, dev, path, threshold, excess, opaque): -+ ''' -+ Domain block threshold events handler -+ ''' -+ _salt_send_domain_event(opaque, conn, domain, opaque['event'], { -+ 'dev': dev, -+ 'path': path, -+ 'threshold': threshold, -+ 'excess': excess -+ }) -+ -+ -+def _network_event_lifecycle_cb(conn, net, event, detail, opaque): -+ ''' 
-+ Network lifecycle events handler -+ ''' -+ -+ _salt_send_event(opaque, conn, { -+ 'network': { -+ 'name': net.name(), -+ 'uuid': net.UUIDString() -+ }, -+ 'event': _get_libvirt_enum_string('VIR_NETWORK_EVENT_', event), -+ 'detail': 'unknown' # currently unused -+ }) -+ -+ -+def _pool_event_lifecycle_cb(conn, pool, event, detail, opaque): -+ ''' -+ Storage pool lifecycle events handler -+ ''' -+ _salt_send_event(opaque, conn, { -+ 'pool': { -+ 'name': pool.name(), -+ 'uuid': pool.UUIDString() -+ }, -+ 'event': _get_libvirt_enum_string('VIR_STORAGE_POOL_EVENT_', event), -+ 'detail': 'unknown' # currently unused -+ }) -+ -+ -+def _pool_event_refresh_cb(conn, pool, opaque): -+ ''' -+ Storage pool refresh events handler -+ ''' -+ _salt_send_event(opaque, conn, { -+ 'pool': { -+ 'name': pool.name(), -+ 'uuid': pool.UUIDString() -+ }, -+ 'event': opaque['event'] -+ }) -+ -+ -+def _nodedev_event_lifecycle_cb(conn, dev, event, detail, opaque): -+ ''' -+ Node device lifecycle events handler -+ ''' -+ _salt_send_event(opaque, conn, { -+ 'nodedev': { -+ 'name': dev.name() -+ }, -+ 'event': _get_libvirt_enum_string('VIR_NODE_DEVICE_EVENT_', event), -+ 'detail': 'unknown' # currently unused -+ }) -+ -+ -+def _nodedev_event_update_cb(conn, dev, opaque): -+ ''' -+ Node device update events handler -+ ''' -+ _salt_send_event(opaque, conn, { -+ 'nodedev': { -+ 'name': dev.name() -+ }, -+ 'event': opaque['event'] -+ }) -+ -+ -+def _secret_event_lifecycle_cb(conn, secret, event, detail, opaque): -+ ''' -+ Secret lifecycle events handler -+ ''' -+ _salt_send_event(opaque, conn, { -+ 'secret': { -+ 'uuid': secret.UUIDString() -+ }, -+ 'event': _get_libvirt_enum_string('VIR_SECRET_EVENT_', event), -+ 'detail': 'unknown' # currently unused -+ }) -+ -+ -+def _secret_event_value_changed_cb(conn, secret, opaque): -+ ''' -+ Secret value change events handler -+ ''' -+ _salt_send_event(opaque, conn, { -+ 'secret': { -+ 'uuid': secret.UUIDString() -+ }, -+ 'event': opaque['event'] -+ }) -+ -+ -+def _cleanup(cnx): -+ ''' -+ Close the libvirt connection -+ -+ :param cnx: libvirt connection -+ ''' -+ log.debug('Closing libvirt connection: %s', cnx.getURI()) -+ cnx.close() -+ -+ -+def _callbacks_cleanup(cnx, callback_ids): -+ ''' -+ Unregister all the registered callbacks -+ -+ :param cnx: libvirt connection -+ :param callback_ids: dictionary mapping a libvirt object type to an ID list -+ of callbacks to deregister -+ ''' -+ for obj, ids in callback_ids.items(): -+ register_name = REGISTER_FUNCTIONS[obj] -+ deregister_name = register_name.replace('Reg', 'Dereg') -+ deregister = getattr(cnx, deregister_name) -+ for callback_id in ids: -+ deregister(callback_id) -+ -+ -+def _register_callback(cnx, tag_prefix, obj, event, real_id): -+ ''' -+ Helper function registering a callback -+ -+ :param cnx: libvirt connection -+ :param tag_prefix: salt event tag prefix to use -+ :param obj: the libvirt object name for the event. Needs to -+ be one of the REGISTER_FUNCTIONS keys. -+ :param event: the event type name. 
-+ :param real_id: the libvirt name of an alternative event id to use or None -+ -+ :rtype integer value needed to deregister the callback -+ ''' -+ libvirt_name = real_id -+ if real_id is None: -+ libvirt_name = 'VIR_{0}_EVENT_ID_{1}'.format(obj, event).upper() -+ -+ if not hasattr(libvirt, libvirt_name): -+ log.warning('Skipping "%s/%s" events: libvirt too old', obj, event) -+ return None -+ -+ libvirt_id = getattr(libvirt, libvirt_name) -+ callback_name = "_{0}_event_{1}_cb".format(obj, event) -+ callback = globals().get(callback_name, None) -+ if callback is None: -+ log.error('Missing function %s in engine', callback_name) -+ return None -+ -+ register = getattr(cnx, REGISTER_FUNCTIONS[obj]) -+ return register(None, libvirt_id, callback, -+ {'prefix': tag_prefix, -+ 'object': obj, -+ 'event': event}) -+ -+ -+def _append_callback_id(ids, obj, callback_id): -+ ''' -+ Helper function adding a callback ID to the IDs dict. -+ The callback ids dict maps an object to event callback ids. -+ -+ :param ids: dict of callback IDs to update -+ :param obj: one of the keys of REGISTER_FUNCTIONS -+ :param callback_id: the result of _register_callback -+ ''' -+ if obj not in ids: -+ ids[obj] = [] -+ ids[obj].append(callback_id) -+ -+ -+def start(uri=None, -+ tag_prefix='salt/engines/libvirt_events', -+ filters=None): -+ ''' -+ Listen to libvirt events and forward them to salt. -+ -+ :param uri: libvirt URI to listen on. -+ Defaults to None to pick the first available local hypervisor -+ :param tag_prefix: the begining of the salt event tag to use. -+ Defaults to 'salt/engines/libvirt_events' -+ :param filters: the list of event of listen on. Defaults to 'all' -+ ''' -+ if filters is None: -+ filters = ['all'] -+ try: -+ libvirt.virEventRegisterDefaultImpl() -+ -+ cnx = libvirt.openReadOnly(uri) -+ log.debug('Opened libvirt uri: %s', cnx.getURI()) -+ -+ callback_ids = {} -+ all_filters = "all" in filters -+ -+ for obj, event_defs in CALLBACK_DEFS.items(): -+ for event, real_id in event_defs: -+ event_filter = "/".join((obj, event)) -+ if event_filter not in filters and obj not in filters and not all_filters: -+ continue -+ registered_id = _register_callback(cnx, tag_prefix, -+ obj, event, real_id) -+ if registered_id: -+ _append_callback_id(callback_ids, obj, registered_id) -+ -+ exit_loop = False -+ while not exit_loop: -+ exit_loop = libvirt.virEventRunDefaultImpl() < 0 -+ -+ except Exception as err: # pylint: disable=broad-except -+ log.exception(err) -+ finally: -+ _callbacks_cleanup(cnx, callback_ids) -+ _cleanup(cnx) -diff --git a/tests/unit/engines/test_libvirt_events.py b/tests/unit/engines/test_libvirt_events.py -new file mode 100644 -index 0000000000..6608aaf648 ---- /dev/null -+++ b/tests/unit/engines/test_libvirt_events.py -@@ -0,0 +1,159 @@ -+# -*- coding: utf-8 -*- -+''' -+unit tests for the libvirt_events engine -+''' -+# Import Python libs -+from __future__ import absolute_import, print_function, unicode_literals -+ -+# Import Salt Testing Libs -+from tests.support.mixins import LoaderModuleMockMixin -+from tests.support.unit import skipIf, TestCase -+from tests.support.mock import ( -+ NO_MOCK, -+ NO_MOCK_REASON, -+ MagicMock, -+ patch) -+ -+# Import Salt Libs -+import salt.engines.libvirt_events as libvirt_events -+ -+ -+# pylint: disable=protected-access,attribute-defined-outside-init,invalid-name,unused-argument,no-self-use -+ -+ -+@skipIf(NO_MOCK, NO_MOCK_REASON) -+class EngineLibvirtEventTestCase(TestCase, LoaderModuleMockMixin): -+ ''' -+ Test cases for salt.engine.libvirt_events 
-+ ''' -+ -+ def setup_loader_modules(self): -+ patcher = patch('salt.engines.libvirt_events.libvirt') -+ self.mock_libvirt = patcher.start() -+ self.mock_libvirt.getVersion.return_value = 2000000 -+ self.mock_libvirt.virEventRunDefaultImpl.return_value = -1 # Don't loop for ever -+ self.mock_libvirt.VIR_DOMAIN_EVENT_ID_LIFECYCLE = 0 -+ self.mock_libvirt.VIR_DOMAIN_EVENT_ID_REBOOT = 1 -+ self.addCleanup(patcher.stop) -+ self.addCleanup(delattr, self, 'mock_libvirt') -+ return {libvirt_events: {}} -+ -+ @patch('salt.engines.libvirt_events.libvirt', -+ VIR_PREFIX_NONE=0, -+ VIR_PREFIX_ONE=1, -+ VIR_PREFIX_TWO=2, -+ VIR_PREFIX_SUB_FOO=0, -+ VIR_PREFIX_SUB_BAR=1, -+ VIR_PREFIX_SUB_FOOBAR=2) -+ def test_get_libvirt_enum_string_subprefix(self, libvirt_mock): -+ ''' -+ Make sure the libvirt enum value to string works reliably with -+ elements with a sub prefix, eg VIR_PREFIX_SUB_* in this case. -+ ''' -+ # Test case with a sub prefix -+ -+ assert libvirt_events._get_libvirt_enum_string('VIR_PREFIX_', 2) == 'two' -+ -+ @patch('salt.engines.libvirt_events.libvirt', -+ VIR_PREFIX_FOO=0, -+ VIR_PREFIX_FOO_BAR=1, -+ VIR_PREFIX_BAR_FOO=2) -+ def test_get_libvirt_enum_string_underscores(self, libvirt_mock): -+ ''' -+ Make sure the libvirt enum value to string works reliably and items -+ with an underscore aren't confused with sub prefixes. -+ ''' -+ assert libvirt_events._get_libvirt_enum_string('VIR_PREFIX_', 1) == 'foo bar' -+ -+ @patch('salt.engines.libvirt_events.libvirt', -+ VIR_DOMAIN_EVENT_DEFINED=0, -+ VIR_DOMAIN_EVENT_UNDEFINED=1, -+ VIR_DOMAIN_EVENT_DEFINED_ADDED=0, -+ VIR_DOMAIN_EVENT_DEFINED_UPDATED=1) -+ def test_get_domain_event_detail(self, mock_libvirt): -+ ''' -+ Test get_domain_event_detail function -+ ''' -+ assert libvirt_events._get_domain_event_detail(1, 2) == ('undefined', 'unknown') -+ assert libvirt_events._get_domain_event_detail(0, 1) == ('defined', 'updated') -+ assert libvirt_events._get_domain_event_detail(4, 2) == ('unknown', 'unknown') -+ -+ @patch('salt.engines.libvirt_events.libvirt', VIR_NETWORK_EVENT_ID_LIFECYCLE=1000) -+ def test_event_register(self, mock_libvirt): -+ ''' -+ Test that the libvirt_events engine actually registers events catch them and cleans -+ before leaving the place. 
-+ ''' -+ mock_cnx = MagicMock() -+ mock_libvirt.openReadOnly.return_value = mock_cnx -+ -+ mock_cnx.networkEventRegisterAny.return_value = 10000 -+ -+ libvirt_events.start('test:///', 'test/prefix') -+ -+ # Check that the connection has been opened -+ mock_libvirt.openReadOnly.assert_called_once_with('test:///') -+ -+ # Check that the connection has been closed -+ mock_cnx.close.assert_called_once() -+ -+ # Check events registration and deregistration -+ mock_cnx.domainEventRegisterAny.assert_any_call( -+ None, mock_libvirt.VIR_DOMAIN_EVENT_ID_LIFECYCLE, -+ libvirt_events._domain_event_lifecycle_cb, -+ {'prefix': 'test/prefix', 'object': 'domain', 'event': 'lifecycle'}) -+ mock_cnx.networkEventRegisterAny.assert_any_call( -+ None, mock_libvirt.VIR_NETWORK_EVENT_ID_LIFECYCLE, -+ libvirt_events._network_event_lifecycle_cb, -+ {'prefix': 'test/prefix', 'object': 'network', 'event': 'lifecycle'}) -+ -+ # Check that the deregister events are called with the result of register -+ mock_cnx.networkEventDeregisterAny.assert_called_with( -+ mock_cnx.networkEventRegisterAny.return_value) -+ -+ # Check that the default 'all' filter actually worked -+ counts = {obj: len(callback_def) for obj, callback_def in libvirt_events.CALLBACK_DEFS.items()} -+ for obj, count in counts.items(): -+ register = libvirt_events.REGISTER_FUNCTIONS[obj] -+ assert getattr(mock_cnx, register).call_count == count -+ -+ def test_event_skipped(self): -+ ''' -+ Test that events are skipped if their ID isn't defined in the libvirt -+ module (older libvirt) -+ ''' -+ self.mock_libvirt.mock_add_spec([ -+ 'openReadOnly', -+ 'virEventRegisterDefaultImpl', -+ 'virEventRunDefaultImpl', -+ 'VIR_DOMAIN_EVENT_ID_LIFECYCLE'], spec_set=True) -+ -+ libvirt_events.start('test:///', 'test/prefix') -+ -+ # Check events registration and deregistration -+ mock_cnx = self.mock_libvirt.openReadOnly.return_value -+ -+ mock_cnx.domainEventRegisterAny.assert_any_call( -+ None, self.mock_libvirt.VIR_DOMAIN_EVENT_ID_LIFECYCLE, -+ libvirt_events._domain_event_lifecycle_cb, -+ {'prefix': 'test/prefix', 'object': 'domain', 'event': 'lifecycle'}) -+ -+ # Network events should have been skipped -+ mock_cnx.networkEventRegisterAny.assert_not_called() -+ -+ def test_event_filtered(self): -+ ''' -+ Test that events are skipped if their ID isn't defined in the libvirt -+ module (older libvirt) -+ ''' -+ libvirt_events.start('test', 'test/prefix', 'domain/lifecycle') -+ -+ # Check events registration and deregistration -+ mock_cnx = self.mock_libvirt.openReadOnly.return_value -+ -+ mock_cnx.domainEventRegisterAny.assert_any_call( -+ None, 0, libvirt_events._domain_event_lifecycle_cb, -+ {'prefix': 'test/prefix', 'object': 'domain', 'event': 'lifecycle'}) -+ -+ # Network events should have been filtered out -+ mock_cnx.networkEventRegisterAny.assert_not_called() --- -2.17.1 - - diff --git a/add-environment-variable-to-know-if-yum-is-invoked-f.patch b/add-environment-variable-to-know-if-yum-is-invoked-f.patch index 4cdb448..1a820fe 100644 --- a/add-environment-variable-to-know-if-yum-is-invoked-f.patch +++ b/add-environment-variable-to-know-if-yum-is-invoked-f.patch @@ -1,28 +1,18 @@ -From 39d9d9fb26f9aff83fce4ce67d5b2a6bd4f60b95 Mon Sep 17 00:00:00 2001 +From d9d459f62d53acddd67313d9d66e1fe8caf4fd45 Mon Sep 17 00:00:00 2001 From: Marcelo Chiaradia Date: Thu, 7 Jun 2018 10:29:41 +0200 Subject: [PATCH] Add environment variable to know if yum is invoked from Salt(bsc#1057635) --- - salt/modules/yumpkg.py | 59 +++++++++++++++++++++++++++++++++----------------- - 1 
file changed, 39 insertions(+), 20 deletions(-) + salt/modules/yumpkg.py | 18 ++++++++++++------ + 1 file changed, 12 insertions(+), 6 deletions(-) diff --git a/salt/modules/yumpkg.py b/salt/modules/yumpkg.py -index 9ce4926790..51832bf883 100644 +index c250b94f0e..a56a2e8366 100644 --- a/salt/modules/yumpkg.py +++ b/salt/modules/yumpkg.py -@@ -452,7 +452,8 @@ def latest_version(*names, **kwargs): - out = __salt__['cmd.run_all'](cmd, - output_loglevel='trace', - ignore_retcode=True, -- python_shell=False) -+ python_shell=False, -+ env={"SALT_RUNNING": '1'}) - if out['retcode'] != 0: - if out['stderr']: - # Check first if this is just a matter of the packages being -@@ -850,7 +851,8 @@ def list_repo_pkgs(*args, **kwargs): +@@ -887,7 +887,8 @@ def list_repo_pkgs(*args, **kwargs): yum_version = None if _yum() != 'yum' else _LooseVersion( __salt__['cmd.run']( ['yum', '--version'], @@ -32,133 +22,7 @@ index 9ce4926790..51832bf883 100644 ).splitlines()[0].strip() ) # Really old version of yum; does not even have --showduplicates option -@@ -865,7 +867,8 @@ def list_repo_pkgs(*args, **kwargs): - cmd_prefix + [pkg_src], - output_loglevel='trace', - ignore_retcode=True, -- python_shell=False -+ python_shell=False, -+ env={"SALT_RUNNING": '1'} - ) - if out['retcode'] == 0: - _parse_output(out['stdout'], strict=True) -@@ -882,7 +885,8 @@ def list_repo_pkgs(*args, **kwargs): - cmd_prefix + [pkg_src], - output_loglevel='trace', - ignore_retcode=True, -- python_shell=False -+ python_shell=False, -+ env={"SALT_RUNNING": '1'} - ) - if out['retcode'] == 0: - _parse_output(out['stdout'], strict=True) -@@ -898,7 +902,8 @@ def list_repo_pkgs(*args, **kwargs): - out = __salt__['cmd.run_all'](cmd, - output_loglevel='trace', - ignore_retcode=True, -- python_shell=False) -+ python_shell=False, -+ env={"SALT_RUNNING": '1'}) - if out['retcode'] != 0 and 'Error:' in out['stdout']: - continue - _parse_output(out['stdout']) -@@ -955,7 +960,8 @@ def list_upgrades(refresh=True, **kwargs): - out = __salt__['cmd.run_all'](cmd, - output_loglevel='trace', - ignore_retcode=True, -- python_shell=False) -+ python_shell=False, -+ env={"SALT_RUNNING": '1'}) - if out['retcode'] != 0 and 'Error:' in out: - return {} - -@@ -1090,12 +1096,13 @@ def refresh_db(**kwargs): - clean_cmd.extend(options) - update_cmd.extend(options) - -- __salt__['cmd.run'](clean_cmd, python_shell=False) -+ __salt__['cmd.run'](clean_cmd, python_shell=False, env={"SALT_RUNNING": '1'}) - if check_update_: - result = __salt__['cmd.retcode'](update_cmd, - output_loglevel='trace', - ignore_retcode=True, -- python_shell=False) -+ python_shell=False, -+ env={"SALT_RUNNING": '1'}) - return retcodes.get(result, False) - return True - -@@ -1634,7 +1641,8 @@ def install(name=None, - cmd, - output_loglevel='trace', - python_shell=False, -- redirect_stderr=True -+ redirect_stderr=True, -+ env={"SALT_RUNNING": '1'} - ) - if out['retcode'] != 0: - errors.append(out['stdout']) -@@ -1654,7 +1662,8 @@ def install(name=None, - cmd, - output_loglevel='trace', - python_shell=False, -- redirect_stderr=True -+ redirect_stderr=True, -+ env={"SALT_RUNNING": '1'} - ) - if out['retcode'] != 0: - errors.append(out['stdout']) -@@ -1674,7 +1683,8 @@ def install(name=None, - cmd, - output_loglevel='trace', - python_shell=False, -- redirect_stderr=True -+ redirect_stderr=True, -+ env={"SALT_RUNNING": '1'} - ) - if out['retcode'] != 0: - errors.append(out['stdout']) -@@ -1866,7 +1876,8 @@ def upgrade(name=None, - - result = __salt__['cmd.run_all'](cmd, - output_loglevel='trace', -- 
python_shell=False) -+ python_shell=False, -+ env={"SALT_RUNNING": '1'}) - __context__.pop('pkg.list_pkgs', None) - new = list_pkgs() - ret = salt.utils.data.compare_dicts(old, new) -@@ -1957,7 +1968,8 @@ def remove(name=None, pkgs=None, **kwargs): # pylint: disable=W0613 - out = __salt__['cmd.run_all']( - [_yum(), '-y', 'remove'] + targets, - output_loglevel='trace', -- python_shell=False -+ python_shell=False, -+ env={"SALT_RUNNING": '1'} - ) - - if out['retcode'] != 0 and out['stderr']: -@@ -2094,7 +2106,8 @@ def hold(name=None, pkgs=None, sources=None, normalize=True, **kwargs): # pylin - else: - out = __salt__['cmd.run_all']( - [_yum(), 'versionlock', target], -- python_shell=False -+ python_shell=False, -+ env={"SALT_RUNNING": '1'} - ) - - if out['retcode'] == 0: -@@ -2203,7 +2216,8 @@ def unhold(name=None, pkgs=None, sources=None, **kwargs): # pylint: disable=W06 - else: - out = __salt__['cmd.run_all']( - [_yum(), 'versionlock', 'delete'] + search_locks, -- python_shell=False -+ python_shell=False, -+ env={"SALT_RUNNING": '1'} - ) - - if out['retcode'] == 0: -@@ -2254,7 +2268,8 @@ def list_holds(pattern=__HOLD_PATTERN, full=True): +@@ -2298,7 +2299,8 @@ def list_holds(pattern=__HOLD_PATTERN, full=True): _check_versionlock() out = __salt__['cmd.run']([_yum(), 'versionlock', 'list'], @@ -168,7 +32,7 @@ index 9ce4926790..51832bf883 100644 ret = [] for line in salt.utils.itertools.split(out, '\n'): match = _get_hold(line, pattern=pattern, full=full) -@@ -2319,7 +2334,8 @@ def group_list(): +@@ -2364,7 +2366,8 @@ def group_list(): out = __salt__['cmd.run_stdout']( [_yum(), 'grouplist', 'hidden'], output_loglevel='trace', @@ -178,7 +42,7 @@ index 9ce4926790..51832bf883 100644 ) key = None for line in salt.utils.itertools.split(out, '\n'): -@@ -2386,7 +2402,8 @@ def group_info(name, expand=False): +@@ -2431,7 +2434,8 @@ def group_info(name, expand=False): out = __salt__['cmd.run_stdout']( cmd, output_loglevel='trace', @@ -188,7 +52,7 @@ index 9ce4926790..51832bf883 100644 ) g_info = {} -@@ -3055,7 +3072,8 @@ def download(*packages): +@@ -3100,7 +3104,8 @@ def download(*packages): __salt__['cmd.run']( cmd, output_loglevel='trace', @@ -198,7 +62,7 @@ index 9ce4926790..51832bf883 100644 ) ret = {} for dld_result in os.listdir(CACHE_DIR): -@@ -3130,7 +3148,8 @@ def _get_patches(installed_only=False): +@@ -3175,7 +3180,8 @@ def _get_patches(installed_only=False): cmd = [_yum(), '--quiet', 'updateinfo', 'list', 'all'] ret = __salt__['cmd.run_stdout']( cmd, @@ -209,6 +73,6 @@ index 9ce4926790..51832bf883 100644 for line in salt.utils.itertools.split(ret, os.linesep): inst, advisory_id, sev, pkg = re.match(r'([i|\s]) ([^\s]+) +([^\s]+) +([^\s]+)', -- -2.13.7 +2.17.1 diff --git a/add-hold-unhold-functions.patch b/add-hold-unhold-functions.patch index b1def6b..d39030f 100644 --- a/add-hold-unhold-functions.patch +++ b/add-hold-unhold-functions.patch @@ -1,4 +1,4 @@ -From ba5171ce35b733a1f7997b4ea038998802b67298 Mon Sep 17 00:00:00 2001 +From 4219d3d69799bc20f88eed0a02ef15c932e6782e Mon Sep 17 00:00:00 2001 From: Bo Maryniuk Date: Thu, 6 Dec 2018 16:26:23 +0100 Subject: [PATCH] Add hold/unhold functions @@ -7,22 +7,22 @@ Add unhold function Add warnings --- - salt/modules/zypper.py | 88 +++++++++++++++++++++++++++++++++++++++++- + salt/modules/zypperpkg.py | 88 ++++++++++++++++++++++++++++++++++++++- 1 file changed, 87 insertions(+), 1 deletion(-) -diff --git a/salt/modules/zypper.py b/salt/modules/zypper.py -index 6845e44ab6..773354b2f3 100644 ---- a/salt/modules/zypper.py -+++ 
b/salt/modules/zypper.py +diff --git a/salt/modules/zypperpkg.py b/salt/modules/zypperpkg.py +index 001b852fc4..0c26e2214c 100644 +--- a/salt/modules/zypperpkg.py ++++ b/salt/modules/zypperpkg.py @@ -41,6 +41,7 @@ import salt.utils.pkg import salt.utils.pkg.rpm import salt.utils.stringutils import salt.utils.systemd +import salt.utils.versions from salt.utils.versions import LooseVersion + import salt.utils.environment from salt.exceptions import CommandExecutionError, MinionError, SaltInvocationError - -@@ -1738,7 +1739,7 @@ def clean_locks(): +@@ -1742,7 +1743,7 @@ def clean_locks(): return out @@ -31,7 +31,7 @@ index 6845e44ab6..773354b2f3 100644 ''' Remove specified package lock. -@@ -1750,7 +1751,47 @@ def remove_lock(packages, **kwargs): # pylint: disable=unused-argument +@@ -1754,7 +1755,47 @@ def remove_lock(packages, **kwargs): # pylint: disable=unused-argument salt '*' pkg.remove_lock ,, salt '*' pkg.remove_lock pkgs='["foo", "bar"]' ''' @@ -79,7 +79,7 @@ index 6845e44ab6..773354b2f3 100644 locks = list_locks() try: packages = list(__salt__['pkg_resource.parse_targets'](packages)[0].keys()) -@@ -1771,6 +1812,50 @@ def remove_lock(packages, **kwargs): # pylint: disable=unused-argument +@@ -1775,6 +1816,50 @@ def remove_lock(packages, **kwargs): # pylint: disable=unused-argument return {'removed': len(removed), 'not_found': missing} @@ -130,7 +130,7 @@ index 6845e44ab6..773354b2f3 100644 def add_lock(packages, **kwargs): # pylint: disable=unused-argument ''' Add a package lock. Specify packages to lock by exact name. -@@ -1783,6 +1868,7 @@ def add_lock(packages, **kwargs): # pylint: disable=unused-argument +@@ -1787,6 +1872,7 @@ def add_lock(packages, **kwargs): # pylint: disable=unused-argument salt '*' pkg.add_lock ,, salt '*' pkg.add_lock pkgs='["foo", "bar"]' ''' diff --git a/add-other-attribute-to-gecos-fields-to-avoid-inconsi.patch b/add-other-attribute-to-gecos-fields-to-avoid-inconsi.patch deleted file mode 100644 index c99ca59..0000000 --- a/add-other-attribute-to-gecos-fields-to-avoid-inconsi.patch +++ /dev/null @@ -1,294 +0,0 @@ -From cc8d6eaddf59973a94512779853558789b56ca3e Mon Sep 17 00:00:00 2001 -From: =?UTF-8?q?Pablo=20Su=C3=A1rez=20Hern=C3=A1ndez?= - -Date: Wed, 25 Apr 2018 12:55:36 +0100 -Subject: [PATCH] Add 'other' attribute to GECOS fields to avoid - inconsistencies with chfn - -Fix unsupported chars checking on GECOS fields - -Add unit test for new method 'user.chother' - -Do make comparisons in a single line - -Add 'other' as valid kwargs for 'user.add' method ---- - salt/modules/useradd.py | 41 ++++++++++++++++++++++++++++---------- - salt/states/user.py | 28 ++++++++++++++++++-------- - tests/unit/modules/test_useradd.py | 36 +++++++++++++++++++++++++++++++-- - 3 files changed, 84 insertions(+), 21 deletions(-) - -diff --git a/salt/modules/useradd.py b/salt/modules/useradd.py -index a61ba0e960..fc3c82a8bc 100644 ---- a/salt/modules/useradd.py -+++ b/salt/modules/useradd.py -@@ -60,17 +60,18 @@ def _get_gecos(name): - Retrieve GECOS field info and return it in dictionary form - ''' - gecos_field = salt.utils.stringutils.to_unicode( -- pwd.getpwnam(_quote_username(name)).pw_gecos).split(',', 3) -+ pwd.getpwnam(_quote_username(name)).pw_gecos).split(',', 4) - if not gecos_field: - return {} - else: - # Assign empty strings for any unspecified trailing GECOS fields -- while len(gecos_field) < 4: -+ while len(gecos_field) < 5: - gecos_field.append('') - return {'fullname': salt.utils.locales.sdecode(gecos_field[0]), - 'roomnumber': 
salt.utils.locales.sdecode(gecos_field[1]), - 'workphone': salt.utils.locales.sdecode(gecos_field[2]), -- 'homephone': salt.utils.locales.sdecode(gecos_field[3])} -+ 'homephone': salt.utils.locales.sdecode(gecos_field[3]), -+ 'other': salt.utils.locales.sdecode(gecos_field[4])} - - - def _build_gecos(gecos_dict): -@@ -78,10 +79,11 @@ def _build_gecos(gecos_dict): - Accepts a dictionary entry containing GECOS field names and their values, - and returns a full GECOS comment string, to be used with usermod. - ''' -- return '{0},{1},{2},{3}'.format(gecos_dict.get('fullname', ''), -- gecos_dict.get('roomnumber', ''), -- gecos_dict.get('workphone', ''), -- gecos_dict.get('homephone', '')).rstrip(',') -+ return '{0},{1},{2},{3},{4}'.format(gecos_dict.get('fullname', ''), -+ gecos_dict.get('roomnumber', ''), -+ gecos_dict.get('workphone', ''), -+ gecos_dict.get('homephone', ''), -+ gecos_dict.get('other', ''),).rstrip(',') - - - def _update_gecos(name, key, value, root=None): -@@ -124,6 +126,7 @@ def add(name, - roomnumber='', - workphone='', - homephone='', -+ other='', - createhome=True, - loginclass=None, - root=None, -@@ -237,6 +240,8 @@ def add(name, - chworkphone(name, workphone) - if homephone: - chhomephone(name, homephone) -+ if other: -+ chother(name, other) - return True - - -@@ -507,6 +512,19 @@ def chhomephone(name, homephone): - return _update_gecos(name, 'homephone', homephone) - - -+def chother(name, other): -+ ''' -+ Change the user's other GECOS attribute -+ -+ CLI Example: -+ -+ .. code-block:: bash -+ -+ salt '*' user.chother foobar -+ ''' -+ return _update_gecos(name, 'other', other) -+ -+ - def chloginclass(name, loginclass, root=None): - ''' - Change the default login class of the user -@@ -588,9 +606,9 @@ def _format_info(data): - Return user information in a pretty way - ''' - # Put GECOS info into a list -- gecos_field = salt.utils.stringutils.to_unicode(data.pw_gecos).split(',', 3) -- # Make sure our list has at least four elements -- while len(gecos_field) < 4: -+ gecos_field = salt.utils.stringutils.to_unicode(data.pw_gecos).split(',', 4) -+ # Make sure our list has at least five elements -+ while len(gecos_field) < 5: - gecos_field.append('') - - return {'gid': data.pw_gid, -@@ -603,7 +621,8 @@ def _format_info(data): - 'fullname': gecos_field[0], - 'roomnumber': gecos_field[1], - 'workphone': gecos_field[2], -- 'homephone': gecos_field[3]} -+ 'homephone': gecos_field[3], -+ 'other': gecos_field[4]} - - - @salt.utils.decorators.path.which('id') -diff --git a/salt/states/user.py b/salt/states/user.py -index f4ae81dd31..34f5a9d541 100644 ---- a/salt/states/user.py -+++ b/salt/states/user.py -@@ -68,6 +68,7 @@ def _changes(name, - roomnumber='', - workphone='', - homephone='', -+ other='', - loginclass=None, - date=None, - mindays=0, -@@ -170,24 +171,26 @@ def _changes(name, - - # MacOS doesn't have full GECOS support, so check for the "ch" functions - # and ignore these parameters if these functions do not exist. 
-- if 'user.chroomnumber' in __salt__ \ -- and roomnumber is not None: -+ if 'user.chroomnumber' in __salt__ and roomnumber is not None: - roomnumber = sdecode_if_string(roomnumber) - lusr['roomnumber'] = sdecode_if_string(lusr['roomnumber']) - if lusr['roomnumber'] != roomnumber: - change['roomnumber'] = roomnumber -- if 'user.chworkphone' in __salt__ \ -- and workphone is not None: -+ if 'user.chworkphone' in __salt__ and workphone is not None: - workphone = sdecode_if_string(workphone) - lusr['workphone'] = sdecode_if_string(lusr['workphone']) - if lusr['workphone'] != workphone: - change['workphone'] = workphone -- if 'user.chhomephone' in __salt__ \ -- and homephone is not None: -+ if 'user.chhomephone' in __salt__ and homephone is not None: - homephone = sdecode_if_string(homephone) - lusr['homephone'] = sdecode_if_string(lusr['homephone']) - if lusr['homephone'] != homephone: - change['homephone'] = homephone -+ if 'user.chother' in __salt__ and other is not None: -+ other = sdecode_if_string(other) -+ lusr['other'] = sdecode_if_string(lusr['other']) -+ if lusr['other'] != other: -+ change['other'] = other - # OpenBSD/FreeBSD login class - if __grains__['kernel'] in ('OpenBSD', 'FreeBSD'): - if loginclass: -@@ -236,6 +239,7 @@ def present(name, - roomnumber=None, - workphone=None, - homephone=None, -+ other=None, - loginclass=None, - date=None, - mindays=None, -@@ -377,7 +381,10 @@ def present(name, - - homephone - The user's home phone number (not supported in MacOS) -- If GECOS field contains more than 3 commas, this field will have the rest of 'em -+ -+ other -+ The user's other attribute (not supported in MacOS) -+ If GECOS field contains more than 4 commas, this field will have the rest of 'em - - .. versionchanged:: 2014.7.0 - Shadow attribute support added. 
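The split(',', 4) used by _get_gecos() earlier in this patch is what makes the new
trailing 'other' field a catch-all: the first four commas delimit fullname, roomnumber,
workphone and homephone, and anything after the fourth comma lands in 'other' verbatim,
which is exactly what the updated docstring just above warns about. A quick illustration
with made-up values:

    >>> 'J. Doe,Room 42,555-0100,555-0199,notes,with,commas'.split(',', 4)
    ['J. Doe', 'Room 42', '555-0100', '555-0199', 'notes,with,commas']
    >>> 'J. Doe'.split(',', 4)  # short entries are padded to five fields later
    ['J. Doe']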
-@@ -448,6 +455,8 @@ def present(name, - workphone = sdecode(workphone) - if homephone is not None: - homephone = sdecode(homephone) -+ if other is not None: -+ other = sdecode(other) - - # createhome not supported on Windows or Mac - if __grains__['kernel'] in ('Darwin', 'Windows'): -@@ -460,7 +469,7 @@ def present(name, - - # the comma is used to separate field in GECOS, thus resulting into - # salt adding the end of fullname each time this function is called -- for gecos_field in ['fullname', 'roomnumber', 'workphone']: -+ for gecos_field in [fullname, roomnumber, workphone]: - if isinstance(gecos_field, string_types) and ',' in gecos_field: - ret['comment'] = "Unsupported char ',' in {0}".format(gecos_field) - ret['result'] = False -@@ -519,6 +528,7 @@ def present(name, - roomnumber, - workphone, - homephone, -+ other, - loginclass, - date, - mindays, -@@ -654,6 +664,7 @@ def present(name, - roomnumber, - workphone, - homephone, -+ other, - loginclass, - date, - mindays, -@@ -705,6 +716,7 @@ def present(name, - 'roomnumber': roomnumber, - 'workphone': workphone, - 'homephone': homephone, -+ 'other': other, - 'createhome': createhome, - 'nologinit': nologinit, - 'loginclass': loginclass} -diff --git a/tests/unit/modules/test_useradd.py b/tests/unit/modules/test_useradd.py -index fa30a0df71..e79c78c663 100644 ---- a/tests/unit/modules/test_useradd.py -+++ b/tests/unit/modules/test_useradd.py -@@ -46,7 +46,8 @@ class UserAddTestCase(TestCase, LoaderModuleMockMixin): - 'fullname': 'root', - 'roomnumber': '', - 'workphone': '', -- 'homephone': ''} -+ 'homephone': '', -+ 'other': ''} - - @classmethod - def tearDownClass(cls): -@@ -96,7 +97,8 @@ class UserAddTestCase(TestCase, LoaderModuleMockMixin): - 'fullname': 'root', - 'roomnumber': '', - 'workphone': '', -- 'homephone': ''}] -+ 'homephone': '', -+ 'other': ''}] - with patch('salt.modules.useradd._format_info', MagicMock(return_value=self.mock_pwall)): - self.assertEqual(useradd.getent(), ret) - -@@ -330,6 +332,36 @@ class UserAddTestCase(TestCase, LoaderModuleMockMixin): - with patch.object(useradd, 'info', mock): - self.assertFalse(useradd.chhomephone('salt', 1)) - -+ # 'chother' function tests: 1 -+ -+ def test_chother(self): -+ ''' -+ Test if the user's other GECOS attribute is changed -+ ''' -+ mock = MagicMock(return_value=False) -+ with patch.object(useradd, '_get_gecos', mock): -+ self.assertFalse(useradd.chother('salt', 1)) -+ -+ mock = MagicMock(return_value={'other': 'foobar'}) -+ with patch.object(useradd, '_get_gecos', mock): -+ self.assertTrue(useradd.chother('salt', 'foobar')) -+ -+ mock = MagicMock(return_value={'other': 'foobar2'}) -+ with patch.object(useradd, '_get_gecos', mock): -+ mock = MagicMock(return_value=None) -+ with patch.dict(useradd.__salt__, {'cmd.run': mock}): -+ mock = MagicMock(return_value={'other': 'foobar3'}) -+ with patch.object(useradd, 'info', mock): -+ self.assertFalse(useradd.chother('salt', 'foobar')) -+ -+ mock = MagicMock(return_value={'other': 'foobar3'}) -+ with patch.object(useradd, '_get_gecos', mock): -+ mock = MagicMock(return_value=None) -+ with patch.dict(useradd.__salt__, {'cmd.run': mock}): -+ mock = MagicMock(return_value={'other': 'foobar3'}) -+ with patch.object(useradd, 'info', mock): -+ self.assertFalse(useradd.chother('salt', 'foobar')) -+ - # 'info' function tests: 1 - - @skipIf(HAS_PWD is False, 'The pwd module is not available') --- -2.13.7 - - diff --git a/add-saltssh-multi-version-support-across-python-inte.patch 
b/add-saltssh-multi-version-support-across-python-inte.patch index 4538647..0059197 100644 --- a/add-saltssh-multi-version-support-across-python-inte.patch +++ b/add-saltssh-multi-version-support-across-python-inte.patch @@ -1,4 +1,4 @@ -From 23aba97ccbdf9952f6a8107a8d90b40d0d2c41ee Mon Sep 17 00:00:00 2001 +From 18c46c301b98841d941e2d07901e7468de30b83a Mon Sep 17 00:00:00 2001 From: Bo Maryniuk Date: Mon, 12 Mar 2018 12:01:39 +0100 Subject: [PATCH] Add SaltSSH multi-version support across Python @@ -254,18 +254,11 @@ Lintfix Set master_top_first to False by default --- - doc/topics/releases/fluorine.rst | 178 +++++++++++ - salt/client/ssh/__init__.py | 60 ++-- - salt/client/ssh/ssh_py_shim.py | 93 ++++-- - salt/client/ssh/wrapper/__init__.py | 2 +- - salt/config/__init__.py | 1 + - salt/state.py | 2 +- - salt/utils/hashutils.py | 37 +++ - salt/utils/thin.py | 450 +++++++++++++++++++------- - tests/unit/utils/test_thin.py | 612 ++++++++++++++++++++++++++++++++++++ - 9 files changed, 1258 insertions(+), 177 deletions(-) + doc/topics/releases/fluorine.rst | 178 +++++++++++++++++++++++++++++++ + salt/client/ssh/ssh_py_shim.py | 4 + + salt/utils/thin.py | 1 + + 3 files changed, 183 insertions(+) create mode 100644 doc/topics/releases/fluorine.rst - create mode 100644 tests/unit/utils/test_thin.py diff --git a/doc/topics/releases/fluorine.rst b/doc/topics/releases/fluorine.rst new file mode 100644 @@ -451,153 +444,11 @@ index 0000000000..40c69e25cc +a minimal tarball using runners and include that. But this is only possible, when such specific +Salt version is also available on the Master machine, although does not need to be directly +installed together with the older Python interpreter. -diff --git a/salt/client/ssh/__init__.py b/salt/client/ssh/__init__.py -index 141e1c6850..f1300b5698 100644 ---- a/salt/client/ssh/__init__.py -+++ b/salt/client/ssh/__init__.py -@@ -150,9 +150,7 @@ EX_PYTHON_INVALID={EX_THIN_PYTHON_INVALID} - PYTHON_CMDS="python3 python27 python2.7 python26 python2.6 python2 python" - for py_cmd in $PYTHON_CMDS - do -- if command -v "$py_cmd" >/dev/null 2>&1 && "$py_cmd" -c \ -- "import sys; sys.exit(not (sys.version_info >= (2, 6) -- and sys.version_info[0] == {{HOST_PY_MAJOR}}));" -+ if command -v "$py_cmd" >/dev/null 2>&1 && "$py_cmd" -c "import sys; sys.exit(not (sys.version_info >= (2, 6)));" - then - py_cmd_path=`"$py_cmd" -c \ - 'from __future__ import print_function; -@@ -323,7 +321,8 @@ class SSH(object): - extra_mods=self.opts.get('thin_extra_mods'), - overwrite=self.opts['regen_thin'], - python2_bin=self.opts['python2_bin'], -- python3_bin=self.opts['python3_bin']) -+ python3_bin=self.opts['python3_bin'], -+ extended_cfg=self.opts.get('ssh_ext_alternatives')) - self.mods = mod_data(self.fsclient) - - def _get_roster(self): -@@ -850,10 +849,10 @@ class Single(object): - - self.opts = opts - self.tty = tty -- if kwargs.get('wipe'): -- self.wipe = 'False' -+ if kwargs.get('disable_wipe'): -+ self.wipe = False - else: -- self.wipe = 'True' if self.opts.get('ssh_wipe') else 'False' -+ self.wipe = bool(self.opts.get('ssh_wipe')) - if kwargs.get('thin_dir'): - self.thin_dir = kwargs['thin_dir'] - elif self.winrm: -@@ -1178,38 +1177,39 @@ class Single(object): - cachedir = self.opts['_caller_cachedir'] - else: - cachedir = self.opts['cachedir'] -- thin_sum = salt.utils.thin.thin_sum(cachedir, 'sha1') -+ thin_code_digest, thin_sum = salt.utils.thin.thin_sum(cachedir, 'sha1') - debug = '' - if not self.opts.get('log_level'): - self.opts['log_level'] = 'info' - if 
salt.log.LOG_LEVELS['debug'] >= salt.log.LOG_LEVELS[self.opts.get('log_level', 'info')]: - debug = '1' - arg_str = ''' --OPTIONS = OBJ() - OPTIONS.config = \ - """ --{0} -+{config} - """ --OPTIONS.delimiter = '{1}' --OPTIONS.saltdir = '{2}' --OPTIONS.checksum = '{3}' --OPTIONS.hashfunc = '{4}' --OPTIONS.version = '{5}' --OPTIONS.ext_mods = '{6}' --OPTIONS.wipe = {7} --OPTIONS.tty = {8} --OPTIONS.cmd_umask = {9} --ARGS = {10}\n'''.format(self.minion_config, -- RSTR, -- self.thin_dir, -- thin_sum, -- 'sha1', -- salt.version.__version__, -- self.mods.get('version', ''), -- self.wipe, -- self.tty, -- self.cmd_umask, -- self.argv) -+OPTIONS.delimiter = '{delimeter}' -+OPTIONS.saltdir = '{saltdir}' -+OPTIONS.checksum = '{checksum}' -+OPTIONS.hashfunc = '{hashfunc}' -+OPTIONS.version = '{version}' -+OPTIONS.ext_mods = '{ext_mods}' -+OPTIONS.wipe = {wipe} -+OPTIONS.tty = {tty} -+OPTIONS.cmd_umask = {cmd_umask} -+OPTIONS.code_checksum = {code_checksum} -+ARGS = {arguments}\n'''.format(config=self.minion_config, -+ delimeter=RSTR, -+ saltdir=self.thin_dir, -+ checksum=thin_sum, -+ hashfunc='sha1', -+ version=salt.version.__version__, -+ ext_mods=self.mods.get('version', ''), -+ wipe=self.wipe, -+ tty=self.tty, -+ cmd_umask=self.cmd_umask, -+ code_checksum=thin_code_digest, -+ arguments=self.argv) - py_code = SSH_PY_SHIM.replace('#%%OPTS', arg_str) - if six.PY2: - py_code_enc = py_code.encode('base64') diff --git a/salt/client/ssh/ssh_py_shim.py b/salt/client/ssh/ssh_py_shim.py -index 5e5dbdc55e..92ede14930 100644 +index be17a1a38c..595d1c40c7 100644 --- a/salt/client/ssh/ssh_py_shim.py +++ b/salt/client/ssh/ssh_py_shim.py -@@ -16,11 +16,13 @@ import sys - import os - import stat - import subprocess -+import time - - THIN_ARCHIVE = 'salt-thin.tgz' - EXT_ARCHIVE = 'salt-ext_mods.tgz' - - # Keep these in sync with salt/defaults/exitcodes.py -+EX_THIN_PYTHON_INVALID = 10 - EX_THIN_DEPLOY = 11 - EX_THIN_CHECKSUM = 12 - EX_MOD_DEPLOY = 13 -@@ -28,14 +30,13 @@ EX_SCP_NOT_FOUND = 14 - EX_CANTCREAT = 73 - - --class OBJ(object): -+class OptionsContainer(object): - ''' - An empty class for holding instance attribute values. - ''' -- pass - - --OPTIONS = None -+OPTIONS = OptionsContainer() - ARGS = None - # The below line is where OPTIONS can be redefined with internal options - # (rather than cli arguments) when the shim is bundled by -@@ -130,7 +131,7 @@ def need_deployment(): - os.chmod(OPTIONS.saltdir, stt.st_mode | stat.S_IWGRP | stat.S_IRGRP | stat.S_IXGRP) - except OSError: - sys.stdout.write('\n\nUnable to set permissions on thin directory.\nIf sudo_user is set ' -- 'and is not root, be certain the user is in the same group\nas the login user') -+ 'and is not root, be certain the user is in the same group\nas the login user') - sys.exit(1) - - # Delimiter emitted on stdout *only* to indicate shim message to master. 
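A note before the shim hunks that follow: get_executable(), added to the shim below,
selects the remote interpreter from a supported-versions file that
_get_supported_py_config() in salt/utils/thin.py (later in this patch) packs into the
thin archive, one namespace:major:minor record per line. For a master shipping the
default py2/py3 tops plus one configured alternative, the file would read roughly like
this (the 2016.11.4 namespace is the example used elsewhere in this changeset; the
actual entries depend on configuration):

    py2:2:7
    py3:3:0
    2016.11.4:2:6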
-@@ -163,11 +164,15 @@ def unpack_thin(thin_path): +@@ -164,6 +164,9 @@ def unpack_thin(thin_path): old_umask = os.umask(0o077) # pylint: disable=blacklisted-function tfile.extractall(path=OPTIONS.saltdir) tfile.close() @@ -607,1492 +458,26 @@ index 5e5dbdc55e..92ede14930 100644 os.umask(old_umask) # pylint: disable=blacklisted-function try: os.unlink(thin_path) - except OSError: - pass -+ reset_time(OPTIONS.saltdir) +@@ -357,5 +360,6 @@ def main(argv): # pylint: disable=W0613 + return retcode - def need_ext(): -@@ -201,6 +206,47 @@ def unpack_ext(ext_path): - shutil.move(ver_path, ver_dst) - - -+def reset_time(path='.', amt=None): -+ ''' -+ Reset atime/mtime on all files to prevent systemd swipes only part of the files in the /tmp. -+ ''' -+ if not amt: -+ amt = int(time.time()) -+ for fname in os.listdir(path): -+ fname = os.path.join(path, fname) -+ if os.path.isdir(fname): -+ reset_time(fname, amt=amt) -+ os.utime(fname, (amt, amt,)) -+ -+ -+def get_executable(): -+ ''' -+ Find executable which matches supported python version in the thin -+ ''' -+ pymap = {} -+ with open(os.path.join(OPTIONS.saltdir, 'supported-versions')) as _fp: -+ for line in _fp.readlines(): -+ ns, v_maj, v_min = line.strip().split(':') -+ pymap[ns] = (int(v_maj), int(v_min)) -+ -+ pycmds = (sys.executable, 'python3', 'python27', 'python2.7', 'python26', 'python2.6', 'python2', 'python') -+ for py_cmd in pycmds: -+ cmd = py_cmd + ' -c "import sys; sys.stdout.write(\'%s:%s\' % (sys.version_info[0], sys.version_info[1]))"' -+ stdout, _ = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE, shell=True).communicate() -+ if sys.version_info[0] == 2 and sys.version_info[1] < 7: -+ stdout = stdout.decode(get_system_encoding(), "replace").strip() -+ else: -+ stdout = stdout.decode(encoding=get_system_encoding(), errors="replace").strip() -+ if not stdout: -+ continue -+ c_vn = tuple([int(x) for x in stdout.split(':')]) -+ for ns in pymap: -+ if c_vn[0] == pymap[ns][0] and c_vn >= pymap[ns] and os.path.exists(os.path.join(OPTIONS.saltdir, ns)): -+ return py_cmd -+ -+ sys.exit(EX_THIN_PYTHON_INVALID) -+ -+ - def main(argv): # pylint: disable=W0613 - ''' - Main program body -@@ -217,32 +263,25 @@ def main(argv): # pylint: disable=W0613 - if scpstat != 0: - sys.exit(EX_SCP_NOT_FOUND) - -- if not os.path.exists(OPTIONS.saltdir): -- need_deployment() -- -- if not os.path.isdir(OPTIONS.saltdir): -+ if os.path.exists(OPTIONS.saltdir) and not os.path.isdir(OPTIONS.saltdir): - sys.stderr.write( - 'ERROR: salt path "{0}" exists but is' - ' not a directory\n'.format(OPTIONS.saltdir) - ) - sys.exit(EX_CANTCREAT) - -- version_path = os.path.normpath(os.path.join(OPTIONS.saltdir, 'version')) -- if not os.path.exists(version_path) or not os.path.isfile(version_path): -- sys.stderr.write( -- 'WARNING: Unable to locate current thin ' -- ' version: {0}.\n'.format(version_path) -- ) -+ if not os.path.exists(OPTIONS.saltdir): - need_deployment() -- with open(version_path, 'r') as vpo: -- cur_version = vpo.readline().strip() -- if cur_version != OPTIONS.version: -- sys.stderr.write( -- 'WARNING: current thin version {0}' -- ' is not up-to-date with {1}.\n'.format( -- cur_version, OPTIONS.version -- ) -- ) -+ -+ code_checksum_path = os.path.normpath(os.path.join(OPTIONS.saltdir, 'code-checksum')) -+ if not os.path.exists(code_checksum_path) or not os.path.isfile(code_checksum_path): -+ sys.stderr.write('WARNING: Unable to locate current code checksum: {0}.\n'.format(code_checksum_path)) -+ need_deployment() -+ with 
open(code_checksum_path, 'r') as vpo: -+ cur_code_cs = vpo.readline().strip() -+ if cur_code_cs != OPTIONS.code_checksum: -+ sys.stderr.write('WARNING: current code checksum {0} is different to {1}.\n'.format(cur_code_cs, -+ OPTIONS.code_checksum)) - need_deployment() - # Salt thin exists and is up-to-date - fall through and use it - -@@ -272,7 +311,7 @@ def main(argv): # pylint: disable=W0613 - argv_prepared = ARGS - - salt_argv = [ -- sys.executable, -+ get_executable(), - salt_call_path, - '--retcode-passthrough', - '--local', -@@ -305,7 +344,10 @@ def main(argv): # pylint: disable=W0613 - if OPTIONS.tty: - # Returns bytes instead of string on python 3 - stdout, _ = subprocess.Popen(salt_argv, stdout=subprocess.PIPE, stderr=subprocess.PIPE).communicate() -- sys.stdout.write(stdout.decode(encoding=get_system_encoding(), errors="replace")) -+ if sys.version_info[0] == 2 and sys.version_info[1] < 7: -+ sys.stdout.write(stdout.decode(get_system_encoding(), "replace")) -+ else: -+ sys.stdout.write(stdout.decode(encoding=get_system_encoding(), errors="replace")) - sys.stdout.flush() - if OPTIONS.wipe: - shutil.rmtree(OPTIONS.saltdir) -@@ -317,5 +359,6 @@ def main(argv): # pylint: disable=W0613 - if OPTIONS.cmd_umask is not None: - os.umask(old_umask) # pylint: disable=blacklisted-function - + if __name__ == '__main__': sys.exit(main(sys.argv)) -diff --git a/salt/client/ssh/wrapper/__init__.py b/salt/client/ssh/wrapper/__init__.py -index 04d751b51a..09f9344642 100644 ---- a/salt/client/ssh/wrapper/__init__.py -+++ b/salt/client/ssh/wrapper/__init__.py -@@ -113,7 +113,7 @@ class FunctionWrapper(object): - self.opts, - argv, - mods=self.mods, -- wipe=True, -+ disable_wipe=True, - fsclient=self.fsclient, - minion_opts=self.minion_opts, - **self.kwargs -diff --git a/salt/config/__init__.py b/salt/config/__init__.py -index 289991771d..432364b201 100644 ---- a/salt/config/__init__.py -+++ b/salt/config/__init__.py -@@ -1663,6 +1663,7 @@ DEFAULT_MASTER_OPTS = { - 'state_top': 'top.sls', - 'state_top_saltenv': None, - 'master_tops': {}, -+ 'master_tops_first': False, - 'order_masters': False, - 'job_cache': True, - 'ext_job_cache': '', -diff --git a/salt/state.py b/salt/state.py -index 09709347b1..e7288bce2e 100644 ---- a/salt/state.py -+++ b/salt/state.py -@@ -3383,7 +3383,7 @@ class BaseHighState(object): - ext_matches = self._master_tops() - for saltenv in ext_matches: - top_file_matches = matches.get(saltenv, []) -- if self.opts['master_tops_first']: -+ if self.opts.get('master_tops_first'): - first = ext_matches[saltenv] - second = top_file_matches - else: -diff --git a/salt/utils/hashutils.py b/salt/utils/hashutils.py -index b42a60d222..ee01be7377 100644 ---- a/salt/utils/hashutils.py -+++ b/salt/utils/hashutils.py -@@ -9,6 +9,7 @@ import base64 - import hashlib - import hmac - import random -+import os - - # Import Salt libs - from salt.ext import six -@@ -150,3 +151,39 @@ def get_hash(path, form='sha256', chunk_size=65536): - for chunk in iter(lambda: ifile.read(chunk_size), b''): - hash_obj.update(chunk) - return hash_obj.hexdigest() -+ -+ -+class DigestCollector(object): -+ ''' -+ Class to collect digest of the file tree. -+ ''' -+ -+ def __init__(self, form='sha256', buff=0x10000): -+ ''' -+ Constructor of the class. 
-+ :param form: -+ ''' -+ self.__digest = hasattr(hashlib, form) and getattr(hashlib, form)() or None -+ if self.__digest is None: -+ raise ValueError('Invalid hash type: {0}'.format(form)) -+ self.__buff = buff -+ -+ def add(self, path): -+ ''' -+ Update digest with the file content by path. -+ -+ :param path: -+ :return: -+ ''' -+ with salt.utils.files.fopen(path, 'rb') as ifile: -+ for chunk in iter(lambda: ifile.read(self.__buff), b''): -+ self.__digest.update(chunk) -+ -+ def digest(self): -+ ''' -+ Get digest. -+ -+ :return: -+ ''' -+ -+ return salt.utils.stringutils.to_str(self.__digest.hexdigest() + os.linesep) diff --git a/salt/utils/thin.py b/salt/utils/thin.py -index 4c0969ea96..e4b878eb19 100644 +index b60815225e..172b0938f5 100644 --- a/salt/utils/thin.py +++ b/salt/utils/thin.py -@@ -8,11 +8,14 @@ from __future__ import absolute_import, print_function, unicode_literals - +@@ -9,6 +9,7 @@ from __future__ import absolute_import, print_function, unicode_literals + import copy + import logging import os - import sys +import copy import shutil - import tarfile - import zipfile - import tempfile import subprocess -+import salt.utils.stringutils -+import logging - - # Import third party libs - import jinja2 -@@ -21,24 +24,26 @@ import msgpack - import salt.ext.six as _six - import tornado - -+try: -+ import zlib -+except ImportError: -+ zlib = None -+ - # pylint: disable=import-error,no-name-in-module - try: - import certifi -- HAS_CERTIFI = True - except ImportError: -- HAS_CERTIFI = False -+ certifi = None - - try: - import singledispatch -- HAS_SINGLEDISPATCH = True - except ImportError: -- HAS_SINGLEDISPATCH = False -+ singledispatch = None - - try: - import singledispatch_helpers -- HAS_SINGLEDISPATCH_HELPERS = True - except ImportError: -- HAS_SINGLEDISPATCH_HELPERS = False -+ singledispatch_helpers = None - - try: - import backports_abc -@@ -46,25 +51,22 @@ except ImportError: - import salt.ext.backports_abc as backports_abc - - try: -+ # New Jinja only - import markupsafe -- HAS_MARKUPSAFE = True - except ImportError: -- # Older jinja does not need markupsafe -- HAS_MARKUPSAFE = False -+ markupsafe = None - - # pylint: enable=import-error,no-name-in-module - - try: - # Older python where the backport from pypi is installed - from backports import ssl_match_hostname -- HAS_SSL_MATCH_HOSTNAME = True - except ImportError: - # Other older python we use our bundled copy - try: - from salt.ext import ssl_match_hostname -- HAS_SSL_MATCH_HOSTNAME = True - except ImportError: -- HAS_SSL_MATCH_HOSTNAME = False -+ ssl_match_hostname = None - - # Import salt libs - import salt -@@ -76,22 +78,52 @@ import salt.utils.stringutils - import salt.exceptions - import salt.version - --SALTCALL = ''' -+log = logging.getLogger(__name__) -+ -+ -+def _get_salt_call(*dirs, **namespaces): -+ ''' -+ Return salt-call source, based on configuration. -+ This will include additional namespaces for another versions of Salt, -+ if needed (e.g. older interpreters etc). -+ -+ :dirs: List of directories to include in the system path -+ :namespaces: Dictionary of namespace -+ :return: -+ ''' -+ template = '''# -*- coding: utf-8 -*- - import os import sys - --sys.path.insert( -- 0, -- os.path.join( -- os.path.dirname(__file__), -- 'py{0[0]}'.format(sys.version_info) -- ) --) -+# Namespaces is a map: {namespace: major/minor version}, like {'2016.11.4': [2, 6]} -+# Appears only when configured in Master configuration. 
-+namespaces = %namespaces% -+ -+# Default system paths alongside the namespaces -+syspaths = %dirs% -+syspaths.append('py{0}'.format(sys.version_info[0])) -+ -+curr_ver = (sys.version_info[0], sys.version_info[1],) -+ -+namespace = '' -+for ns in namespaces: -+ if curr_ver == tuple(namespaces[ns]): -+ namespace = ns -+ break -+ -+for base in syspaths: -+ sys.path.insert(0, os.path.join(os.path.dirname(__file__), -+ namespace and os.path.join(namespace, base) or base)) - --from salt.scripts import salt_call - if __name__ == '__main__': -+ from salt.scripts import salt_call - salt_call() --'''.encode('utf-8') -+''' -+ -+ for tgt, cnt in [('%dirs%', dirs), ('%namespaces%', namespaces)]: -+ template = template.replace(tgt, salt.utils.json.dumps(cnt)) -+ -+ return salt.utils.stringutils.to_bytes(template) - - - def thin_path(cachedir): -@@ -101,29 +133,137 @@ def thin_path(cachedir): - return os.path.join(cachedir, 'thin', 'thin.tgz') - - --def get_tops(extra_mods='', so_mods=''): -- tops = [ -- os.path.dirname(salt.__file__), -- os.path.dirname(jinja2.__file__), -- os.path.dirname(yaml.__file__), -- os.path.dirname(tornado.__file__), -- os.path.dirname(msgpack.__file__), -- ] -+def _is_shareable(mod): -+ ''' -+ Return True if module is share-able between major Python versions. -+ -+ :param mod: -+ :return: -+ ''' -+ # This list is subject to change -+ shareable = ['salt', 'jinja2', -+ 'msgpack', 'certifi'] -+ -+ return os.path.basename(mod) in shareable -+ -+ -+def _add_dependency(container, obj): -+ ''' -+ Add a dependency to the top list. - -- tops.append(_six.__file__.replace('.pyc', '.py')) -- tops.append(backports_abc.__file__.replace('.pyc', '.py')) -+ :param obj: -+ :param is_file: -+ :return: -+ ''' -+ if os.path.basename(obj.__file__).split('.')[0] == '__init__': -+ container.append(os.path.dirname(obj.__file__)) -+ else: -+ container.append(obj.__file__.replace('.pyc', '.py')) -+ -+ -+def gte(): -+ ''' -+ This function is called externally from the alternative -+ Python interpreter from within _get_tops function. - -- if HAS_CERTIFI: -- tops.append(os.path.dirname(certifi.__file__)) -+ :param extra_mods: -+ :param so_mods: -+ :return: -+ ''' -+ extra = salt.utils.json.loads(sys.argv[1]) -+ tops = get_tops(**extra) - -- if HAS_SINGLEDISPATCH: -- tops.append(singledispatch.__file__.replace('.pyc', '.py')) -+ return salt.utils.json.dumps(tops, ensure_ascii=False) - -- if HAS_SINGLEDISPATCH_HELPERS: -- tops.append(singledispatch_helpers.__file__.replace('.pyc', '.py')) - -- if HAS_SSL_MATCH_HOSTNAME: -- tops.append(os.path.dirname(os.path.dirname(ssl_match_hostname.__file__))) -+def get_ext_tops(config): -+ ''' -+ Get top directories for the dependencies, based on external configuration. 
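For orientation, the "external configuration" validated here is the master-side
ssh_ext_alternatives option wired in through salt/client/ssh/__init__.py above. A sketch
of its shape, reconstructed from the checks in this function and from the unit tests at
the end of this patch (the namespace key and all paths are hypothetical):

    ssh_ext_alternatives = {
        '2016.11.4': {                      # namespace: a locked Salt version
            'py-version': [2, 6],           # major/minor of the alternative interpreter
            'path': '/opt/2016.11.4/salt',  # top of that Salt tree
            'dependencies': {
                'jinja2': '/opt/alt/jinja2',
                'yaml': '/opt/alt/yaml',
                'tornado': '/opt/alt/tornado',
                'msgpack': '/opt/alt/msgpack',
            },
        },
    }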
-+ -+ :return: -+ ''' -+ config = copy.deepcopy(config) -+ alternatives = {} -+ required = ['jinja2', 'yaml', 'tornado', 'msgpack'] -+ tops = [] -+ for ns, cfg in salt.ext.six.iteritems(config or {}): -+ alternatives[ns] = cfg -+ locked_py_version = cfg.get('py-version') -+ err_msg = None -+ if not locked_py_version: -+ err_msg = 'Alternative Salt library: missing specific locked Python version' -+ elif not isinstance(locked_py_version, (tuple, list)): -+ err_msg = ('Alternative Salt library: specific locked Python version ' -+ 'should be a list of major/minor version') -+ if err_msg: -+ raise salt.exceptions.SaltSystemExit(err_msg) -+ -+ if cfg.get('dependencies') == 'inherit': -+ # TODO: implement inheritance of the modules from _here_ -+ raise NotImplementedError('This feature is not yet implemented') -+ else: -+ for dep in cfg.get('dependencies'): -+ mod = cfg['dependencies'][dep] or '' -+ if not mod: -+ log.warning('Module %s has missing configuration', dep) -+ continue -+ elif mod.endswith('.py') and not os.path.isfile(mod): -+ log.warning('Module %s configured with not a file or does not exist: %s', dep, mod) -+ continue -+ elif not mod.endswith('.py') and not os.path.isfile(os.path.join(mod, '__init__.py')): -+ log.warning('Module %s is not a Python importable module with %s', dep, mod) -+ continue -+ tops.append(mod) -+ -+ if dep in required: -+ required.pop(required.index(dep)) -+ -+ required = ', '.join(required) -+ if required: -+ msg = 'Missing dependencies for the alternative version' \ -+ ' in the external configuration: {}'.format(required) -+ log.error(msg) -+ raise salt.exceptions.SaltSystemExit(msg) -+ alternatives[ns]['dependencies'] = tops -+ return alternatives -+ -+ -+def _get_ext_namespaces(config): -+ ''' -+ Get namespaces from the existing configuration. -+ -+ :param config: -+ :return: -+ ''' -+ namespaces = {} -+ if not config: -+ return namespaces -+ -+ for ns in config: -+ constraint_version = tuple(config[ns].get('py-version', [])) -+ if not constraint_version: -+ raise salt.exceptions.SaltSystemExit("An alternative version is configured, but not defined " -+ "to what Python's major/minor version it should be constrained.") -+ else: -+ namespaces[ns] = constraint_version -+ -+ return namespaces -+ -+ -+def get_tops(extra_mods='', so_mods=''): -+ ''' -+ Get top directories for the dependencies, based on Python interpreter. -+ -+ :param extra_mods: -+ :param so_mods: -+ :return: -+ ''' -+ tops = [] -+ for mod in [salt, jinja2, yaml, tornado, msgpack, certifi, singledispatch, -+ singledispatch_helpers, ssl_match_hostname, markupsafe, backports_abc]: -+ if mod: -+ log.debug('Adding module to the tops: "%s"', mod.__name__) -+ _add_dependency(tops, mod) - - for mod in [m for m in extra_mods.split(',') if m]: - if mod not in locals() and mod not in globals(): -@@ -135,28 +275,49 @@ def get_tops(extra_mods='', so_mods=''): - tops.append(moddir) - else: - tops.append(os.path.join(moddir, base + '.py')) -- except ImportError: -- # Not entirely sure this is the right thing, but the only -- # options seem to be 1) fail, 2) spew errors, or 3) pass. -- # Nothing else in here spits errors, and the markupsafe code -- # doesn't bail on import failure, so I followed that lead. -- # And of course, any other failure still S/T's. 
-- pass -+ except ImportError as err: -+ log.exception(err) -+ log.error('Unable to import extra-module "%s"', mod) -+ - for mod in [m for m in so_mods.split(',') if m]: - try: - locals()[mod] = __import__(mod) - tops.append(locals()[mod].__file__) -- except ImportError: -- pass # As per comment above -- if HAS_MARKUPSAFE: -- tops.append(os.path.dirname(markupsafe.__file__)) -+ except ImportError as err: -+ log.exception(err) -+ log.error('Unable to import so-module "%s"', mod) - - return tops - - -+def _get_supported_py_config(tops, extended_cfg): -+ ''' -+ Based on the Salt SSH configuration, create a YAML configuration -+ for the supported Python interpreter versions. This is then written into the thin.tgz -+ archive and then verified by salt.client.ssh.ssh_py_shim.get_executable() -+ -+ Note: Minimum default of 2.x versions is 2.7 and 3.x is 3.0, unless specified in namespaces. -+ -+ :return: -+ ''' -+ pymap = [] -+ for py_ver, tops in _six.iteritems(copy.deepcopy(tops)): -+ py_ver = int(py_ver) -+ if py_ver == 2: -+ pymap.append('py2:2:7') -+ elif py_ver == 3: -+ pymap.append('py3:3:0') -+ -+ for ns, cfg in _six.iteritems(copy.deepcopy(extended_cfg) or {}): -+ pymap.append('{}:{}:{}'.format(ns, *cfg.get('py-version'))) -+ pymap.append('') -+ -+ return salt.utils.stringutils.to_bytes(os.linesep.join(pymap)) -+ -+ - def gen_thin(cachedir, extra_mods='', overwrite=False, so_mods='', - python2_bin='python2', python3_bin='python3', absonly=True, -- compress='gzip'): -+ compress='gzip', extended_cfg=None): - ''' - Generate the salt-thin tarball and print the location of the tarball - Optional additional mods to include (e.g. mako) can be supplied as a comma -@@ -171,19 +332,26 @@ def gen_thin(cachedir, extra_mods='', overwrite=False, so_mods='', - salt-run thin.generate mako,wempy 1 - salt-run thin.generate overwrite=1 - ''' -+ if sys.version_info < (2, 6): -+ raise salt.exceptions.SaltSystemExit('The minimum required python version to run salt-ssh is "2.6".') -+ if compress not in ['gzip', 'zip']: -+ log.warning('Unknown compression type: "%s". Falling back to "gzip" compression.', compress) -+ compress = 'gzip' -+ - thindir = os.path.join(cachedir, 'thin') - if not os.path.isdir(thindir): - os.makedirs(thindir) -- if compress == 'gzip': -- thin_ext = 'tgz' -- elif compress == 'zip': -- thin_ext = 'zip' -- thintar = os.path.join(thindir, 'thin.' + thin_ext) -+ thintar = os.path.join(thindir, 'thin.' + (compress == 'gzip' and 'tgz' or 'zip')) - thinver = os.path.join(thindir, 'version') - pythinver = os.path.join(thindir, '.thin-gen-py-version') - salt_call = os.path.join(thindir, 'salt-call') -+ pymap_cfg = os.path.join(thindir, 'supported-versions') -+ code_checksum = os.path.join(thindir, 'code-checksum') -+ digest_collector = salt.utils.hashutils.DigestCollector() -+ - with salt.utils.files.fopen(salt_call, 'wb') as fp_: -- fp_.write(SALTCALL) -+ fp_.write(_get_salt_call('pyall', **_get_ext_namespaces(extended_cfg))) -+ - if os.path.isfile(thintar): - if not overwrite: - if os.path.isfile(thinver): -@@ -197,85 +365,88 @@ def gen_thin(cachedir, extra_mods='', overwrite=False, so_mods='', - - if overwrite: - try: -+ log.debug('Removing %s archive file', thintar) - os.remove(thintar) -- except OSError: -- pass -+ except OSError as exc: -+ log.error('Error while removing %s file: %s', thintar, exc) -+ if os.path.exists(thintar): -+ raise salt.exceptions.SaltSystemExit('Unable to remove %s file. 
See logs for details.', thintar) - else: - return thintar - if _six.PY3: - # Let's check for the minimum python 2 version requirement, 2.6 -- py_shell_cmd = ( -- python2_bin + ' -c \'from __future__ import print_function; import sys; ' -- 'print("{0}.{1}".format(*(sys.version_info[:2])));\'' -- ) -+ py_shell_cmd = "{} -c 'import sys;sys.stdout.write(\"%s.%s\\n\" % sys.version_info[:2]);'".format(python2_bin) - cmd = subprocess.Popen(py_shell_cmd, stdout=subprocess.PIPE, shell=True) - stdout, _ = cmd.communicate() - if cmd.returncode == 0: - py2_version = tuple(int(n) for n in stdout.decode('utf-8').strip().split('.')) - if py2_version < (2, 6): -- # Bail! - raise salt.exceptions.SaltSystemExit( - 'The minimum required python version to run salt-ssh is "2.6".' - 'The version reported by "{0}" is "{1}". Please try "salt-ssh ' -- '--python2-bin=".'.format(python2_bin, -- stdout.strip()) -- ) -- elif sys.version_info < (2, 6): -- # Bail! Though, how did we reached this far in the first place. -- raise salt.exceptions.SaltSystemExit( -- 'The minimum required python version to run salt-ssh is "2.6".' -- ) -+ '--python2-bin=".'.format(python2_bin, stdout.strip())) -+ else: -+ log.error('Unable to detect Python-2 version') -+ log.debug(stdout) - -+ tops_failure_msg = 'Failed %s tops for Python binary %s.' - tops_py_version_mapping = {} - tops = get_tops(extra_mods=extra_mods, so_mods=so_mods) -- if _six.PY2: -- tops_py_version_mapping['2'] = tops -- else: -- tops_py_version_mapping['3'] = tops -+ tops_py_version_mapping[sys.version_info.major] = tops - -- # TODO: Consider putting known py2 and py3 compatible libs in it's own sharable directory. -- # This would reduce the thin size. -- if _six.PY2 and sys.version_info[0] == 2: -+ # Collect tops, alternative to 2.x version -+ if _six.PY2 and sys.version_info.major == 2: - # Get python 3 tops -- py_shell_cmd = ( -- python3_bin + ' -c \'import sys; import json; import salt.utils.thin; ' -- 'print(json.dumps(salt.utils.thin.get_tops(**(json.loads(sys.argv[1]))), ensure_ascii=False)); exit(0);\' ' -- '\'{0}\''.format(salt.utils.json.dumps({'extra_mods': extra_mods, 'so_mods': so_mods})) -- ) -+ py_shell_cmd = "{0} -c 'import salt.utils.thin as t;print(t.gte())' '{1}'".format( -+ python3_bin, salt.utils.json.dumps({'extra_mods': extra_mods, 'so_mods': so_mods})) - cmd = subprocess.Popen(py_shell_cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE, shell=True) - stdout, stderr = cmd.communicate() - if cmd.returncode == 0: - try: - tops = salt.utils.json.loads(stdout) - tops_py_version_mapping['3'] = tops -- except ValueError: -- pass -- if _six.PY3 and sys.version_info[0] == 3: -+ except ValueError as err: -+ log.error(tops_failure_msg, 'parsing', python3_bin) -+ log.exception(err) -+ else: -+ log.error(tops_failure_msg, 'collecting', python3_bin) -+ log.debug(stderr) -+ -+ # Collect tops, alternative to 3.x version -+ if _six.PY3 and sys.version_info.major == 3: - # Get python 2 tops -- py_shell_cmd = ( -- python2_bin + ' -c \'from __future__ import print_function; ' -- 'import sys; import json; import salt.utils.thin; ' -- 'print(json.dumps(salt.utils.thin.get_tops(**(json.loads(sys.argv[1]))), ensure_ascii=False)); exit(0);\' ' -- '\'{0}\''.format(salt.utils.json.dumps({'extra_mods': extra_mods, 'so_mods': so_mods})) -- ) -+ py_shell_cmd = "{0} -c 'import salt.utils.thin as t;print(t.gte())' '{1}'".format( -+ python2_bin, salt.utils.json.dumps({'extra_mods': extra_mods, 'so_mods': so_mods})) - cmd = subprocess.Popen(py_shell_cmd, 
stdout=subprocess.PIPE, stderr=subprocess.PIPE, shell=True) - stdout, stderr = cmd.communicate() - if cmd.returncode == 0: - try: - tops = salt.utils.json.loads(stdout.decode('utf-8')) - tops_py_version_mapping['2'] = tops -- except ValueError: -- pass -+ except ValueError as err: -+ log.error(tops_failure_msg, 'parsing', python2_bin) -+ log.exception(err) -+ else: -+ log.error(tops_failure_msg, 'collecting', python2_bin) -+ log.debug(stderr) -+ -+ with salt.utils.files.fopen(pymap_cfg, 'wb') as fp_: -+ fp_.write(_get_supported_py_config(tops=tops_py_version_mapping, extended_cfg=extended_cfg)) - - if compress == 'gzip': - tfp = tarfile.open(thintar, 'w:gz', dereference=True) - elif compress == 'zip': -- tfp = zipfile.ZipFile(thintar, 'w') -+ tfp = zipfile.ZipFile(thintar, 'w', compression=zlib and zipfile.ZIP_DEFLATED or zipfile.ZIP_STORED) -+ tfp.add = tfp.write -+ - try: # cwd may not exist if it was removed but salt was run from it - start_dir = os.getcwd() - except OSError: - start_dir = None - tempdir = None -+ -+ # Pack default data -+ log.debug('Packing default libraries based on current Salt version') - for py_ver, tops in _six.iteritems(tops_py_version_mapping): - for top in tops: - if absonly and not os.path.isabs(top): -@@ -291,48 +462,80 @@ def gen_thin(cachedir, extra_mods='', overwrite=False, so_mods='', - egg.extractall(tempdir) - top = os.path.join(tempdir, base) - os.chdir(tempdir) -+ -+ site_pkg_dir = _is_shareable(base) and 'pyall' or 'py{}'.format(py_ver) -+ -+ log.debug('Packing "%s" to "%s" destination', base, site_pkg_dir) - if not os.path.isdir(top): - # top is a single file module - if os.path.exists(os.path.join(top_dirname, base)): -- if compress == 'gzip': -- tfp.add(base, arcname=os.path.join('py{0}'.format(py_ver), base)) -- elif compress == 'zip': -- tfp.write(base, arcname=os.path.join('py{0}'.format(py_ver), base)) -+ tfp.add(base, arcname=os.path.join(site_pkg_dir, base)) - continue - for root, dirs, files in salt.utils.path.os_walk(base, followlinks=True): - for name in files: - if not name.endswith(('.pyc', '.pyo')): -- if compress == 'gzip': -- tfp.add(os.path.join(root, name), -- arcname=os.path.join('py{0}'.format(py_ver), root, name)) -- elif compress == 'zip': -+ digest_collector.add(os.path.join(root, name)) -+ arcname = os.path.join(site_pkg_dir, root, name) -+ if hasattr(tfp, 'getinfo'): - try: - # This is a little slow but there's no clear way to detect duplicates -- tfp.getinfo(os.path.join('py{0}'.format(py_ver), root, name)) -+ tfp.getinfo(os.path.join(site_pkg_dir, root, name)) -+ arcname = None - except KeyError: -- tfp.write(os.path.join(root, name), arcname=os.path.join('py{0}'.format(py_ver), root, name)) -+ log.debug('ZIP: Unable to add "%s" with "getinfo"', arcname) -+ if arcname: -+ tfp.add(os.path.join(root, name), arcname=arcname) -+ - if tempdir is not None: - shutil.rmtree(tempdir) - tempdir = None -+ -+ # Pack alternative data -+ if extended_cfg: -+ log.debug('Packing libraries based on alternative Salt versions') -+ for ns, cfg in _six.iteritems(get_ext_tops(extended_cfg)): -+ tops = [cfg.get('path')] + cfg.get('dependencies') -+ py_ver_major, py_ver_minor = cfg.get('py-version') -+ for top in tops: -+ base, top_dirname = os.path.basename(top), os.path.dirname(top) -+ os.chdir(top_dirname) -+ site_pkg_dir = _is_shareable(base) and 'pyall' or 'py{0}'.format(py_ver_major) -+ log.debug('Packing alternative "%s" to "%s/%s" destination', base, ns, site_pkg_dir) -+ if not os.path.isdir(top): -+ # top is a single file module -+ if 
os.path.exists(os.path.join(top_dirname, base)): -+ tfp.add(base, arcname=os.path.join(ns, site_pkg_dir, base)) -+ continue -+ for root, dirs, files in salt.utils.path.os_walk(base, followlinks=True): -+ for name in files: -+ if not name.endswith(('.pyc', '.pyo')): -+ digest_collector.add(os.path.join(root, name)) -+ arcname = os.path.join(ns, site_pkg_dir, root, name) -+ if hasattr(tfp, 'getinfo'): -+ try: -+ tfp.getinfo(os.path.join(site_pkg_dir, root, name)) -+ arcname = None -+ except KeyError: -+ log.debug('ZIP: Unable to add "%s" with "getinfo"', arcname) -+ if arcname: -+ tfp.add(os.path.join(root, name), arcname=arcname) -+ - os.chdir(thindir) -- if compress == 'gzip': -- tfp.add('salt-call') -- elif compress == 'zip': -- tfp.write('salt-call') - with salt.utils.files.fopen(thinver, 'w+') as fp_: - fp_.write(salt.version.__version__) - with salt.utils.files.fopen(pythinver, 'w+') as fp_: -- fp_.write(str(sys.version_info[0])) # future lint: disable=blacklisted-function -+ fp_.write(str(sys.version_info.major)) # future lint: disable=blacklisted-function -+ with salt.utils.files.fopen(code_checksum, 'w+') as fp_: -+ fp_.write(digest_collector.digest()) - os.chdir(os.path.dirname(thinver)) -- if compress == 'gzip': -- tfp.add('version') -- tfp.add('.thin-gen-py-version') -- elif compress == 'zip': -- tfp.write('version') -- tfp.write('.thin-gen-py-version') -+ -+ for fname in ['version', '.thin-gen-py-version', 'salt-call', 'supported-versions', 'code-checksum']: -+ tfp.add(fname) -+ - if start_dir: - os.chdir(start_dir) - tfp.close() -+ - return thintar - - -@@ -341,7 +544,14 @@ def thin_sum(cachedir, form='sha1'): - Return the checksum of the current thin tarball - ''' - thintar = gen_thin(cachedir) -- return salt.utils.hashutils.get_hash(thintar, form) -+ code_checksum_path = os.path.join(cachedir, 'thin', 'code-checksum') -+ if os.path.isfile(code_checksum_path): -+ with salt.utils.fopen(code_checksum_path, 'r') as fh: -+ code_checksum = "'{0}'".format(fh.read().strip()) -+ else: -+ code_checksum = "'0'" -+ -+ return code_checksum, salt.utils.hashutils.get_hash(thintar, form) - - - def gen_min(cachedir, extra_mods='', overwrite=False, so_mods='', -@@ -368,7 +578,7 @@ def gen_min(cachedir, extra_mods='', overwrite=False, so_mods='', - pyminver = os.path.join(mindir, '.min-gen-py-version') - salt_call = os.path.join(mindir, 'salt-call') - with salt.utils.files.fopen(salt_call, 'wb') as fp_: -- fp_.write(SALTCALL) -+ fp_.write(_get_salt_call()) - if os.path.isfile(mintar): - if not overwrite: - if os.path.isfile(minver): -diff --git a/tests/unit/utils/test_thin.py b/tests/unit/utils/test_thin.py -new file mode 100644 -index 0000000000..549d48a703 ---- /dev/null -+++ b/tests/unit/utils/test_thin.py -@@ -0,0 +1,612 @@ -+# -*- coding: utf-8 -*- -+''' -+ :codeauthor: :email:`Bo Maryniuk ` -+''' -+from __future__ import absolute_import, print_function, unicode_literals -+ -+import os -+import sys -+from tests.support.unit import TestCase, skipIf -+from tests.support.mock import ( -+ NO_MOCK, -+ NO_MOCK_REASON, -+ MagicMock, -+ patch) -+ -+import salt.exceptions -+from salt.utils import thin -+from salt.utils import json -+import salt.utils.stringutils -+from salt.utils.stringutils import to_bytes as bts -+ -+try: -+ import pytest -+except ImportError: -+ pytest = None -+ -+ -+@skipIf(NO_MOCK, NO_MOCK_REASON) -+@skipIf(pytest is None, 'PyTest is missing') -+class SSHThinTestCase(TestCase): -+ ''' -+ TestCase for SaltSSH-related parts. 
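The thin_sum() change just above is the producer side of the shim's code-checksum test
earlier in this changeset: gen_thin() runs every packed source file through a
DigestCollector (added to salt/utils/hashutils.py above), stores the result as
code-checksum inside the tarball, and thin_sum() now returns a (code_checksum,
tarball_hash) pair that salt/client/ssh/__init__.py unpacks as thin_code_digest,
thin_sum. A minimal sketch of the collector on its own:

    import salt.utils.hashutils

    collector = salt.utils.hashutils.DigestCollector()  # sha256 by default
    collector.add(__file__)      # gen_thin() calls this once per packed file
    print(collector.digest())    # hex digest plus os.linesep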
-+ ''' -+ def _popen(self, return_value=None, side_effect=None, returncode=0): -+ ''' -+ Fake subprocess.Popen -+ -+ :return: -+ ''' -+ -+ proc = MagicMock() -+ proc.communicate = MagicMock(return_value=return_value, side_effect=side_effect) -+ proc.returncode = returncode -+ popen = MagicMock(return_value=proc) -+ -+ return popen -+ -+ def _version_info(self, major=None, minor=None): -+ ''' -+ Fake version info. -+ -+ :param major: -+ :param minor: -+ :return: -+ ''' -+ class VersionInfo(tuple): -+ pass -+ -+ vi = VersionInfo([major, minor]) -+ vi.major = major or sys.version_info.major -+ vi.minor = minor or sys.version_info.minor -+ -+ return vi -+ -+ def _tarfile(self, getinfo=False): -+ ''' -+ Fake tarfile handler. -+ -+ :return: -+ ''' -+ spec = ['add', 'close'] -+ if getinfo: -+ spec.append('getinfo') -+ -+ tf = MagicMock() -+ tf.open = MagicMock(return_value=MagicMock(spec=spec)) -+ -+ return tf -+ -+ @patch('salt.exceptions.SaltSystemExit', Exception) -+ @patch('salt.utils.thin.log', MagicMock()) -+ @patch('salt.utils.thin.os.path.isfile', MagicMock(return_value=False)) -+ def test_get_ext_tops_cfg_missing_dependencies(self): -+ ''' -+ Test thin.get_ext_tops contains all required dependencies. -+ -+ :return: -+ ''' -+ cfg = {'namespace': {'py-version': [0, 0], 'path': '/foo', 'dependencies': []}} -+ -+ with pytest.raises(Exception) as err: -+ thin.get_ext_tops(cfg) -+ assert 'Missing dependencies' in str(err) -+ assert thin.log.error.called -+ assert 'Missing dependencies' in thin.log.error.call_args[0][0] -+ assert 'jinja2, yaml, tornado, msgpack' in thin.log.error.call_args[0][0] -+ -+ @patch('salt.exceptions.SaltSystemExit', Exception) -+ @patch('salt.utils.thin.log', MagicMock()) -+ @patch('salt.utils.thin.os.path.isfile', MagicMock(return_value=False)) -+ def test_get_ext_tops_cfg_missing_interpreter(self): -+ ''' -+ Test thin.get_ext_tops contains interpreter configuration. -+ -+ :return: -+ ''' -+ cfg = {'namespace': {'path': '/foo', -+ 'dependencies': []}} -+ with pytest.raises(salt.exceptions.SaltSystemExit) as err: -+ thin.get_ext_tops(cfg) -+ assert 'missing specific locked Python version' in str(err) -+ -+ @patch('salt.exceptions.SaltSystemExit', Exception) -+ @patch('salt.utils.thin.log', MagicMock()) -+ @patch('salt.utils.thin.os.path.isfile', MagicMock(return_value=False)) -+ def test_get_ext_tops_cfg_wrong_interpreter(self): -+ ''' -+ Test thin.get_ext_tops contains correct interpreter configuration. -+ -+ :return: -+ ''' -+ cfg = {'namespace': {'path': '/foo', -+ 'py-version': 2, -+ 'dependencies': []}} -+ -+ with pytest.raises(salt.exceptions.SaltSystemExit) as err: -+ thin.get_ext_tops(cfg) -+ assert 'specific locked Python version should be a list of major/minor version' in str(err) -+ -+ @patch('salt.exceptions.SaltSystemExit', Exception) -+ @patch('salt.utils.thin.log', MagicMock()) -+ @patch('salt.utils.thin.os.path.isfile', MagicMock(return_value=False)) -+ def test_get_ext_tops_cfg_interpreter(self): -+ ''' -+ Test thin.get_ext_tops interpreter configuration. 
-+ -+ :return: -+ ''' -+ cfg = {'namespace': {'path': '/foo', -+ 'py-version': [2, 6], -+ 'dependencies': {'jinja2': '', -+ 'yaml': '', -+ 'tornado': '', -+ 'msgpack': ''}}} -+ -+ with pytest.raises(salt.exceptions.SaltSystemExit): -+ thin.get_ext_tops(cfg) -+ assert len(thin.log.warning.mock_calls) == 4 -+ assert sorted([x[1][1] for x in thin.log.warning.mock_calls]) == ['jinja2', 'msgpack', 'tornado', 'yaml'] -+ assert 'Module test has missing configuration' == thin.log.warning.mock_calls[0][1][0] % 'test' -+ -+ @patch('salt.exceptions.SaltSystemExit', Exception) -+ @patch('salt.utils.thin.log', MagicMock()) -+ @patch('salt.utils.thin.os.path.isfile', MagicMock(return_value=False)) -+ def test_get_ext_tops_dependency_config_check(self): -+ ''' -+ Test thin.get_ext_tops dependencies are importable -+ -+ :return: -+ ''' -+ cfg = {'namespace': {'path': '/foo', -+ 'py-version': [2, 6], -+ 'dependencies': {'jinja2': '/jinja/foo.py', -+ 'yaml': '/yaml/', -+ 'tornado': '/tornado/wrong.rb', -+ 'msgpack': 'msgpack.sh'}}} -+ -+ with pytest.raises(salt.exceptions.SaltSystemExit) as err: -+ thin.get_ext_tops(cfg) -+ assert 'Missing dependencies for the alternative version in the external configuration' in str(err) -+ -+ messages = {} -+ for cl in thin.log.warning.mock_calls: -+ messages[cl[1][1]] = cl[1][0] % (cl[1][1], cl[1][2]) -+ for mod in ['tornado', 'yaml', 'msgpack']: -+ assert 'not a Python importable module' in messages[mod] -+ assert 'configured with not a file or does not exist' in messages['jinja2'] -+ -+ @patch('salt.exceptions.SaltSystemExit', Exception) -+ @patch('salt.utils.thin.log', MagicMock()) -+ @patch('salt.utils.thin.os.path.isfile', MagicMock(return_value=True)) -+ def test_get_ext_tops_config_pass(self): -+ ''' -+ Test thin.get_ext_tops configuration -+ -+ :return: -+ ''' -+ cfg = {'namespace': {'path': '/foo', -+ 'py-version': [2, 6], -+ 'dependencies': {'jinja2': '/jinja/foo.py', -+ 'yaml': '/yaml/', -+ 'tornado': '/tornado/tornado.py', -+ 'msgpack': 'msgpack.py'}}} -+ out = thin.get_ext_tops(cfg) -+ assert out['namespace']['py-version'] == cfg['namespace']['py-version'] -+ assert out['namespace']['path'] == cfg['namespace']['path'] -+ assert sorted(out['namespace']['dependencies']) == sorted(['/tornado/tornado.py', -+ '/jinja/foo.py', '/yaml/', 'msgpack.py']) -+ -+ @patch('salt.utils.thin.sys.argv', [None, '{"foo": "bar"}']) -+ @patch('salt.utils.thin.get_tops', lambda **kw: kw) -+ def test_gte(self): -+ ''' -+ Test thin.gte external call for processing the info about tops per interpreter. -+ -+ :return: -+ ''' -+ assert json.loads(thin.gte()).get('foo') == 'bar' -+ -+ def test_add_dep_path(self): -+ ''' -+ Test thin._add_dependency function to setup dependency paths -+ :return: -+ ''' -+ container = [] -+ for pth in ['/foo/bar.py', '/something/else/__init__.py']: -+ thin._add_dependency(container, type(str('obj'), (), {'__file__': pth})()) -+ assert '__init__' not in container[1] -+ assert container == ['/foo/bar.py', '/something/else'] -+ -+ def test_thin_path(self): -+ ''' -+ Test thin.thin_path returns the expected path. -+ -+ :return: -+ ''' -+ assert thin.thin_path('/path/to') == '/path/to/thin/thin.tgz' -+ -+ def test_get_salt_call_script(self): -+ ''' -+ Test get salt-call script rendered. 
-+ -+ :return: -+ ''' -+ out = thin._get_salt_call('foo', 'bar', py26=[2, 6], py27=[2, 7], py34=[3, 4]) -+ for line in salt.utils.stringutils.to_str(out).split(os.linesep): -+ if line.startswith('namespaces = {'): -+ data = json.loads(line.replace('namespaces = ', '').strip()) -+ assert data.get('py26') == [2, 6] -+ assert data.get('py27') == [2, 7] -+ assert data.get('py34') == [3, 4] -+ if line.startswith('syspaths = '): -+ data = json.loads(line.replace('syspaths = ', '')) -+ assert data == ['foo', 'bar'] -+ -+ def test_get_ext_namespaces_empty(self): -+ ''' -+ Test thin._get_ext_namespaces function returns an empty dictionary on nothing -+ :return: -+ ''' -+ for obj in [None, {}, []]: -+ assert thin._get_ext_namespaces(obj) == {} -+ -+ def test_get_ext_namespaces(self): -+ ''' -+ Test thin._get_ext_namespaces function returns namespaces properly out of the config. -+ :return: -+ ''' -+ cfg = {'ns': {'py-version': [2, 7]}} -+ assert thin._get_ext_namespaces(cfg).get('ns') == (2, 7,) -+ assert isinstance(thin._get_ext_namespaces(cfg).get('ns'), tuple) -+ -+ def test_get_ext_namespaces_failure(self): -+ ''' -+ Test thin._get_ext_namespaces function raises an exception -+ if python major/minor version is not configured. -+ :return: -+ ''' -+ with pytest.raises(salt.exceptions.SaltSystemExit): -+ thin._get_ext_namespaces({'ns': {}}) -+ -+ @patch('salt.utils.thin.salt', type(str('salt'), (), {'__file__': '/site-packages/salt'})) -+ @patch('salt.utils.thin.jinja2', type(str('jinja2'), (), {'__file__': '/site-packages/jinja2'})) -+ @patch('salt.utils.thin.yaml', type(str('yaml'), (), {'__file__': '/site-packages/yaml'})) -+ @patch('salt.utils.thin.tornado', type(str('tornado'), (), {'__file__': '/site-packages/tornado'})) -+ @patch('salt.utils.thin.msgpack', type(str('msgpack'), (), {'__file__': '/site-packages/msgpack'})) -+ @patch('salt.utils.thin.certifi', type(str('certifi'), (), {'__file__': '/site-packages/certifi'})) -+ @patch('salt.utils.thin.singledispatch', type(str('singledispatch'), (), {'__file__': '/site-packages/sdp'})) -+ @patch('salt.utils.thin.singledispatch_helpers', type(str('singledispatch_helpers'), (), {'__file__': '/site-packages/sdp_hlp'})) -+ @patch('salt.utils.thin.ssl_match_hostname', type(str('ssl_match_hostname'), (), {'__file__': '/site-packages/ssl_mh'})) -+ @patch('salt.utils.thin.markupsafe', type(str('markupsafe'), (), {'__file__': '/site-packages/markupsafe'})) -+ @patch('salt.utils.thin.backports_abc', type(str('backports_abc'), (), {'__file__': '/site-packages/backports_abc'})) -+ @patch('salt.utils.thin.log', MagicMock()) -+ def test_get_tops(self): -+ ''' -+ Test thin.get_tops to get top directories, based on the interpreter. 
-+ :return: -+ ''' -+ base_tops = ['/site-packages/salt', '/site-packages/jinja2', '/site-packages/yaml', -+ '/site-packages/tornado', '/site-packages/msgpack', '/site-packages/certifi', -+ '/site-packages/sdp', '/site-packages/sdp_hlp', '/site-packages/ssl_mh', -+ '/site-packages/markupsafe', '/site-packages/backports_abc'] -+ -+ tops = thin.get_tops() -+ assert len(tops) == len(base_tops) -+ assert sorted(tops) == sorted(base_tops) -+ -+ @patch('salt.utils.thin.salt', type(str('salt'), (), {'__file__': '/site-packages/salt'})) -+ @patch('salt.utils.thin.jinja2', type(str('jinja2'), (), {'__file__': '/site-packages/jinja2'})) -+ @patch('salt.utils.thin.yaml', type(str('yaml'), (), {'__file__': '/site-packages/yaml'})) -+ @patch('salt.utils.thin.tornado', type(str('tornado'), (), {'__file__': '/site-packages/tornado'})) -+ @patch('salt.utils.thin.msgpack', type(str('msgpack'), (), {'__file__': '/site-packages/msgpack'})) -+ @patch('salt.utils.thin.certifi', type(str('certifi'), (), {'__file__': '/site-packages/certifi'})) -+ @patch('salt.utils.thin.singledispatch', type(str('singledispatch'), (), {'__file__': '/site-packages/sdp'})) -+ @patch('salt.utils.thin.singledispatch_helpers', type(str('singledispatch_helpers'), (), {'__file__': '/site-packages/sdp_hlp'})) -+ @patch('salt.utils.thin.ssl_match_hostname', type(str('ssl_match_hostname'), (), {'__file__': '/site-packages/ssl_mh'})) -+ @patch('salt.utils.thin.markupsafe', type(str('markupsafe'), (), {'__file__': '/site-packages/markupsafe'})) -+ @patch('salt.utils.thin.backports_abc', type(str('backports_abc'), (), {'__file__': '/site-packages/backports_abc'})) -+ @patch('salt.utils.thin.log', MagicMock()) -+ def test_get_tops_extra_mods(self): -+ ''' -+ Test thin.get_tops to get extra-modules alongside the top directories, based on the interpreter. 
-+ :return: -+ ''' -+ base_tops = ['/site-packages/salt', '/site-packages/jinja2', '/site-packages/yaml', -+ '/site-packages/tornado', '/site-packages/msgpack', '/site-packages/certifi', -+ '/site-packages/sdp', '/site-packages/sdp_hlp', '/site-packages/ssl_mh', -+ '/site-packages/markupsafe', '/site-packages/backports_abc', '/custom/foo', '/custom/bar.py'] -+ builtins = sys.version_info.major == 3 and 'builtins' or '__builtin__' -+ with patch('{}.__import__'.format(builtins), -+ MagicMock(side_effect=[type(str('foo'), (), {'__file__': '/custom/foo/__init__.py'}), -+ type(str('bar'), (), {'__file__': '/custom/bar'})])): -+ tops = thin.get_tops(extra_mods='foo,bar') -+ assert len(tops) == len(base_tops) -+ assert sorted(tops) == sorted(base_tops) -+ -+ @patch('salt.utils.thin.salt', type(str('salt'), (), {'__file__': '/site-packages/salt'})) -+ @patch('salt.utils.thin.jinja2', type(str('jinja2'), (), {'__file__': '/site-packages/jinja2'})) -+ @patch('salt.utils.thin.yaml', type(str('yaml'), (), {'__file__': '/site-packages/yaml'})) -+ @patch('salt.utils.thin.tornado', type(str('tornado'), (), {'__file__': '/site-packages/tornado'})) -+ @patch('salt.utils.thin.msgpack', type(str('msgpack'), (), {'__file__': '/site-packages/msgpack'})) -+ @patch('salt.utils.thin.certifi', type(str('certifi'), (), {'__file__': '/site-packages/certifi'})) -+ @patch('salt.utils.thin.singledispatch', type(str('singledispatch'), (), {'__file__': '/site-packages/sdp'})) -+ @patch('salt.utils.thin.singledispatch_helpers', type(str('singledispatch_helpers'), (), {'__file__': '/site-packages/sdp_hlp'})) -+ @patch('salt.utils.thin.ssl_match_hostname', type(str('ssl_match_hostname'), (), {'__file__': '/site-packages/ssl_mh'})) -+ @patch('salt.utils.thin.markupsafe', type(str('markupsafe'), (), {'__file__': '/site-packages/markupsafe'})) -+ @patch('salt.utils.thin.backports_abc', type(str('backports_abc'), (), {'__file__': '/site-packages/backports_abc'})) -+ @patch('salt.utils.thin.log', MagicMock()) -+ def test_get_tops_so_mods(self): -+ ''' -+ Test thin.get_tops to get extra-modules alongside the top directories, based on the interpreter. -+ :return: -+ ''' -+ base_tops = ['/site-packages/salt', '/site-packages/jinja2', '/site-packages/yaml', -+ '/site-packages/tornado', '/site-packages/msgpack', '/site-packages/certifi', -+ '/site-packages/sdp', '/site-packages/sdp_hlp', '/site-packages/ssl_mh', -+ '/site-packages/markupsafe', '/site-packages/backports_abc', '/custom/foo.so', '/custom/bar.so'] -+ builtins = sys.version_info.major == 3 and 'builtins' or '__builtin__' -+ with patch('{}.__import__'.format(builtins), -+ MagicMock(side_effect=[type(str('salt'), (), {'__file__': '/custom/foo.so'}), -+ type(str('salt'), (), {'__file__': '/custom/bar.so'})])): -+ tops = thin.get_tops(so_mods='foo,bar') -+ assert len(tops) == len(base_tops) -+ assert sorted(tops) == sorted(base_tops) -+ -+ @patch('salt.utils.thin.gen_thin', MagicMock(return_value='/path/to/thin/thin.tgz')) -+ @patch('salt.utils.hashutils.get_hash', MagicMock(return_value=12345)) -+ def test_thin_sum(self): -+ ''' -+ Test thin.thin_sum function. 
-+ -+ :return: -+ ''' -+ assert thin.thin_sum('/cachedir', form='sha256')[1] == 12345 -+ thin.salt.utils.hashutils.get_hash.assert_called() -+ assert thin.salt.utils.hashutils.get_hash.call_count == 1 -+ -+ path, form = thin.salt.utils.hashutils.get_hash.call_args[0] -+ assert path == '/path/to/thin/thin.tgz' -+ assert form == 'sha256' -+ -+ @patch('salt.utils.thin.gen_min', MagicMock(return_value='/path/to/thin/min.tgz')) -+ @patch('salt.utils.hashutils.get_hash', MagicMock(return_value=12345)) -+ def test_min_sum(self): -+ ''' -+ Test thin.thin_sum function. -+ -+ :return: -+ ''' -+ assert thin.min_sum('/cachedir', form='sha256') == 12345 -+ thin.salt.utils.hashutils.get_hash.assert_called() -+ assert thin.salt.utils.hashutils.get_hash.call_count == 1 -+ -+ path, form = thin.salt.utils.hashutils.get_hash.call_args[0] -+ assert path == '/path/to/thin/min.tgz' -+ assert form == 'sha256' -+ -+ @patch('salt.utils.thin.sys.version_info', (2, 5)) -+ @patch('salt.exceptions.SaltSystemExit', Exception) -+ def test_gen_thin_fails_ancient_python_version(self): -+ ''' -+ Test thin.gen_thin function raises an exception -+ if Python major/minor version is lower than 2.6 -+ -+ :return: -+ ''' -+ with pytest.raises(salt.exceptions.SaltSystemExit) as err: -+ thin.sys.exc_clear = lambda: None -+ thin.gen_thin('') -+ assert 'The minimum required python version to run salt-ssh is "2.6"' in str(err) -+ -+ @patch('salt.exceptions.SaltSystemExit', Exception) -+ @patch('salt.utils.thin.log', MagicMock()) -+ @patch('salt.utils.thin.os.makedirs', MagicMock()) -+ @patch('salt.utils.files.fopen', MagicMock()) -+ @patch('salt.utils.thin._get_salt_call', MagicMock()) -+ @patch('salt.utils.thin._get_ext_namespaces', MagicMock()) -+ @patch('salt.utils.thin.get_tops', MagicMock(return_value=['/foo3', '/bar3'])) -+ @patch('salt.utils.thin.get_ext_tops', MagicMock(return_value={})) -+ @patch('salt.utils.thin.os.path.isfile', MagicMock()) -+ @patch('salt.utils.thin.os.path.isdir', MagicMock(return_value=True)) -+ @patch('salt.utils.thin.log', MagicMock()) -+ @patch('salt.utils.thin.os.remove', MagicMock()) -+ @patch('salt.utils.thin.os.path.exists', MagicMock()) -+ @patch('salt.utils.path.os_walk', MagicMock(return_value=[])) -+ @patch('salt.utils.thin.subprocess.Popen', -+ _popen(None, side_effect=[(bts('2.7'), bts('')), (bts('["/foo27", "/bar27"]'), bts(''))])) -+ @patch('salt.utils.thin.tarfile', MagicMock()) -+ @patch('salt.utils.thin.zipfile', MagicMock()) -+ @patch('salt.utils.thin.os.getcwd', MagicMock()) -+ @patch('salt.utils.thin.os.chdir', MagicMock()) -+ @patch('salt.utils.thin.tempfile', MagicMock(mkdtemp=MagicMock(return_value=''))) -+ @patch('salt.utils.thin.shutil', MagicMock()) -+ @patch('salt.utils.thin._six.PY3', True) -+ @patch('salt.utils.thin._six.PY2', False) -+ @patch('salt.utils.thin.sys.version_info', _version_info(None, 3, 6)) -+ def test_gen_thin_compression_fallback_py3(self): -+ ''' -+ Test thin.gen_thin function if fallbacks to the gzip compression, once setup wrong. -+ NOTE: Py2 version of this test is not required, as code shares the same spot across the versions. -+ -+ :return: -+ ''' -+ thin.gen_thin('', compress='arj') -+ thin.log.warning.assert_called() -+ pt, msg = thin.log.warning.mock_calls[0][1] -+ assert pt % msg == 'Unknown compression type: "arj". Falling back to "gzip" compression.' 
-+ thin.zipfile.ZipFile.assert_not_called() -+ thin.tarfile.open.assert_called() -+ -+ @patch('salt.exceptions.SaltSystemExit', Exception) -+ @patch('salt.utils.thin.log', MagicMock()) -+ @patch('salt.utils.thin.os.makedirs', MagicMock()) -+ @patch('salt.utils.files.fopen', MagicMock()) -+ @patch('salt.utils.thin._get_salt_call', MagicMock()) -+ @patch('salt.utils.thin._get_ext_namespaces', MagicMock()) -+ @patch('salt.utils.thin.get_tops', MagicMock(return_value=['/foo3', '/bar3'])) -+ @patch('salt.utils.thin.get_ext_tops', MagicMock(return_value={})) -+ @patch('salt.utils.thin.os.path.isfile', MagicMock()) -+ @patch('salt.utils.thin.os.path.isdir', MagicMock(return_value=False)) -+ @patch('salt.utils.thin.log', MagicMock()) -+ @patch('salt.utils.thin.os.remove', MagicMock()) -+ @patch('salt.utils.thin.os.path.exists', MagicMock()) -+ @patch('salt.utils.path.os_walk', MagicMock(return_value=[])) -+ @patch('salt.utils.thin.subprocess.Popen', -+ _popen(None, side_effect=[(bts('2.7'), bts('')), (bts('["/foo27", "/bar27"]'), bts(''))])) -+ @patch('salt.utils.thin.tarfile', MagicMock()) -+ @patch('salt.utils.thin.zipfile', MagicMock()) -+ @patch('salt.utils.thin.os.getcwd', MagicMock()) -+ @patch('salt.utils.thin.os.chdir', MagicMock()) -+ @patch('salt.utils.thin.tempfile', MagicMock(mkdtemp=MagicMock(return_value=''))) -+ @patch('salt.utils.thin.shutil', MagicMock()) -+ @patch('salt.utils.thin._six.PY3', True) -+ @patch('salt.utils.thin._six.PY2', False) -+ @patch('salt.utils.thin.sys.version_info', _version_info(None, 3, 6)) -+ def test_gen_thin_control_files_written_py3(self): -+ ''' -+ Test thin.gen_thin function if control files are written (version, salt-call etc). -+ NOTE: Py2 version of this test is not required, as code shares the same spot across the versions. 
-+ -+ :return: -+ ''' -+ thin.gen_thin('') -+ arc_name, arc_mode = thin.tarfile.method_calls[0][1] -+ assert arc_name == 'thin/thin.tgz' -+ assert arc_mode == 'w:gz' -+ for idx, fname in enumerate(['version', '.thin-gen-py-version', 'salt-call', 'supported-versions']): -+ assert thin.tarfile.open().method_calls[idx + 4][1][0] == fname -+ thin.tarfile.open().close.assert_called() -+ -+ @patch('salt.exceptions.SaltSystemExit', Exception) -+ @patch('salt.utils.thin.log', MagicMock()) -+ @patch('salt.utils.thin.os.makedirs', MagicMock()) -+ @patch('salt.utils.files.fopen', MagicMock()) -+ @patch('salt.utils.thin._get_salt_call', MagicMock()) -+ @patch('salt.utils.thin._get_ext_namespaces', MagicMock()) -+ @patch('salt.utils.thin.get_tops', MagicMock(return_value=['/salt', '/bar3'])) -+ @patch('salt.utils.thin.get_ext_tops', MagicMock(return_value={})) -+ @patch('salt.utils.thin.os.path.isfile', MagicMock()) -+ @patch('salt.utils.thin.os.path.isdir', MagicMock(return_value=True)) -+ @patch('salt.utils.thin.log', MagicMock()) -+ @patch('salt.utils.thin.os.remove', MagicMock()) -+ @patch('salt.utils.thin.os.path.exists', MagicMock()) -+ @patch('salt.utils.path.os_walk', -+ MagicMock(return_value=(('root', [], ['r1', 'r2', 'r3']), ('root2', [], ['r4', 'r5', 'r6'])))) -+ @patch('salt.utils.thin.subprocess.Popen', -+ _popen(None, side_effect=[(bts('2.7'), bts('')), (bts('["/foo27", "/bar27"]'), bts(''))])) -+ @patch('salt.utils.thin.tarfile', _tarfile(None)) -+ @patch('salt.utils.thin.zipfile', MagicMock()) -+ @patch('salt.utils.thin.os.getcwd', MagicMock()) -+ @patch('salt.utils.thin.os.chdir', MagicMock()) -+ @patch('salt.utils.thin.tempfile', MagicMock()) -+ @patch('salt.utils.thin.shutil', MagicMock()) -+ @patch('salt.utils.thin._six.PY3', True) -+ @patch('salt.utils.thin._six.PY2', False) -+ @patch('salt.utils.thin.sys.version_info', _version_info(None, 3, 6)) -+ @patch('salt.utils.hashutils.DigestCollector', MagicMock()) -+ def test_gen_thin_main_content_files_written_py3(self): -+ ''' -+ Test thin.gen_thin function if main content files are written. -+ NOTE: Py2 version of this test is not required, as code shares the same spot across the versions. 
-+ -+ :return: -+ ''' -+ thin.gen_thin('') -+ files = [ -+ 'py2/root/r1', 'py2/root/r2', 'py2/root/r3', 'py2/root2/r4', 'py2/root2/r5', 'py2/root2/r6', -+ 'py2/root/r1', 'py2/root/r2', 'py2/root/r3', 'py2/root2/r4', 'py2/root2/r5', 'py2/root2/r6', -+ 'py3/root/r1', 'py3/root/r2', 'py3/root/r3', 'py3/root2/r4', 'py3/root2/r5', 'py3/root2/r6', -+ 'pyall/root/r1', 'pyall/root/r2', 'pyall/root/r3', 'pyall/root2/r4', 'pyall/root2/r5', 'pyall/root2/r6' -+ ] -+ for cl in thin.tarfile.open().method_calls[:-6]: -+ arcname = cl[2].get('arcname') -+ assert arcname in files -+ files.pop(files.index(arcname)) -+ assert not bool(files) -+ -+ @patch('salt.exceptions.SaltSystemExit', Exception) -+ @patch('salt.utils.thin.log', MagicMock()) -+ @patch('salt.utils.thin.os.makedirs', MagicMock()) -+ @patch('salt.utils.files.fopen', MagicMock()) -+ @patch('salt.utils.thin._get_salt_call', MagicMock()) -+ @patch('salt.utils.thin._get_ext_namespaces', MagicMock()) -+ @patch('salt.utils.thin.get_tops', MagicMock(return_value=[])) -+ @patch('salt.utils.thin.get_ext_tops', -+ MagicMock(return_value={'namespace': {'py-version': [2, 7], -+ 'path': '/opt/2015.8/salt', -+ 'dependencies': ['/opt/certifi', '/opt/whatever']}})) -+ @patch('salt.utils.thin.os.path.isfile', MagicMock()) -+ @patch('salt.utils.thin.os.path.isdir', MagicMock(return_value=True)) -+ @patch('salt.utils.thin.log', MagicMock()) -+ @patch('salt.utils.thin.os.remove', MagicMock()) -+ @patch('salt.utils.thin.os.path.exists', MagicMock()) -+ @patch('salt.utils.path.os_walk', -+ MagicMock(return_value=(('root', [], ['r1', 'r2', 'r3']), ('root2', [], ['r4', 'r5', 'r6'])))) -+ @patch('salt.utils.thin.subprocess.Popen', -+ _popen(None, side_effect=[(bts('2.7'), bts('')), (bts('["/foo27", "/bar27"]'), bts(''))])) -+ @patch('salt.utils.thin.tarfile', _tarfile(None)) -+ @patch('salt.utils.thin.zipfile', MagicMock()) -+ @patch('salt.utils.thin.os.getcwd', MagicMock()) -+ @patch('salt.utils.thin.os.chdir', MagicMock()) -+ @patch('salt.utils.thin.tempfile', MagicMock(mkdtemp=MagicMock(return_value=''))) -+ @patch('salt.utils.thin.shutil', MagicMock()) -+ @patch('salt.utils.thin._six.PY3', True) -+ @patch('salt.utils.thin._six.PY2', False) -+ @patch('salt.utils.thin.sys.version_info', _version_info(None, 3, 6)) -+ @patch('salt.utils.hashutils.DigestCollector', MagicMock()) -+ def test_gen_thin_ext_alternative_content_files_written_py3(self): -+ ''' -+ Test thin.gen_thin function if external alternative content files are written. -+ NOTE: Py2 version of this test is not required, as code shares the same spot across the versions. -+ -+ :return: -+ ''' -+ thin.gen_thin('') -+ files = ['namespace/pyall/root/r1', 'namespace/pyall/root/r2', 'namespace/pyall/root/r3', -+ 'namespace/pyall/root2/r4', 'namespace/pyall/root2/r5', 'namespace/pyall/root2/r6', -+ 'namespace/pyall/root/r1', 'namespace/pyall/root/r2', 'namespace/pyall/root/r3', -+ 'namespace/pyall/root2/r4', 'namespace/pyall/root2/r5', 'namespace/pyall/root2/r6', -+ 'namespace/py2/root/r1', 'namespace/py2/root/r2', 'namespace/py2/root/r3', -+ 'namespace/py2/root2/r4', 'namespace/py2/root2/r5', 'namespace/py2/root2/r6' -+ ] -+ for idx, cl in enumerate(thin.tarfile.open().method_calls[12:-6]): -+ arcname = cl[2].get('arcname') -+ assert arcname in files -+ files.pop(files.index(arcname)) -+ assert not bool(files) -+ -+ def test_get_supported_py_config_typecheck(self): -+ ''' -+ Test collecting proper py-versions. Should return bytes type. 
-+ :return: -+ ''' -+ tops = {} -+ ext_cfg = {} -+ out = thin._get_supported_py_config(tops=tops, extended_cfg=ext_cfg) -+ assert type(salt.utils.stringutils.to_bytes('')) == type(out) -+ -+ def test_get_supported_py_config_base_tops(self): -+ ''' -+ Test collecting proper py-versions. Should return proper base tops. -+ :return: -+ ''' -+ tops = {'3': ['/groundkeepers', '/stole'], '2': ['/the-root', '/password']} -+ ext_cfg = {} -+ out = salt.utils.stringutils.to_str(thin._get_supported_py_config( -+ tops=tops, extended_cfg=ext_cfg)).strip().split('\n') -+ assert len(out) == 2 -+ for t_line in ['py3:3:0', 'py2:2:7']: -+ assert t_line in out -+ -+ def test_get_supported_py_config_ext_tops(self): -+ ''' -+ Test collecting proper py-versions. Should return proper ext conf tops. -+ :return: -+ ''' -+ tops = {} -+ ext_cfg = {'solar-interference': {'py-version': [2, 6]}, 'second-system-effect': {'py-version': [2, 7]}} -+ out = salt.utils.stringutils.to_str(thin._get_supported_py_config( -+ tops=tops, extended_cfg=ext_cfg)).strip().split('\n') -+ for t_line in ['second-system-effect:2:7', 'solar-interference:2:6']: -+ assert t_line in out -- -2.13.7 +2.17.1 diff --git a/add-support-for-python-3.7.patch b/add-support-for-python-3.7.patch deleted file mode 100644 index 9bcd8b0..0000000 --- a/add-support-for-python-3.7.patch +++ /dev/null @@ -1,1171 +0,0 @@ -From 2c4ea84af38aca525e516e0391ea831d1fe6f611 Mon Sep 17 00:00:00 2001 -From: Bo Maryniuk -Date: Mon, 23 Jul 2018 10:51:41 +0200 -Subject: [PATCH] Add support for Python 3.7 - -Rename module to full wording - -Fix imports - -Fix docstring typo - -Fix CLI config - -Fix comments - -Fix docstrings - -Rename async function to asynchronous - -Change internal function signatures to avoid reserved word - -Remove internal variables/properties with the reserved words - -Fix local opts from CLI - -Fix log error/info/warning and exception messages - -Cleanup docstrings at module level - -Fix function signatures in Cassandra module - -Lintfix: PEP8 requires two empty lines - -Deprecate 'async' parameter in Mandrill API - -Revert api call: it is about "functionname_async" suffix. - -Add 'async' backward compatibility - -Update docstring - -Use kwargs instead of directly named parameters - -Support original API - -Fix nag-message - -Keep runner API unchanged - -fix unicode literals - -Remove async keyword, moving it into the kwargs. 
- -Fix configuration setting ---- - salt/client/__init__.py | 2 +- - salt/client/api.py | 6 ++--- - salt/client/mixins.py | 4 ++-- - salt/cloud/clouds/msazure.py | 2 +- - salt/cloud/clouds/profitbricks.py | 2 +- - salt/cloud/clouds/xen.py | 2 +- - salt/daemons/masterapi.py | 6 ++--- - salt/engines/slack.py | 4 ++-- - salt/master.py | 6 ++--- - salt/minion.py | 4 ++-- - salt/modules/cassandra_cql.py | 22 ++++++++++--------- - salt/modules/mandrill.py | 21 ++++++++++++------ - salt/modules/saltutil.py | 6 +++-- - salt/netapi/rest_cherrypy/app.py | 4 ++-- - salt/netapi/rest_cherrypy/event_processor.py | 2 +- - salt/netapi/rest_tornado/event_processor.py | 2 +- - salt/netapi/rest_tornado/saltnado.py | 8 +++---- - .../rest_tornado/saltnado_websockets.py | 2 +- - salt/returners/cassandra_cql_return.py | 8 +++---- - salt/runner.py | 10 ++++----- - salt/thorium/runner.py | 6 ++--- - salt/thorium/wheel.py | 4 ++-- - salt/transport/client.py | 2 +- - salt/transport/ipc.py | 10 ++++----- - salt/transport/server.py | 2 +- - salt/transport/tcp.py | 16 +++++++------- - salt/utils/{async.py => asynchronous.py} | 22 +++++++++---------- - salt/utils/event.py | 18 +++++++-------- - salt/utils/process.py | 4 ++-- - salt/utils/thin.py | 2 +- - salt/wheel/__init__.py | 4 ++-- - .../files/engines/runtests_engine.py | 4 ++-- - .../netapi/rest_tornado/test_app.py | 2 +- - tests/support/case.py | 10 +++++---- - tests/unit/utils/test_async.py | 20 ++++++++--------- - 35 files changed, 131 insertions(+), 118 deletions(-) - rename salt/utils/{async.py => asynchronous.py} (81%) - -diff --git a/salt/client/__init__.py b/salt/client/__init__.py -index 9bf8e32491..dcbc1473e1 100644 ---- a/salt/client/__init__.py -+++ b/salt/client/__init__.py -@@ -284,7 +284,7 @@ class LocalClient(object): - 'No command was sent, no jid was assigned.') - return {} - -- # don't install event subscription listeners when the request is async -+ # don't install event subscription listeners when the request is asynchronous - # and doesn't care. this is important as it will create event leaks otherwise - if not listen: - return pub_data -diff --git a/salt/client/api.py b/salt/client/api.py -index ac6f6de24a..b2aab460fa 100644 ---- a/salt/client/api.py -+++ b/salt/client/api.py -@@ -93,7 +93,7 @@ class APIClient(object): - - The cmd dict items are as follows: - -- mode: either 'sync' or 'async'. Defaults to 'async' if missing -+ mode: either 'sync' or 'asynchronous'. Defaults to 'asynchronous' if missing - fun: required. If the function is to be run on the master using either - a wheel or runner client then the fun: includes either - 'wheel.' or 'runner.' as a prefix and has three parts separated by '.'. 
-@@ -120,7 +120,7 @@ class APIClient(object): - ''' - cmd = dict(cmd) # make copy - client = 'minion' # default to local minion client -- mode = cmd.get('mode', 'async') # default to 'async' -+ mode = cmd.get('mode', 'async') - - # check for wheel or runner prefix to fun name to use wheel or runner client - funparts = cmd.get('fun', '').split('.') -@@ -162,7 +162,7 @@ class APIClient(object): - ''' - return self.runnerClient.master_call(**kwargs) - -- runner_sync = runner_async # always runner async, so works in either mode -+ runner_sync = runner_async # always runner asynchronous, so works in either mode - - def wheel_sync(self, **kwargs): - ''' -diff --git a/salt/client/mixins.py b/salt/client/mixins.py -index 29b6077661..4182fa5b81 100644 ---- a/salt/client/mixins.py -+++ b/salt/client/mixins.py -@@ -458,7 +458,7 @@ class SyncClientMixin(object): - - class AsyncClientMixin(object): - ''' -- A mixin for *Client interfaces to enable easy async function execution -+ A mixin for *Client interfaces to enable easy asynchronous function execution - ''' - client = None - tag_prefix = None -@@ -510,7 +510,7 @@ class AsyncClientMixin(object): - tag = salt.utils.event.tagify(jid, prefix=self.tag_prefix) - return {'tag': tag, 'jid': jid} - -- def async(self, fun, low, user='UNKNOWN', pub=None): -+ def asynchronous(self, fun, low, user='UNKNOWN', pub=None): - ''' - Execute the function in a multiprocess and return the event tag to use - to watch for the return -diff --git a/salt/cloud/clouds/msazure.py b/salt/cloud/clouds/msazure.py -index aa5cd14255..4a95c3af96 100644 ---- a/salt/cloud/clouds/msazure.py -+++ b/salt/cloud/clouds/msazure.py -@@ -888,7 +888,7 @@ def _wait_for_async(conn, request_id): - while result.status == 'InProgress': - count = count + 1 - if count > 120: -- raise ValueError('Timed out waiting for async operation to complete.') -+ raise ValueError('Timed out waiting for asynchronous operation to complete.') - time.sleep(5) - result = conn.get_operation_status(request_id) - -diff --git a/salt/cloud/clouds/profitbricks.py b/salt/cloud/clouds/profitbricks.py -index 1ce0a162f0..8d13bf7b70 100644 ---- a/salt/cloud/clouds/profitbricks.py -+++ b/salt/cloud/clouds/profitbricks.py -@@ -1098,7 +1098,7 @@ def _wait_for_completion(conn, promise, wait_timeout, msg): - ) - - raise Exception( -- 'Timed out waiting for async operation {0} "{1}" to complete.'.format( -+ 'Timed out waiting for asynchronous operation {0} "{1}" to complete.'.format( - msg, six.text_type(promise['requestId']) - ) - ) -diff --git a/salt/cloud/clouds/xen.py b/salt/cloud/clouds/xen.py -index 0b79d4dfb9..6f23b813a7 100644 ---- a/salt/cloud/clouds/xen.py -+++ b/salt/cloud/clouds/xen.py -@@ -719,7 +719,7 @@ def _wait_for_ip(name, session): - - def _run_async_task(task=None, session=None): - ''' -- Run XenAPI task in async mode to prevent timeouts -+ Run XenAPI task in asynchronous mode to prevent timeouts - ''' - if task is None or session is None: - return None -diff --git a/salt/daemons/masterapi.py b/salt/daemons/masterapi.py -index 84537fab3b..62dd0cd1ea 100644 ---- a/salt/daemons/masterapi.py -+++ b/salt/daemons/masterapi.py -@@ -1068,9 +1068,9 @@ class LocalFuncs(object): - try: - fun = load.pop('fun') - runner_client = salt.runner.RunnerClient(self.opts) -- return runner_client.async(fun, -- load.get('kwarg', {}), -- username) -+ return runner_client.asynchronous(fun, -+ load.get('kwarg', {}), -+ username) - except Exception as exc: - log.exception('Exception occurred while introspecting %s') - return {'error': 
{'name': exc.__class__.__name__, -diff --git a/salt/engines/slack.py b/salt/engines/slack.py -index e664bbee03..c35435e42e 100644 ---- a/salt/engines/slack.py -+++ b/salt/engines/slack.py -@@ -740,7 +740,7 @@ class SlackClient(object): - :param interval: time to wait between ending a loop and beginning the next - - ''' -- log.debug('Going to run a command async') -+ log.debug('Going to run a command asynchronous') - runner_functions = sorted(salt.runner.Runner(__opts__).functions) - # Parse args and kwargs - cmd = msg['cmdline'][0] -@@ -762,7 +762,7 @@ class SlackClient(object): - log.debug('Command %s will run via runner_functions', cmd) - # pylint is tripping - # pylint: disable=missing-whitespace-after-comma -- job_id_dict = runner.async(cmd, {'args': args, 'kwargs': kwargs}) -+ job_id_dict = runner.asynchronous(cmd, {'args': args, 'kwargs': kwargs}) - job_id = job_id_dict['jid'] - - # Default to trying to run as a client module. -diff --git a/salt/master.py b/salt/master.py -index e400054d72..86b639dd5b 100644 ---- a/salt/master.py -+++ b/salt/master.py -@@ -1878,9 +1878,9 @@ class ClearFuncs(object): - try: - fun = clear_load.pop('fun') - runner_client = salt.runner.RunnerClient(self.opts) -- return runner_client.async(fun, -- clear_load.get('kwarg', {}), -- username) -+ return runner_client.asynchronous(fun, -+ clear_load.get('kwarg', {}), -+ username) - except Exception as exc: - log.error('Exception occurred while introspecting %s: %s', fun, exc) - return {'error': {'name': exc.__class__.__name__, -diff --git a/salt/minion.py b/salt/minion.py -index 0a6771dccd..17e11c0ebe 100644 ---- a/salt/minion.py -+++ b/salt/minion.py -@@ -926,7 +926,7 @@ class MinionManager(MinionBase): - install_zmq() - self.io_loop = ZMQDefaultLoop.current() - self.process_manager = ProcessManager(name='MultiMinionProcessManager') -- self.io_loop.spawn_callback(self.process_manager.run, async=True) -+ self.io_loop.spawn_callback(self.process_manager.run, **{'async': True}) # Tornado backward compat - - def __del__(self): - self.destroy() -@@ -1123,7 +1123,7 @@ class Minion(MinionBase): - time.sleep(sleep_time) - - self.process_manager = ProcessManager(name='MinionProcessManager') -- self.io_loop.spawn_callback(self.process_manager.run, async=True) -+ self.io_loop.spawn_callback(self.process_manager.run, **{'async': True}) - # We don't have the proxy setup yet, so we can't start engines - # Engines need to be able to access __proxy__ - if not salt.utils.platform.is_proxy(): -diff --git a/salt/modules/cassandra_cql.py b/salt/modules/cassandra_cql.py -index 82b211bddf..30db93dccc 100644 ---- a/salt/modules/cassandra_cql.py -+++ b/salt/modules/cassandra_cql.py -@@ -93,6 +93,7 @@ from salt.exceptions import CommandExecutionError - # Import 3rd-party libs - from salt.ext import six - from salt.ext.six.moves import range -+import salt.utils.versions - - SSL_VERSION = 'ssl_version' - -@@ -128,7 +129,7 @@ def __virtual__(): - - - def _async_log_errors(errors): -- log.error('Cassandra_cql async call returned: %s', errors) -+ log.error('Cassandra_cql asynchronous call returned: %s', errors) - - - def _load_properties(property_name, config_option, set_default=False, default=None): -@@ -361,9 +362,8 @@ def cql_query(query, contact_points=None, port=None, cql_user=None, cql_pass=Non - return ret - - --def cql_query_with_prepare(query, statement_name, statement_arguments, async=False, -- callback_errors=None, -- contact_points=None, port=None, cql_user=None, cql_pass=None): -+def cql_query_with_prepare(query, 
statement_name, statement_arguments, callback_errors=None, contact_points=None, -+ port=None, cql_user=None, cql_pass=None, **kwargs): - ''' - Run a query on a Cassandra cluster and return a dictionary. - -@@ -377,8 +377,8 @@ def cql_query_with_prepare(query, statement_name, statement_arguments, async=Fal - :type statement_name: str - :param statement_arguments: Bind parameters for the SQL statement - :type statement_arguments: list[str] -- :param async: Run this query in asynchronous mode -- :type async: bool -+ :param async: Run this query in asynchronous mode -+ :type async: bool - :param callback_errors: Function to call after query runs if there is an error - :type callback_errors: Function callable - :param contact_points: The Cassandra cluster addresses, can either be a string or a list of IPs. -@@ -401,12 +401,14 @@ def cql_query_with_prepare(query, statement_name, statement_arguments, async=Fal - - # Insert data asynchronously - salt this-node cassandra_cql.cql_query_with_prepare "name_insert" "INSERT INTO USERS (first_name, last_name) VALUES (?, ?)" \ -- statement_arguments=['John','Doe'], async=True -+ statement_arguments=['John','Doe'], asynchronous=True - - # Select data, should not be asynchronous because there is not currently a facility to return data from a future - salt this-node cassandra_cql.cql_query_with_prepare "name_select" "SELECT * FROM USERS WHERE first_name=?" \ - statement_arguments=['John'] - ''' -+ # Backward-compatibility with Python 3.7: "async" is a reserved word -+ asynchronous = kwargs.get('async', False) - try: - cluster, session = _connect(contact_points=contact_points, port=port, - cql_user=cql_user, cql_pass=cql_pass) -@@ -431,7 +433,7 @@ def cql_query_with_prepare(query, statement_name, statement_arguments, async=Fal - ret = [] - - try: -- if async: -+ if asynchronous: - future_results = session.execute_async(bound_statement.bind(statement_arguments)) - # future_results.add_callbacks(_async_log_errors) - else: -@@ -441,7 +443,7 @@ def cql_query_with_prepare(query, statement_name, statement_arguments, async=Fal - msg = "ERROR: Cassandra query failed: {0} reason: {1}".format(query, e) - raise CommandExecutionError(msg) - -- if not async and results: -+ if not asynchronous and results: - for result in results: - values = {} - for key, value in six.iteritems(result): -@@ -456,7 +458,7 @@ def cql_query_with_prepare(query, statement_name, statement_arguments, async=Fal - - # If this was a synchronous call, then we either have an empty list - # because there was no return, or we have a return -- # If this was an async call we only return the empty list -+ # If this was an asynchronous call we only return the empty list - return ret - - -diff --git a/salt/modules/mandrill.py b/salt/modules/mandrill.py -index 248939d09c..7044060154 100644 ---- a/salt/modules/mandrill.py -+++ b/salt/modules/mandrill.py -@@ -24,6 +24,7 @@ import logging - - # Import Salt libs - import salt.utils.json -+import salt.utils.versions - - # import third party - try: -@@ -137,12 +138,13 @@ def _http_request(url, - - - def send(message, -- async=False, -+ asynchronous=False, - ip_pool=None, - send_at=None, - api_url=None, - api_version=None, -- api_key=None): -+ api_key=None, -+ **kwargs): - ''' - Send out the email using the details from the ``message`` argument. - -@@ -151,14 +153,14 @@ def send(message, - sent as dictionary with at fields as specified in the Mandrill API - documentation. 
- -- async: ``False`` -+ asynchronous: ``False`` - Enable a background sending mode that is optimized for bulk sending. -- In async mode, messages/send will immediately return a status of -- "queued" for every recipient. To handle rejections when sending in async -+ In asynchronous mode, messages/send will immediately return a status of -+ "queued" for every recipient. To handle rejections when sending in asynchronous - mode, set up a webhook for the 'reject' event. Defaults to false for - messages with no more than 10 recipients; messages with more than 10 - recipients are always sent asynchronously, regardless of the value of -- async. -+ asynchronous. - - ip_pool - The name of the dedicated ip pool that should be used to send the -@@ -229,6 +231,11 @@ def send(message, - result: - True - ''' -+ if 'async' in kwargs: # Remove this in Sodium -+ salt.utils.versions.warn_until('Sodium', 'Parameter "async" is renamed to "asynchronous" ' -+ 'and will be removed in version {version}.') -+ asynchronous = bool(kwargs['async']) -+ - params = _get_api_params(api_url=api_url, - api_version=api_version, - api_key=api_key) -@@ -238,7 +245,7 @@ def send(message, - data = { - 'key': params['api_key'], - 'message': message, -- 'async': async, -+ 'async': asynchronous, - 'ip_pool': ip_pool, - 'send_at': send_at - } -diff --git a/salt/modules/saltutil.py b/salt/modules/saltutil.py -index 9cb27858d1..2c152e3ff1 100644 ---- a/salt/modules/saltutil.py -+++ b/salt/modules/saltutil.py -@@ -947,10 +947,11 @@ def refresh_pillar(): - ret = False # Effectively a no-op, since we can't really return without an event system - return ret - -+ - pillar_refresh = salt.utils.functools.alias_function(refresh_pillar, 'pillar_refresh') - - --def refresh_modules(async=True): -+def refresh_modules(**kwargs): - ''' - Signal the minion to refresh the module and grain data - -@@ -964,8 +965,9 @@ def refresh_modules(async=True): - - salt '*' saltutil.refresh_modules - ''' -+ asynchronous = bool(kwargs.get('async', True)) - try: -- if async: -+ if asynchronous: - # If we're going to block, first setup a listener - ret = __salt__['event.fire']({}, 'module_refresh') - else: -diff --git a/salt/netapi/rest_cherrypy/app.py b/salt/netapi/rest_cherrypy/app.py -index 077ccce0be..78ea3c3fef 100644 ---- a/salt/netapi/rest_cherrypy/app.py -+++ b/salt/netapi/rest_cherrypy/app.py -@@ -529,7 +529,7 @@ described above, the most effective and most scalable way to use both Salt and - salt-api is to run commands asynchronously using the ``local_async``, - ``runner_async``, and ``wheel_async`` clients. - --Running async jobs results in being able to process 3x more commands per second -+Running asynchronous jobs results in being able to process 3x more commands per second - for ``LocalClient`` and 17x more commands per second for ``RunnerClient``, in - addition to much less network traffic and memory requirements. Job returns can - be fetched from Salt's job cache via the ``/jobs/`` endpoint, or they can -@@ -2534,7 +2534,7 @@ class WebsocketEndpoint(object): - parent_pipe, child_pipe = Pipe() - handler.pipe = parent_pipe - handler.opts = self.opts -- # Process to handle async push to a client. -+ # Process to handle asynchronous push to a client. - # Each GET request causes a process to be kicked off. 
- proc = Process(target=event_stream, args=(handler, child_pipe)) - proc.start() -diff --git a/salt/netapi/rest_cherrypy/event_processor.py b/salt/netapi/rest_cherrypy/event_processor.py -index e409a00180..f0cf6d361a 100644 ---- a/salt/netapi/rest_cherrypy/event_processor.py -+++ b/salt/netapi/rest_cherrypy/event_processor.py -@@ -180,7 +180,7 @@ class SaltInfo(object): - 'expr_type': 'list', - 'mode': 'client', - 'client': 'local', -- 'async': 'local_async', -+ 'asynchronous': 'local_async', - 'token': token, - }) - -diff --git a/salt/netapi/rest_tornado/event_processor.py b/salt/netapi/rest_tornado/event_processor.py -index d8c338836e..70a379e2c5 100644 ---- a/salt/netapi/rest_tornado/event_processor.py -+++ b/salt/netapi/rest_tornado/event_processor.py -@@ -194,7 +194,7 @@ class SaltInfo(object): - 'expr_type': 'list', - 'mode': 'client', - 'client': 'local', -- 'async': 'local_async', -+ 'asynchronous': 'local_async', - 'token': token, - }) - -diff --git a/salt/netapi/rest_tornado/saltnado.py b/salt/netapi/rest_tornado/saltnado.py -index 2da44960c8..7942033c59 100644 ---- a/salt/netapi/rest_tornado/saltnado.py -+++ b/salt/netapi/rest_tornado/saltnado.py -@@ -244,7 +244,7 @@ def _json_dumps(obj, **kwargs): - - # # master side - # - "runner" (done) --# - "wheel" (need async api...) -+# - "wheel" (need asynchronous api...) - - - AUTH_TOKEN_HEADER = 'X-Auth-Token' -@@ -273,7 +273,7 @@ class Any(Future): - class EventListener(object): - ''' - Class responsible for listening to the salt master event bus and updating -- futures. This is the core of what makes this async, this allows us to do -+ futures. This is the core of what makes this asynchronous, this allows us to do - non-blocking work in the main processes and "wait" for an event to happen - ''' - -@@ -336,7 +336,7 @@ class EventListener(object): - timeout=None - ): - ''' -- Get an event (async of course) return a future that will get it later -+ Get an event (asynchronous of course) return a future that will get it later - ''' - # if the request finished, no reason to allow event fetching, since we - # can't send back to the client -@@ -653,7 +653,7 @@ class SaltAuthHandler(BaseSaltAPIHandler): # pylint: disable=W0223 - - self.write(self.serialize(ret)) - -- # TODO: make async? Underlying library isn't... and we ARE making disk calls :( -+ # TODO: make asynchronous? Underlying library isn't... 
and we ARE making disk calls :( - def post(self): - ''' - :ref:`Authenticate ` against Salt's eauth system -diff --git a/salt/netapi/rest_tornado/saltnado_websockets.py b/salt/netapi/rest_tornado/saltnado_websockets.py -index 89cdfd039a..cf6d51852f 100644 ---- a/salt/netapi/rest_tornado/saltnado_websockets.py -+++ b/salt/netapi/rest_tornado/saltnado_websockets.py -@@ -411,7 +411,7 @@ class FormattedEventsHandler(AllEventsHandler): # pylint: disable=W0223,W0232 - 'tgt': '*', - 'token': self.token, - 'mode': 'client', -- 'async': 'local_async', -+ 'asynchronous': 'local_async', - 'client': 'local' - }) - while True: -diff --git a/salt/returners/cassandra_cql_return.py b/salt/returners/cassandra_cql_return.py -index 8e92e32147..0ec8c2db27 100644 ---- a/salt/returners/cassandra_cql_return.py -+++ b/salt/returners/cassandra_cql_return.py -@@ -204,7 +204,7 @@ def returner(ret): - __salt__['cassandra_cql.cql_query_with_prepare'](query, - 'returner_return', - tuple(statement_arguments), -- async=True) -+ asynchronous=True) - except CommandExecutionError: - log.critical('Could not insert into salt_returns with Cassandra returner.') - raise -@@ -228,7 +228,7 @@ def returner(ret): - __salt__['cassandra_cql.cql_query_with_prepare'](query, - 'returner_minion', - tuple(statement_arguments), -- async=True) -+ asynchronous=True) - except CommandExecutionError: - log.critical('Could not store minion ID with Cassandra returner.') - raise -@@ -270,7 +270,7 @@ def event_return(events): - try: - __salt__['cassandra_cql.cql_query_with_prepare'](query, 'salt_events', - statement_arguments, -- async=True) -+ asynchronous=True) - except CommandExecutionError: - log.critical('Could not store events with Cassandra returner.') - raise -@@ -300,7 +300,7 @@ def save_load(jid, load, minions=None): - try: - __salt__['cassandra_cql.cql_query_with_prepare'](query, 'save_load', - statement_arguments, -- async=True) -+ asynchronous=True) - except CommandExecutionError: - log.critical('Could not save load in jids table.') - raise -diff --git a/salt/runner.py b/salt/runner.py -index 188064665b..ec389a45b0 100644 ---- a/salt/runner.py -+++ b/salt/runner.py -@@ -240,13 +240,13 @@ class Runner(RunnerClient): - if self.opts.get('eauth'): - async_pub = self.cmd_async(low) - else: -- async_pub = self.async(self.opts['fun'], -- low, -- user=user, -- pub=async_pub) -+ async_pub = self.asynchronous(self.opts['fun'], -+ low, -+ user=user, -+ pub=async_pub) - # by default: info will be not enougth to be printed out ! - log.warning( -- 'Running in async mode. Results of this execution may ' -+ 'Running in asynchronous mode. Results of this execution may ' - 'be collected by attaching to the master event bus or ' - 'by examing the master job cache, if configured. 
' - 'This execution is running under tag %s', async_pub['tag'] -diff --git a/salt/thorium/runner.py b/salt/thorium/runner.py -index d6235d40e7..9545eac35c 100644 ---- a/salt/thorium/runner.py -+++ b/salt/thorium/runner.py -@@ -1,6 +1,6 @@ - # -*- coding: utf-8 -*- - ''' --React by calling async runners -+React by calling asynchronous runners - ''' - # Import python libs - from __future__ import absolute_import, print_function, unicode_literals -@@ -14,7 +14,7 @@ def cmd( - arg=(), - **kwargs): - ''' -- Execute a runner async: -+ Execute a runner asynchronous: - - USAGE: - -@@ -42,7 +42,7 @@ def cmd( - func = name - local_opts = {} - local_opts.update(__opts__) -- local_opts['async'] = True # ensure this will be run async -+ local_opts['async'] = True # ensure this will be run asynchronous - local_opts.update({ - 'fun': func, - 'arg': arg, -diff --git a/salt/thorium/wheel.py b/salt/thorium/wheel.py -index 7c98eff4bd..e3c4bf1701 100644 ---- a/salt/thorium/wheel.py -+++ b/salt/thorium/wheel.py -@@ -1,6 +1,6 @@ - # -*- coding: utf-8 -*- - ''' --React by calling async runners -+React by calling asynchronous runners - ''' - # Import python libs - from __future__ import absolute_import, print_function, unicode_literals -@@ -14,7 +14,7 @@ def cmd( - arg=(), - **kwargs): - ''' -- Execute a runner async: -+ Execute a runner asynchronous: - - USAGE: - -diff --git a/salt/transport/client.py b/salt/transport/client.py -index 86c4962f94..ca83ac9376 100644 ---- a/salt/transport/client.py -+++ b/salt/transport/client.py -@@ -10,7 +10,7 @@ from __future__ import absolute_import, print_function, unicode_literals - import logging - - # Import Salt Libs --from salt.utils.async import SyncWrapper -+from salt.utils.asynchronous import SyncWrapper - - log = logging.getLogger(__name__) - -diff --git a/salt/transport/ipc.py b/salt/transport/ipc.py -index 108e62da1f..a6e46e8eed 100644 ---- a/salt/transport/ipc.py -+++ b/salt/transport/ipc.py -@@ -130,7 +130,7 @@ class IPCServer(object): - else: - self.sock = tornado.netutil.bind_unix_socket(self.socket_path) - -- with salt.utils.async.current_ioloop(self.io_loop): -+ with salt.utils.asynchronous.current_ioloop(self.io_loop): - tornado.netutil.add_accept_handler( - self.sock, - self.handle_connection, -@@ -196,7 +196,7 @@ class IPCServer(object): - log.trace('IPCServer: Handling connection ' - 'to address: %s', address) - try: -- with salt.utils.async.current_ioloop(self.io_loop): -+ with salt.utils.asynchronous.current_ioloop(self.io_loop): - stream = IOStream( - connection, - ) -@@ -329,7 +329,7 @@ class IPCClient(object): - break - - if self.stream is None: -- with salt.utils.async.current_ioloop(self.io_loop): -+ with salt.utils.asynchronous.current_ioloop(self.io_loop): - self.stream = IOStream( - socket.socket(sock_type, socket.SOCK_STREAM), - ) -@@ -510,7 +510,7 @@ class IPCMessagePublisher(object): - else: - self.sock = tornado.netutil.bind_unix_socket(self.socket_path) - -- with salt.utils.async.current_ioloop(self.io_loop): -+ with salt.utils.asynchronous.current_ioloop(self.io_loop): - tornado.netutil.add_accept_handler( - self.sock, - self.handle_connection, -@@ -549,7 +549,7 @@ class IPCMessagePublisher(object): - if self.opts['ipc_write_buffer'] > 0: - kwargs['max_write_buffer_size'] = self.opts['ipc_write_buffer'] - log.trace('Setting IPC connection write buffer: %s', (self.opts['ipc_write_buffer'])) -- with salt.utils.async.current_ioloop(self.io_loop): -+ with salt.utils.asynchronous.current_ioloop(self.io_loop): - stream = IOStream( - connection, - 
**kwargs -diff --git a/salt/transport/server.py b/salt/transport/server.py -index 46c14bdb39..1d67dc98af 100644 ---- a/salt/transport/server.py -+++ b/salt/transport/server.py -@@ -55,7 +55,7 @@ class ReqServerChannel(object): - ''' - Do anything you need post-fork. This should handle all incoming payloads - and call payload_handler. You will also be passed io_loop, for all of your -- async needs -+ asynchronous needs - ''' - pass - -diff --git a/salt/transport/tcp.py b/salt/transport/tcp.py -index 4b9f14768a..d9c15773a9 100644 ---- a/salt/transport/tcp.py -+++ b/salt/transport/tcp.py -@@ -19,7 +19,7 @@ import traceback - - # Import Salt Libs - import salt.crypt --import salt.utils.async -+import salt.utils.asynchronous - import salt.utils.event - import salt.utils.files - import salt.utils.platform -@@ -476,7 +476,7 @@ class AsyncTCPPubChannel(salt.transport.mixins.auth.AESPubClientMixin, salt.tran - 'tok': self.tok, - 'data': data, - 'tag': tag} -- req_channel = salt.utils.async.SyncWrapper( -+ req_channel = salt.utils.asynchronous.SyncWrapper( - AsyncTCPReqChannel, (self.opts,) - ) - try: -@@ -603,7 +603,7 @@ class TCPReqServerChannel(salt.transport.mixins.auth.AESReqServerMixin, salt.tra - self.payload_handler = payload_handler - self.io_loop = io_loop - self.serial = salt.payload.Serial(self.opts) -- with salt.utils.async.current_ioloop(self.io_loop): -+ with salt.utils.asynchronous.current_ioloop(self.io_loop): - if USE_LOAD_BALANCER: - self.req_server = LoadBalancerWorker(self.socket_queue, - self.handle_message, -@@ -869,7 +869,7 @@ class SaltMessageClient(object): - - self.io_loop = io_loop or tornado.ioloop.IOLoop.current() - -- with salt.utils.async.current_ioloop(self.io_loop): -+ with salt.utils.asynchronous.current_ioloop(self.io_loop): - self._tcp_client = TCPClientKeepAlive(opts, resolver=resolver) - - self._mid = 1 -@@ -895,7 +895,7 @@ class SaltMessageClient(object): - if hasattr(self, '_stream') and not self._stream.closed(): - # If _stream_return() hasn't completed, it means the IO - # Loop is stopped (such as when using -- # 'salt.utils.async.SyncWrapper'). Ensure that -+ # 'salt.utils.asynchronous.SyncWrapper'). Ensure that - # _stream_return() completes by restarting the IO Loop. - # This will prevent potential errors on shutdown. 
- try: -@@ -969,7 +969,7 @@ class SaltMessageClient(object): - 'source_port': self.source_port} - else: - log.warning('If you need a certain source IP/port, consider upgrading Tornado >= 4.5') -- with salt.utils.async.current_ioloop(self.io_loop): -+ with salt.utils.asynchronous.current_ioloop(self.io_loop): - self._stream = yield self._tcp_client.connect(self.host, - self.port, - ssl_options=self.opts.get('ssl'), -@@ -1441,9 +1441,9 @@ class TCPPubServerChannel(salt.transport.server.PubServerChannel): - pull_uri = int(self.opts.get('tcp_master_publish_pull', 4514)) - else: - pull_uri = os.path.join(self.opts['sock_dir'], 'publish_pull.ipc') -- # TODO: switch to the actual async interface -+ # TODO: switch to the actual asynchronous interface - #pub_sock = salt.transport.ipc.IPCMessageClient(self.opts, io_loop=self.io_loop) -- pub_sock = salt.utils.async.SyncWrapper( -+ pub_sock = salt.utils.asynchronous.SyncWrapper( - salt.transport.ipc.IPCMessageClient, - (pull_uri,) - ) -diff --git a/salt/utils/async.py b/salt/utils/asynchronous.py -similarity index 81% -rename from salt/utils/async.py -rename to salt/utils/asynchronous.py -index 55d21d0ccc..16a7088360 100644 ---- a/salt/utils/async.py -+++ b/salt/utils/asynchronous.py -@@ -1,6 +1,6 @@ - # -*- coding: utf-8 -*- - ''' --Helpers/utils for working with tornado async stuff -+Helpers/utils for working with tornado asynchronous stuff - ''' - - from __future__ import absolute_import, print_function, unicode_literals -@@ -30,9 +30,9 @@ class SyncWrapper(object): - - This is uses as a simple wrapper, for example: - -- async = AsyncClass() -+ asynchronous = AsyncClass() - # this method would reguarly return a future -- future = async.async_method() -+ future = asynchronous.async_method() - - sync = SyncWrapper(async_factory_method, (arg1, arg2), {'kwarg1': 'val'}) - # the sync wrapper will automatically wait on the future -@@ -46,15 +46,15 @@ class SyncWrapper(object): - kwargs['io_loop'] = self.io_loop - - with current_ioloop(self.io_loop): -- self.async = method(*args, **kwargs) -+ self.asynchronous = method(*args, **kwargs) - - def __getattribute__(self, key): - try: - return object.__getattribute__(self, key) - except AttributeError as ex: -- if key == 'async': -+ if key == 'asynchronous': - raise ex -- attr = getattr(self.async, key) -+ attr = getattr(self.asynchronous, key) - if hasattr(attr, '__call__'): - def wrap(*args, **kwargs): - # Overload the ioloop for the func call-- since it might call .current() -@@ -75,15 +75,15 @@ class SyncWrapper(object): - - def __del__(self): - ''' -- On deletion of the async wrapper, make sure to clean up the async stuff -+ On deletion of the asynchronous wrapper, make sure to clean up the asynchronous stuff - ''' -- if hasattr(self, 'async'): -- if hasattr(self.async, 'close'): -+ if hasattr(self, 'asynchronous'): -+ if hasattr(self.asynchronous, 'close'): - # Certain things such as streams should be closed before - # their associated io_loop is closed to allow for proper - # cleanup. 
-- self.async.close() -- del self.async -+ self.asynchronous.close() -+ del self.asynchronous - self.io_loop.close() - del self.io_loop - elif hasattr(self, 'io_loop'): -diff --git a/salt/utils/event.py b/salt/utils/event.py -index 9a62b6c353..a2390730fe 100644 ---- a/salt/utils/event.py -+++ b/salt/utils/event.py -@@ -72,7 +72,7 @@ import tornado.iostream - # Import salt libs - import salt.config - import salt.payload --import salt.utils.async -+import salt.utils.asynchronous - import salt.utils.cache - import salt.utils.dicttrim - import salt.utils.files -@@ -228,7 +228,7 @@ class SaltEvent(object): - :param Bool keep_loop: Pass a boolean to determine if we want to keep - the io loop or destroy it when the event handle - is destroyed. This is useful when using event -- loops from within third party async code -+ loops from within third party asynchronous code - ''' - self.serial = salt.payload.Serial({'serial': 'msgpack'}) - self.keep_loop = keep_loop -@@ -364,7 +364,7 @@ class SaltEvent(object): - return True - - if self._run_io_loop_sync: -- with salt.utils.async.current_ioloop(self.io_loop): -+ with salt.utils.asynchronous.current_ioloop(self.io_loop): - if self.subscriber is None: - self.subscriber = salt.transport.ipc.IPCMessageSubscriber( - self.puburi, -@@ -383,7 +383,7 @@ class SaltEvent(object): - io_loop=self.io_loop - ) - -- # For the async case, the connect will be defered to when -+ # For the asynchronous case, the connect will be defered to when - # set_event_handler() is invoked. - self.cpub = True - return self.cpub -@@ -409,7 +409,7 @@ class SaltEvent(object): - return True - - if self._run_io_loop_sync: -- with salt.utils.async.current_ioloop(self.io_loop): -+ with salt.utils.asynchronous.current_ioloop(self.io_loop): - if self.pusher is None: - self.pusher = salt.transport.ipc.IPCMessageClient( - self.pulluri, -@@ -427,7 +427,7 @@ class SaltEvent(object): - self.pulluri, - io_loop=self.io_loop - ) -- # For the async case, the connect will be deferred to when -+ # For the asynchronous case, the connect will be deferred to when - # fire_event() is invoked. 
- self.cpush = True - return self.cpush -@@ -632,7 +632,7 @@ class SaltEvent(object): - - ret = self._check_pending(tag, match_func) - if ret is None: -- with salt.utils.async.current_ioloop(self.io_loop): -+ with salt.utils.asynchronous.current_ioloop(self.io_loop): - if auto_reconnect: - raise_errors = self.raise_errors - self.raise_errors = True -@@ -743,7 +743,7 @@ class SaltEvent(object): - serialized_data]) - msg = salt.utils.stringutils.to_bytes(event, 'utf-8') - if self._run_io_loop_sync: -- with salt.utils.async.current_ioloop(self.io_loop): -+ with salt.utils.asynchronous.current_ioloop(self.io_loop): - try: - self.io_loop.run_sync(lambda: self.pusher.send(msg)) - except Exception as ex: -@@ -1083,7 +1083,7 @@ class EventPublisher(salt.utils.process.SignalHandlingMultiprocessingProcess): - ''' - salt.utils.process.appendproctitle(self.__class__.__name__) - self.io_loop = tornado.ioloop.IOLoop() -- with salt.utils.async.current_ioloop(self.io_loop): -+ with salt.utils.asynchronous.current_ioloop(self.io_loop): - if self.opts['ipc_mode'] == 'tcp': - epub_uri = int(self.opts['tcp_master_pub_port']) - epull_uri = int(self.opts['tcp_master_pull_port']) -diff --git a/salt/utils/process.py b/salt/utils/process.py -index 20f7feee8a..95c2288da3 100644 ---- a/salt/utils/process.py -+++ b/salt/utils/process.py -@@ -472,7 +472,7 @@ class ProcessManager(object): - del self._process_map[pid] - - @gen.coroutine -- def run(self, async=False): -+ def run(self, asynchronous=False): - ''' - Load and start all available api modules - ''' -@@ -495,7 +495,7 @@ class ProcessManager(object): - # The event-based subprocesses management code was removed from here - # because os.wait() conflicts with the subprocesses management logic - # implemented in `multiprocessing` package. See #35480 for details. -- if async: -+ if asynchronous: - yield gen.sleep(10) - else: - time.sleep(10) -diff --git a/salt/utils/thin.py b/salt/utils/thin.py -index b99e407583..9a74b8d7d6 100644 ---- a/salt/utils/thin.py -+++ b/salt/utils/thin.py -@@ -701,7 +701,7 @@ def gen_min(cachedir, extra_mods='', overwrite=False, so_mods='', - 'salt/utils/openstack', - 'salt/utils/openstack/__init__.py', - 'salt/utils/openstack/swift.py', -- 'salt/utils/async.py', -+ 'salt/utils/asynchronous.py', - 'salt/utils/process.py', - 'salt/utils/jinja.py', - 'salt/utils/rsax931.py', -diff --git a/salt/wheel/__init__.py b/salt/wheel/__init__.py -index abfd776342..65092ef974 100644 ---- a/salt/wheel/__init__.py -+++ b/salt/wheel/__init__.py -@@ -57,7 +57,7 @@ class WheelClient(salt.client.mixins.SyncClientMixin, - return self.low(fun, kwargs, print_event=kwargs.get('print_event', True), full_return=kwargs.get('full_return', False)) - - # TODO: Inconsistent with runner client-- the runner client's master_call gives -- # an async return, unlike this -+ # an asynchronous return, unlike this - def master_call(self, **kwargs): - ''' - Execute a wheel function through the master network interface (eauth). 
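The hunks above and below all apply one recurring fix: "async" became a reserved word in Python 3.7, so it can no longer appear as a parameter or attribute name, and every public entry point that used to accept async=... now reads the old spelling out of **kwargs instead. A minimal sketch of that compatibility pattern, using illustrative names rather than Salt's actual API:

    import warnings

    def refresh_modules(**kwargs):
        # 'async' cannot be declared as a named parameter on Python 3.7+,
        # so the legacy spelling is only reachable through the kwargs dict.
        asynchronous = bool(kwargs.get('async', True))
        if 'async' in kwargs:
            warnings.warn('"async" is renamed to "asynchronous"',
                          DeprecationWarning)
        return 'fired' if asynchronous else 'waited'

    # On Python 3.7+ the call refresh_modules(async=False) is a SyntaxError,
    # but dict expansion still works -- which is why the patch rewrites calls
    # as spawn_callback(self.process_manager.run, **{'async': True}).
    print(refresh_modules(**{'async': False}))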
-@@ -120,7 +120,7 @@ class WheelClient(salt.client.mixins.SyncClientMixin, - {'jid': '20131219224744416681', 'tag': 'salt/wheel/20131219224744416681'} - ''' - fun = low.pop('fun') -- return self.async(fun, low) -+ return self.asynchronous(fun, low) - - def cmd(self, fun, arg=None, pub_data=None, kwarg=None, print_event=True, full_return=False): - ''' -diff --git a/tests/integration/files/engines/runtests_engine.py b/tests/integration/files/engines/runtests_engine.py -index ddb52d5c7f..426ab2a5b2 100644 ---- a/tests/integration/files/engines/runtests_engine.py -+++ b/tests/integration/files/engines/runtests_engine.py -@@ -21,7 +21,7 @@ import logging - - # Import salt libs - import salt.utils.event --import salt.utils.async -+import salt.utils.asynchronous - - # Import 3rd-party libs - from tornado import gen -@@ -70,7 +70,7 @@ class PyTestEngine(object): - self.sock.bind(('localhost', port)) - # become a server socket - self.sock.listen(5) -- with salt.utils.async.current_ioloop(self.io_loop): -+ with salt.utils.asynchronous.current_ioloop(self.io_loop): - netutil.add_accept_handler( - self.sock, - self.handle_connection, -diff --git a/tests/integration/netapi/rest_tornado/test_app.py b/tests/integration/netapi/rest_tornado/test_app.py -index 2efd2e9f3d..beb085db1e 100644 ---- a/tests/integration/netapi/rest_tornado/test_app.py -+++ b/tests/integration/netapi/rest_tornado/test_app.py -@@ -398,7 +398,7 @@ class TestMinionSaltAPIHandler(_SaltnadoIntegrationTestCase): - - def test_post_with_incorrect_client(self): - ''' -- The /minions endpoint is async only, so if you try something else -+ The /minions endpoint is asynchronous only, so if you try something else - make sure you get an error - ''' - # get a token for this test -diff --git a/tests/support/case.py b/tests/support/case.py -index 9de6b81fb7..87aeb13bf6 100644 ---- a/tests/support/case.py -+++ b/tests/support/case.py -@@ -13,7 +13,7 @@ - # pylint: disable=repr-flag-used-in-string - - # Import python libs --from __future__ import absolute_import -+from __future__ import absolute_import, unicode_literals - import os - import re - import sys -@@ -143,17 +143,19 @@ class ShellTestCase(TestCase, AdaptedConfigurationTestCaseMixin): - arg_str, - with_retcode=False, - catch_stderr=False, -- async=False, -+ asynchronous=False, - timeout=60, -- config_dir=None): -+ config_dir=None, -+ **kwargs): - ''' - Execute salt-run - ''' -+ asynchronous = kwargs.get('async', asynchronous) - arg_str = '-c {0}{async_flag} -t {timeout} {1}'.format( - config_dir or self.get_config_dir(), - arg_str, - timeout=timeout, -- async_flag=' --async' if async else '') -+ async_flag=' --async' if asynchronous else '') - return self.run_script('salt-run', - arg_str, - with_retcode=with_retcode, -diff --git a/tests/unit/utils/test_async.py b/tests/unit/utils/test_async.py -index c93538f0dd..694a7aebfe 100644 ---- a/tests/unit/utils/test_async.py -+++ b/tests/unit/utils/test_async.py -@@ -8,7 +8,7 @@ import tornado.testing - import tornado.gen - from tornado.testing import AsyncTestCase - --import salt.utils.async as async -+import salt.utils.asynchronous as asynchronous - - - class HelperA(object): -@@ -24,7 +24,7 @@ class HelperA(object): - class HelperB(object): - def __init__(self, a=None, io_loop=None): - if a is None: -- a = async.SyncWrapper(HelperA) -+ a = asynchronous.SyncWrapper(HelperA) - self.a = a - - @tornado.gen.coroutine -@@ -38,7 +38,7 @@ class TestSyncWrapper(AsyncTestCase): - @tornado.testing.gen_test - def test_helpers(self): - ''' -- Test that the 
helper classes do what we expect within a regular async env -+ Test that the helper classes do what we expect within a regular asynchronous env - ''' - ha = HelperA() - ret = yield ha.sleep() -@@ -50,29 +50,29 @@ class TestSyncWrapper(AsyncTestCase): - - def test_basic_wrap(self): - ''' -- Test that we can wrap an async caller. -+ Test that we can wrap an asynchronous caller. - ''' -- sync = async.SyncWrapper(HelperA) -+ sync = asynchronous.SyncWrapper(HelperA) - ret = sync.sleep() - self.assertTrue(ret) - - def test_double(self): - ''' -- Test when the async wrapper object itself creates a wrap of another thing -+ Test when the asynchronous wrapper object itself creates a wrap of another thing - - This works fine since the second wrap is based on the first's IOLoop so we - don't have to worry about complex start/stop mechanics - ''' -- sync = async.SyncWrapper(HelperB) -+ sync = asynchronous.SyncWrapper(HelperB) - ret = sync.sleep() - self.assertFalse(ret) - - def test_double_sameloop(self): - ''' -- Test async wrappers initiated from the same IOLoop, to ensure that -+ Test asynchronous wrappers initiated from the same IOLoop, to ensure that - we don't wire up both to the same IOLoop (since it causes MANY problems). - ''' -- a = async.SyncWrapper(HelperA) -- sync = async.SyncWrapper(HelperB, (a,)) -+ a = asynchronous.SyncWrapper(HelperA) -+ sync = asynchronous.SyncWrapper(HelperB, (a,)) - ret = sync.sleep() - self.assertFalse(ret) --- -2.17.1 - - diff --git a/add-virt.all_capabilities.patch b/add-virt.all_capabilities.patch new file mode 100644 index 0000000..fcb8f96 --- /dev/null +++ b/add-virt.all_capabilities.patch @@ -0,0 +1,214 @@ +From 0fd1e40e7149dd1a33f9a4497fa4e31c78ddfba7 Mon Sep 17 00:00:00 2001 +From: =?UTF-8?q?C=C3=A9dric=20Bosdonnat?= +Date: Thu, 18 Oct 2018 13:32:59 +0200 +Subject: [PATCH] Add virt.all_capabilities + +In order to get all possible capabilities from a host, the user has to +call virt.capabilities, and then loop over the guests and domains +before calling virt.domain_capabilities for each of them. + +This commit embeds all this logic to get them all in a single +virt.all_capabilities call. +--- + salt/modules/virt.py | 107 +++++++++++++++++++++++--------- + tests/unit/modules/test_virt.py | 56 +++++++++++++++++ + 2 files changed, 134 insertions(+), 29 deletions(-) + +diff --git a/salt/modules/virt.py b/salt/modules/virt.py +index b45c5f522d..0921122a8a 100644 +--- a/salt/modules/virt.py ++++ b/salt/modules/virt.py +@@ -4094,37 +4094,10 @@ def _parse_caps_loader(node): + return result + + +-def domain_capabilities(emulator=None, arch=None, machine=None, domain=None, **kwargs): ++def _parse_domain_caps(caps): + ''' +- Return the domain capabilities given an emulator, architecture, machine or virtualization type. +- +- .. versionadded:: 2019.2.0 +- +- :param emulator: return the capabilities for the given emulator binary +- :param arch: return the capabilities for the given CPU architecture +- :param machine: return the capabilities for the given emulated machine type +- :param domain: return the capabilities for the given virtualization type. +- :param connection: libvirt connection URI, overriding defaults +- :param username: username to connect with, overriding defaults +- :param password: password to connect with, overriding defaults +- +- The list of the possible emulator, arch, machine and domain can be found in +- the host capabilities output. +- +- If none of the parameters is provided the libvirt default domain capabilities +- will be returned. 
+- +- CLI Example: +- +- .. code-block:: bash +- +- salt '*' virt.domain_capabilities arch='x86_64' domain='kvm' +- ++ Parse the XML document of domain capabilities into a structure. + ''' +- conn = __get_conn(**kwargs) +- caps = ElementTree.fromstring(conn.getDomainCapabilities(emulator, arch, machine, domain, 0)) +- conn.close() +- + result = { + 'emulator': caps.find('path').text if caps.find('path') is not None else None, + 'domain': caps.find('domain').text if caps.find('domain') is not None else None, +@@ -4164,6 +4137,82 @@ def domain_capabilities(emulator=None, arch=None, machine=None, domain=None, **k + return result + + ++def domain_capabilities(emulator=None, arch=None, machine=None, domain=None, **kwargs): ++ ''' ++ Return the domain capabilities given an emulator, architecture, machine or virtualization type. ++ ++ .. versionadded:: Fluorine ++ ++ :param emulator: return the capabilities for the given emulator binary ++ :param arch: return the capabilities for the given CPU architecture ++ :param machine: return the capabilities for the given emulated machine type ++ :param domain: return the capabilities for the given virtualization type. ++ :param connection: libvirt connection URI, overriding defaults ++ :param username: username to connect with, overriding defaults ++ :param password: password to connect with, overriding defaults ++ ++ The list of the possible emulator, arch, machine and domain can be found in ++ the host capabilities output. ++ ++ If none of the parameters is provided, the libvirt default one is returned. ++ ++ CLI Example: ++ ++ .. code-block:: bash ++ ++ salt '*' virt.domain_capabilities arch='x86_64' domain='kvm' ++ ++ ''' ++ conn = __get_conn(**kwargs) ++ result = [] ++ try: ++ caps = ElementTree.fromstring(conn.getDomainCapabilities(emulator, arch, machine, domain, 0)) ++ result = _parse_domain_caps(caps) ++ finally: ++ conn.close() ++ ++ return result ++ ++ ++def all_capabilities(**kwargs): ++ ''' ++ Return the host and domain capabilities in a single call. ++ ++ .. versionadded:: Neon ++ ++ :param connection: libvirt connection URI, overriding defaults ++ :param username: username to connect with, overriding defaults ++ :param password: password to connect with, overriding defaults ++ ++ CLI Example: ++ ++ .. 
code-block:: bash
++
++        salt '*' virt.all_capabilities
++
++    '''
++    conn = __get_conn(**kwargs)
++    result = {}
++    try:
++        host_caps = ElementTree.fromstring(conn.getCapabilities())
++        domains = [[(guest.get('arch', {}).get('name', None), key)
++                    for key in guest.get('arch', {}).get('domains', {}).keys()]
++                   for guest in [_parse_caps_guest(guest) for guest in host_caps.findall('guest')]]
++        flattened = [pair for item in (x for x in domains) for pair in item]
++        result = {
++                'host': {
++                    'host': _parse_caps_host(host_caps.find('host')),
++                    'guests': [_parse_caps_guest(guest) for guest in host_caps.findall('guest')]
++                },
++                'domains': [_parse_domain_caps(ElementTree.fromstring(
++                                conn.getDomainCapabilities(None, arch, None, domain)))
++                            for (arch, domain) in flattened]}
++    finally:
++        conn.close()
++
++    return result
++
++
+ def cpu_baseline(full=False, migratable=False, out='libvirt', **kwargs):
+     '''
+     Return the optimal 'custom' CPU baseline config for VM's on this minion
+diff --git a/tests/unit/modules/test_virt.py b/tests/unit/modules/test_virt.py
+index 3a69adece1..bd34962a6a 100644
+--- a/tests/unit/modules/test_virt.py
++++ b/tests/unit/modules/test_virt.py
+@@ -2204,6 +2204,62 @@ class VirtTestCase(TestCase, LoaderModuleMockMixin):
+
+         self.assertEqual(expected, caps)
+
++    def test_all_capabilities(self):
++        '''
++        Test the virt.all_capabilities default output
++        '''
++        domainXml = '''
++
++            /usr/bin/qemu-system-x86_64
++            kvm
++            virt-2.12
++            x86_64
++
++
++
++        '''
++        hostXml = '''
++
++
++                44454c4c-3400-105a-8033-b3c04f4b344a
++
++                    x86_64
++                    Nehalem
++                    Intel
++
++
++
++
++
++                hvm
++
++                    64
++                    /usr/bin/qemu-system-x86_64
++                    pc-i440fx-2.6
++                    pc
++                    pc-0.12
++
++
++                    /usr/bin/qemu-kvm
++                    pc-i440fx-2.6
++                    pc
++                    pc-0.12
++
++
++
++
++        '''
++
++        # pylint: disable=no-member
++        self.mock_conn.getCapabilities.return_value = hostXml
++        self.mock_conn.getDomainCapabilities.side_effect = [
++            domainXml, domainXml.replace('kvm', 'qemu')]
++        # pylint: enable=no-member
++
++        caps = virt.all_capabilities()
++        self.assertEqual('44454c4c-3400-105a-8033-b3c04f4b344a', caps['host']['host']['uuid'])
++        self.assertEqual(set(['qemu', 'kvm']), set([domainCaps['domain'] for domainCaps in caps['domains']]))
++
+     def test_network_tag(self):
+         '''
+         Test virt._get_net_xml() with VLAN tag
+--
+2.20.1
+
+
diff --git a/add-virt.volume_infos-and-virt.volume_delete.patch b/add-virt.volume_infos-and-virt.volume_delete.patch
new file mode 100644
index 0000000..c2d5622
--- /dev/null
+++ b/add-virt.volume_infos-and-virt.volume_delete.patch
@@ -0,0 +1,334 @@
+From 2536ee56bd0060c024994f97388f9975ccbe1ee1 Mon Sep 17 00:00:00 2001
+From: =?UTF-8?q?C=C3=A9dric=20Bosdonnat?=
+Date: Fri, 15 Feb 2019 17:28:00 +0100
+Subject: [PATCH] Add virt.volume_infos() and virt.volume_delete()
+
+Expose more functions to handle libvirt storage volumes.
+
+virt.volume_infos() exposes information about the volumes, either for one
+or all the volumes. Among the provided data, this function exposes the
+names of the virtual machines using the volumes of file type.
+
+virt.volume_delete() allows removing a given volume.
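A rough usage sketch for the two new calls (hypothetical minion id, pool and volume names; only the function names and the returned `used_by` field come from the patch itself):

.. code-block:: python

    # Query the new virt.volume_infos from a master and only delete the
    # volume when no domain uses it as a disk or as a backing store.
    import salt.client

    client = salt.client.LocalClient()

    infos = client.cmd('virt-host', 'virt.volume_infos', ['default', 'test.qcow2'])
    vol = infos['virt-host']['default']['test.qcow2']
    print('capacity={0}, used by: {1}'.format(vol['capacity'], vol['used_by']))

    if not vol['used_by']:
        client.cmd('virt-host', 'virt.volume_delete', ['default', 'test.qcow2'])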
+---
+ salt/modules/virt.py            | 113 ++++++++++++++++++++
+ tests/unit/modules/test_virt.py | 184 ++++++++++++++++++++++++++++++++
+ 2 files changed, 297 insertions(+)
+
+diff --git a/salt/modules/virt.py b/salt/modules/virt.py
+index 0921122a8a..4a301f289c 100644
+--- a/salt/modules/virt.py
++++ b/salt/modules/virt.py
+@@ -4988,3 +4988,116 @@ def pool_list_volumes(name, **kwargs):
+         return pool.listVolumes()
+     finally:
+         conn.close()
++
++
++def _get_storage_vol(conn, pool, vol):
++    '''
++    Helper function getting a storage volume. Will throw a libvirtError
++    if the pool or the volume couldn't be found.
++    '''
++    pool_obj = conn.storagePoolLookupByName(pool)
++    return pool_obj.storageVolLookupByName(vol)
++
++
++def _get_all_volumes_paths(conn):
++    '''
++    Extract the path and backing stores path of all volumes.
++
++    :param conn: libvirt connection to use
++    '''
++    volumes = [vol for l in [obj.listAllVolumes() for obj in conn.listAllStoragePools()] for vol in l]
++    return {vol.path(): [path.text for path in ElementTree.fromstring(vol.XMLDesc()).findall('.//backingStore/path')]
++            for vol in volumes}
++
++
++def volume_infos(pool, volume, **kwargs):
++    '''
++    Provide details on a storage volume. If no volume name is provided, the infos
++    of all the volumes contained in the pool are provided. If no pool is provided,
++    the infos of the volumes of all pools are output.
++
++    :param pool: libvirt storage pool name
++    :param volume: name of the volume to get infos from
++    :param connection: libvirt connection URI, overriding defaults
++    :param username: username to connect with, overriding defaults
++    :param password: password to connect with, overriding defaults
++
++    .. versionadded:: Neon
++
++    CLI Example:
++
++    .. code-block:: bash
++
++        salt "*" virt.volume_infos <pool> <volume>
++    '''
++    result = {}
++    conn = __get_conn(**kwargs)
++    try:
++        backing_stores = _get_all_volumes_paths(conn)
++        disks = {domain.name():
++                 {node.get('file') for node
++                  in ElementTree.fromstring(domain.XMLDesc(0)).findall('.//disk/source/[@file]')}
++                 for domain in _get_domain(conn)}
++
++        def _volume_extract_infos(vol):
++            '''
++            Format the volume info dictionary
++
++            :param vol: the libvirt storage volume object.
++            '''
++            types = ['file', 'block', 'dir', 'network', 'netdir', 'ploop']
++            infos = vol.info()
++
++            # If we have a path, check its use.
++            used_by = []
++            if vol.path():
++                as_backing_store = {path for (path, all_paths) in backing_stores.items() if vol.path() in all_paths}
++                used_by = [vm_name for (vm_name, vm_disks) in disks.items()
++                           if vm_disks & as_backing_store or vol.path() in vm_disks]
++
++            return {
++                'type': types[infos[0]] if infos[0] < len(types) else 'unknown',
++                'key': vol.key(),
++                'path': vol.path(),
++                'capacity': infos[1],
++                'allocation': infos[2],
++                'used_by': used_by,
++            }
++
++        pools = [obj for obj in conn.listAllStoragePools() if pool is None or obj.name() == pool]
++        vols = {pool_obj.name(): {vol.name(): _volume_extract_infos(vol)
++                                  for vol in pool_obj.listAllVolumes()
++                                  if volume is None or vol.name() == volume}
++                for pool_obj in pools}
++        return {pool_name: volumes for (pool_name, volumes) in vols.items() if volumes}
++    except libvirt.libvirtError as err:
++        log.debug('Silenced libvirt error: %s', str(err))
++    finally:
++        conn.close()
++    return result
++
++
++def volume_delete(pool, volume, **kwargs):
++    '''
++    Delete a libvirt managed volume.
++ ++ :param pool: libvirt storage pool name ++ :param volume: name of the volume to delete ++ :param connection: libvirt connection URI, overriding defaults ++ :param username: username to connect with, overriding defaults ++ :param password: password to connect with, overriding defaults ++ ++ .. versionadded:: Neon ++ ++ CLI Example: ++ ++ .. code-block:: bash ++ ++ salt "*" virt.volume_delete ++ ''' ++ conn = __get_conn(**kwargs) ++ try: ++ vol = _get_storage_vol(conn, pool, volume) ++ return not bool(vol.delete()) ++ finally: ++ conn.close() +diff --git a/tests/unit/modules/test_virt.py b/tests/unit/modules/test_virt.py +index bd34962a6a..55005f1d04 100644 +--- a/tests/unit/modules/test_virt.py ++++ b/tests/unit/modules/test_virt.py +@@ -2698,3 +2698,187 @@ class VirtTestCase(TestCase, LoaderModuleMockMixin): + self.mock_conn.storagePoolLookupByName.return_value = mock_pool + # pylint: enable=no-member + self.assertEqual(names, virt.pool_list_volumes('default')) ++ ++ def test_volume_infos(self): ++ ''' ++ Test virt.volume_infos ++ ''' ++ vms_disks = [ ++ ''' ++ ++ ++ ++ ++ ++ ''', ++ ''' ++ ++ ++ ++ ++ ++ ''', ++ ''' ++ ++ ++ ++ ++ ++ ''' ++ ] ++ mock_vms = [] ++ for idx, disk in enumerate(vms_disks): ++ vm = MagicMock() ++ # pylint: disable=no-member ++ vm.name.return_value = 'vm{0}'.format(idx) ++ vm.XMLDesc.return_value = ''' ++ ++ vm{0} ++ {1} ++ ++ '''.format(idx, disk) ++ # pylint: enable=no-member ++ mock_vms.append(vm) ++ ++ mock_pool_data = [ ++ { ++ 'name': 'pool0', ++ 'volumes': [ ++ { ++ 'key': '/key/of/vol0', ++ 'name': 'vol0', ++ 'path': '/path/to/vol0.qcow2', ++ 'info': [0, 123456789, 123456], ++ 'backingStore': None ++ } ++ ] ++ }, ++ { ++ 'name': 'pool1', ++ 'volumes': [ ++ { ++ 'key': '/key/of/vol1', ++ 'name': 'vol1', ++ 'path': '/path/to/vol1.qcow2', ++ 'info': [0, 12345, 1234], ++ 'backingStore': None ++ }, ++ { ++ 'key': '/key/of/vol2', ++ 'name': 'vol2', ++ 'path': '/path/to/vol2.qcow2', ++ 'info': [0, 12345, 1234], ++ 'backingStore': '/path/to/vol0.qcow2' ++ }, ++ ], ++ } ++ ] ++ mock_pools = [] ++ for pool_data in mock_pool_data: ++ mock_pool = MagicMock() ++ mock_pool.name.return_value = pool_data['name'] # pylint: disable=no-member ++ mock_volumes = [] ++ for vol_data in pool_data['volumes']: ++ mock_volume = MagicMock() ++ # pylint: disable=no-member ++ mock_volume.name.return_value = vol_data['name'] ++ mock_volume.key.return_value = vol_data['key'] ++ mock_volume.path.return_value = '/path/to/{0}.qcow2'.format(vol_data['name']) ++ mock_volume.info.return_value = vol_data['info'] ++ backing_store = ''' ++ ++ qcow2 ++ {0} ++ ++ '''.format(vol_data['backingStore']) if vol_data['backingStore'] else '' ++ mock_volume.XMLDesc.return_value = ''' ++ ++ {0} ++ ++ qcow2 ++ /path/to/{0}.qcow2 ++ ++ {1} ++ ++ '''.format(vol_data['name'], backing_store) ++ mock_volumes.append(mock_volume) ++ # pylint: enable=no-member ++ mock_pool.listAllVolumes.return_value = mock_volumes # pylint: disable=no-member ++ mock_pools.append(mock_pool) ++ ++ self.mock_conn.listAllStoragePools.return_value = mock_pools # pylint: disable=no-member ++ ++ with patch('salt.modules.virt._get_domain', MagicMock(return_value=mock_vms)): ++ actual = virt.volume_infos('pool0', 'vol0') ++ self.assertEqual(1, len(actual.keys())) ++ self.assertEqual(1, len(actual['pool0'].keys())) ++ self.assertEqual(['vm0', 'vm2'], sorted(actual['pool0']['vol0']['used_by'])) ++ self.assertEqual('/path/to/vol0.qcow2', actual['pool0']['vol0']['path']) ++ self.assertEqual('file', actual['pool0']['vol0']['type']) ++ 
self.assertEqual('/key/of/vol0', actual['pool0']['vol0']['key']) ++ self.assertEqual(123456789, actual['pool0']['vol0']['capacity']) ++ self.assertEqual(123456, actual['pool0']['vol0']['allocation']) ++ ++ self.assertEqual(virt.volume_infos('pool1', None), { ++ 'pool1': { ++ 'vol1': { ++ 'type': 'file', ++ 'key': '/key/of/vol1', ++ 'path': '/path/to/vol1.qcow2', ++ 'capacity': 12345, ++ 'allocation': 1234, ++ 'used_by': [], ++ }, ++ 'vol2': { ++ 'type': 'file', ++ 'key': '/key/of/vol2', ++ 'path': '/path/to/vol2.qcow2', ++ 'capacity': 12345, ++ 'allocation': 1234, ++ 'used_by': ['vm2'], ++ } ++ } ++ }) ++ ++ self.assertEqual(virt.volume_infos(None, 'vol2'), { ++ 'pool1': { ++ 'vol2': { ++ 'type': 'file', ++ 'key': '/key/of/vol2', ++ 'path': '/path/to/vol2.qcow2', ++ 'capacity': 12345, ++ 'allocation': 1234, ++ 'used_by': ['vm2'], ++ } ++ } ++ }) ++ ++ def test_volume_delete(self): ++ ''' ++ Test virt.volume_delete ++ ''' ++ mock_delete = MagicMock(side_effect=[0, 1]) ++ mock_volume = MagicMock() ++ mock_volume.delete = mock_delete # pylint: disable=no-member ++ mock_pool = MagicMock() ++ # pylint: disable=no-member ++ mock_pool.storageVolLookupByName.side_effect = [ ++ mock_volume, ++ mock_volume, ++ self.mock_libvirt.libvirtError("Missing volume"), ++ mock_volume, ++ ] ++ self.mock_conn.storagePoolLookupByName.side_effect = [ ++ mock_pool, ++ mock_pool, ++ mock_pool, ++ self.mock_libvirt.libvirtError("Missing pool"), ++ ] ++ ++ # pylint: enable=no-member ++ self.assertTrue(virt.volume_delete('default', 'test_volume')) ++ self.assertFalse(virt.volume_delete('default', 'test_volume')) ++ with self.assertRaises(self.mock_libvirt.libvirtError): ++ virt.volume_delete('default', 'missing') ++ virt.volume_delete('missing', 'test_volume') ++ self.assertEqual(mock_delete.call_count, 2) +-- +2.20.1 + + diff --git a/align-suse-salt-master.service-limitnofiles-limit-wi.patch b/align-suse-salt-master.service-limitnofiles-limit-wi.patch deleted file mode 100644 index 292dee4..0000000 --- a/align-suse-salt-master.service-limitnofiles-limit-wi.patch +++ /dev/null @@ -1,28 +0,0 @@ -From 816c7ec3b72510346deef17deb2990a09ddab03a Mon Sep 17 00:00:00 2001 -From: =?UTF-8?q?Pablo=20Su=C3=A1rez=20Hern=C3=A1ndez?= - -Date: Thu, 31 May 2018 10:58:16 +0100 -Subject: [PATCH] Align SUSE salt-master.service 'LimitNOFILES' limit - with upstream Salt - ---- - pkg/suse/salt-master.service | 2 +- - 1 file changed, 1 insertion(+), 1 deletion(-) - -diff --git a/pkg/suse/salt-master.service b/pkg/suse/salt-master.service -index c0ea4606d8..b31c1a1373 100644 ---- a/pkg/suse/salt-master.service -+++ b/pkg/suse/salt-master.service -@@ -4,7 +4,7 @@ Documentation=man:salt-master(1) file:///usr/share/doc/salt/html/contents.html h - After=network.target - - [Service] --LimitNOFILE=16384 -+LimitNOFILE=100000 - Type=simple - ExecStart=/usr/bin/salt-master - TasksMax=infinity --- -2.13.7 - - diff --git a/async-batch-implementation.patch b/async-batch-implementation.patch new file mode 100644 index 0000000..4f91985 --- /dev/null +++ b/async-batch-implementation.patch @@ -0,0 +1,960 @@ +From dfd16dd5968aae96e36e0dee412864fc765b62fb Mon Sep 17 00:00:00 2001 +From: Mihai Dinca +Date: Fri, 16 Nov 2018 17:05:29 +0100 +Subject: [PATCH] Async batch implementation + +Add find_job checks + +Check if should close on all events + +Make batch_delay a request parameter + +Allow multiple event handlers + +Use config value for gather_job_timeout when not in payload + +Add async batch unittests + +Allow metadata to pass + +Pass metadata only to batch 
jobs + +Add the metadata to the start/done events + +Pass only metadata not all **kwargs + +Add separate batch presence_ping timeout +--- + salt/auth/__init__.py | 4 +- + salt/cli/batch.py | 91 ++++++-- + salt/cli/batch_async.py | 227 +++++++++++++++++++ + salt/client/__init__.py | 44 +--- + salt/master.py | 25 ++ + salt/netapi/__init__.py | 3 +- + salt/transport/ipc.py | 11 +- + salt/utils/event.py | 11 +- + tests/unit/cli/test_batch_async.py | 351 +++++++++++++++++++++++++++++ + 9 files changed, 707 insertions(+), 60 deletions(-) + create mode 100644 salt/cli/batch_async.py + create mode 100644 tests/unit/cli/test_batch_async.py + +diff --git a/salt/auth/__init__.py b/salt/auth/__init__.py +index 61fbb018fd..a8aefa7091 100644 +--- a/salt/auth/__init__.py ++++ b/salt/auth/__init__.py +@@ -51,7 +51,9 @@ AUTH_INTERNAL_KEYWORDS = frozenset([ + 'metadata', + 'print_event', + 'raw', +- 'yield_pub_data' ++ 'yield_pub_data', ++ 'batch', ++ 'batch_delay' + ]) + + +diff --git a/salt/cli/batch.py b/salt/cli/batch.py +index e3a7bf9bcf..4bd07f584a 100644 +--- a/salt/cli/batch.py ++++ b/salt/cli/batch.py +@@ -26,6 +26,79 @@ import logging + log = logging.getLogger(__name__) + + ++def get_bnum(opts, minions, quiet): ++ ''' ++ Return the active number of minions to maintain ++ ''' ++ partition = lambda x: float(x) / 100.0 * len(minions) ++ try: ++ if '%' in opts['batch']: ++ res = partition(float(opts['batch'].strip('%'))) ++ if res < 1: ++ return int(math.ceil(res)) ++ else: ++ return int(res) ++ else: ++ return int(opts['batch']) ++ except ValueError: ++ if not quiet: ++ salt.utils.stringutils.print_cli('Invalid batch data sent: {0}\nData must be in the ' ++ 'form of %10, 10% or 3'.format(opts['batch'])) ++ ++ ++def batch_get_opts( ++ tgt, ++ fun, ++ batch, ++ parent_opts, ++ arg=(), ++ tgt_type='glob', ++ ret='', ++ kwarg=None, ++ **kwargs): ++ # We need to re-import salt.utils.args here ++ # even though it has already been imported. ++ # when cmd_batch is called via the NetAPI ++ # the module is unavailable. 
++ import salt.utils.args ++ ++ arg = salt.utils.args.condition_input(arg, kwarg) ++ opts = {'tgt': tgt, ++ 'fun': fun, ++ 'arg': arg, ++ 'tgt_type': tgt_type, ++ 'ret': ret, ++ 'batch': batch, ++ 'failhard': kwargs.get('failhard', False), ++ 'raw': kwargs.get('raw', False)} ++ ++ if 'timeout' in kwargs: ++ opts['timeout'] = kwargs['timeout'] ++ if 'gather_job_timeout' in kwargs: ++ opts['gather_job_timeout'] = kwargs['gather_job_timeout'] ++ if 'batch_wait' in kwargs: ++ opts['batch_wait'] = int(kwargs['batch_wait']) ++ ++ for key, val in six.iteritems(parent_opts): ++ if key not in opts: ++ opts[key] = val ++ ++ return opts ++ ++ ++def batch_get_eauth(kwargs): ++ eauth = {} ++ if 'eauth' in kwargs: ++ eauth['eauth'] = kwargs.pop('eauth') ++ if 'username' in kwargs: ++ eauth['username'] = kwargs.pop('username') ++ if 'password' in kwargs: ++ eauth['password'] = kwargs.pop('password') ++ if 'token' in kwargs: ++ eauth['token'] = kwargs.pop('token') ++ return eauth ++ ++ + class Batch(object): + ''' + Manage the execution of batch runs +@@ -80,23 +153,7 @@ class Batch(object): + return (list(fret), ping_gen, nret.difference(fret)) + + def get_bnum(self): +- ''' +- Return the active number of minions to maintain +- ''' +- partition = lambda x: float(x) / 100.0 * len(self.minions) +- try: +- if '%' in self.opts['batch']: +- res = partition(float(self.opts['batch'].strip('%'))) +- if res < 1: +- return int(math.ceil(res)) +- else: +- return int(res) +- else: +- return int(self.opts['batch']) +- except ValueError: +- if not self.quiet: +- salt.utils.stringutils.print_cli('Invalid batch data sent: {0}\nData must be in the ' +- 'form of %10, 10% or 3'.format(self.opts['batch'])) ++ return get_bnum(self.opts, self.minions, self.quiet) + + def __update_wait(self, wait): + now = datetime.now() +diff --git a/salt/cli/batch_async.py b/salt/cli/batch_async.py +new file mode 100644 +index 0000000000..3160d46d8b +--- /dev/null ++++ b/salt/cli/batch_async.py +@@ -0,0 +1,227 @@ ++# -*- coding: utf-8 -*- ++''' ++Execute a job on the targeted minions by using a moving window of fixed size `batch`. ++''' ++ ++# Import python libs ++from __future__ import absolute_import, print_function, unicode_literals ++import tornado ++ ++# Import salt libs ++import salt.client ++ ++# pylint: enable=import-error,no-name-in-module,redefined-builtin ++import logging ++import fnmatch ++ ++log = logging.getLogger(__name__) ++ ++from salt.cli.batch import get_bnum, batch_get_opts, batch_get_eauth ++ ++ ++class BatchAsync(object): ++ ''' ++ Run a job on the targeted minions by using a moving window of fixed size `batch`. ++ ++ ``BatchAsync`` is used to execute a job on the targeted minions by keeping ++ the number of concurrent running minions to the size of `batch` parameter. 
++
++    The control parameters are:
++        - batch: number/percentage of concurrent running minions
++        - batch_delay: minimum wait time between batches
++        - batch_presence_ping_timeout: time to wait for presence pings before starting the batch
++        - gather_job_timeout: `find_job` timeout
++        - timeout: time to wait before firing a `find_job`
++
++    When the batch starts, a `start` event is fired:
++         - tag: salt/batch/<batch-jid>/start
++         - data: {
++             "available_minions": self.minions,
++             "down_minions": self.down_minions
++           }
++
++    When the batch ends, a `done` event is fired:
++         - tag: salt/batch/<batch-jid>/done
++         - data: {
++             "available_minions": self.minions,
++             "down_minions": self.down_minions,
++             "done_minions": self.done_minions,
++             "timedout_minions": self.timedout_minions
++           }
++    '''
++    def __init__(self, parent_opts, jid_gen, clear_load):
++        ioloop = tornado.ioloop.IOLoop.current()
++        self.local = salt.client.get_local_client(parent_opts['conf_file'])
++        if 'gather_job_timeout' in clear_load['kwargs']:
++            clear_load['gather_job_timeout'] = clear_load['kwargs'].pop('gather_job_timeout')
++        else:
++            clear_load['gather_job_timeout'] = self.local.opts['gather_job_timeout']
++        self.batch_presence_ping_timeout = clear_load['kwargs'].get('batch_presence_ping_timeout', None)
++        self.batch_delay = clear_load['kwargs'].get('batch_delay', 1)
++        self.opts = batch_get_opts(
++            clear_load.pop('tgt'),
++            clear_load.pop('fun'),
++            clear_load['kwargs'].pop('batch'),
++            self.local.opts,
++            **clear_load)
++        self.eauth = batch_get_eauth(clear_load['kwargs'])
++        self.metadata = clear_load['kwargs'].get('metadata', {})
++        self.minions = set()
++        self.down_minions = set()
++        self.timedout_minions = set()
++        self.done_minions = set()
++        self.active = set()
++        self.initialized = False
++        self.ping_jid = jid_gen()
++        self.batch_jid = jid_gen()
++        self.find_job_jid = jid_gen()
++        self.find_job_returned = set()
++        self.event = salt.utils.event.get_event(
++            'master',
++            self.opts['sock_dir'],
++            self.opts['transport'],
++            opts=self.opts,
++            listen=True,
++            io_loop=ioloop,
++            keep_loop=True)
++
++    def __set_event_handler(self):
++        ping_return_pattern = 'salt/job/{0}/ret/*'.format(self.ping_jid)
++        batch_return_pattern = 'salt/job/{0}/ret/*'.format(self.batch_jid)
++        find_job_return_pattern = 'salt/job/{0}/ret/*'.format(self.find_job_jid)
++        self.event.subscribe(ping_return_pattern, match_type='glob')
++        self.event.subscribe(batch_return_pattern, match_type='glob')
++        self.event.subscribe(find_job_return_pattern, match_type='glob')
++        self.event.patterns = {
++            (ping_return_pattern, 'ping_return'),
++            (batch_return_pattern, 'batch_run'),
++            (find_job_return_pattern, 'find_job_return')
++        }
++        self.event.set_event_handler(self.__event_handler)
++
++    def __event_handler(self, raw):
++        if not self.event:
++            return
++        mtag, data = self.event.unpack(raw, self.event.serial)
++        for (pattern, op) in self.event.patterns:
++            if fnmatch.fnmatch(mtag, pattern):
++                minion = data['id']
++                if op == 'ping_return':
++                    self.minions.add(minion)
++                    self.down_minions.remove(minion)
++                    if not self.down_minions:
++                        self.event.io_loop.spawn_callback(self.start_batch)
++                elif op == 'find_job_return':
++                    self.find_job_returned.add(minion)
++                elif op == 'batch_run':
++                    if minion in self.active:
++                        self.active.remove(minion)
++                        self.done_minions.add(minion)
++                        # call later so that we maybe gather more returns
++                        self.event.io_loop.call_later(self.batch_delay, self.schedule_next)
++
++        if self.initialized and self.done_minions ==
self.minions.difference(self.timedout_minions): ++ self.end_batch() ++ ++ def _get_next(self): ++ to_run = self.minions.difference( ++ self.done_minions).difference( ++ self.active).difference( ++ self.timedout_minions) ++ next_batch_size = min( ++ len(to_run), # partial batch (all left) ++ self.batch_size - len(self.active) # full batch or available slots ++ ) ++ return set(list(to_run)[:next_batch_size]) ++ ++ @tornado.gen.coroutine ++ def check_find_job(self, minions): ++ did_not_return = minions.difference(self.find_job_returned) ++ if did_not_return: ++ for minion in did_not_return: ++ if minion in self.find_job_returned: ++ self.find_job_returned.remove(minion) ++ if minion in self.active: ++ self.active.remove(minion) ++ self.timedout_minions.add(minion) ++ running = minions.difference(did_not_return).difference(self.done_minions).difference(self.timedout_minions) ++ if running: ++ self.event.io_loop.add_callback(self.find_job, running) ++ ++ @tornado.gen.coroutine ++ def find_job(self, minions): ++ not_done = minions.difference(self.done_minions) ++ ping_return = yield self.local.run_job_async( ++ not_done, ++ 'saltutil.find_job', ++ [self.batch_jid], ++ 'list', ++ gather_job_timeout=self.opts['gather_job_timeout'], ++ jid=self.find_job_jid, ++ **self.eauth) ++ self.event.io_loop.call_later( ++ self.opts['gather_job_timeout'], ++ self.check_find_job, ++ not_done) ++ ++ @tornado.gen.coroutine ++ def start(self): ++ self.__set_event_handler() ++ #start batching even if not all minions respond to ping ++ self.event.io_loop.call_later( ++ self.batch_presence_ping_timeout or self.opts['gather_job_timeout'], ++ self.start_batch) ++ ping_return = yield self.local.run_job_async( ++ self.opts['tgt'], ++ 'test.ping', ++ [], ++ self.opts.get( ++ 'selected_target_option', ++ self.opts.get('tgt_type', 'glob') ++ ), ++ gather_job_timeout=self.opts['gather_job_timeout'], ++ jid=self.ping_jid, ++ metadata=self.metadata, ++ **self.eauth) ++ self.down_minions = set(ping_return['minions']) ++ ++ @tornado.gen.coroutine ++ def start_batch(self): ++ if not self.initialized: ++ self.batch_size = get_bnum(self.opts, self.minions, True) ++ self.initialized = True ++ data = { ++ "available_minions": self.minions, ++ "down_minions": self.down_minions, ++ "metadata": self.metadata ++ } ++ self.event.fire_event(data, "salt/batch/{0}/start".format(self.batch_jid)) ++ yield self.schedule_next() ++ ++ def end_batch(self): ++ data = { ++ "available_minions": self.minions, ++ "down_minions": self.down_minions, ++ "done_minions": self.done_minions, ++ "timedout_minions": self.timedout_minions, ++ "metadata": self.metadata ++ } ++ self.event.fire_event(data, "salt/batch/{0}/done".format(self.batch_jid)) ++ self.event.remove_event_handler(self.__event_handler) ++ ++ @tornado.gen.coroutine ++ def schedule_next(self): ++ next_batch = self._get_next() ++ if next_batch: ++ yield self.local.run_job_async( ++ next_batch, ++ self.opts['fun'], ++ self.opts['arg'], ++ 'list', ++ raw=self.opts.get('raw', False), ++ ret=self.opts.get('return', ''), ++ gather_job_timeout=self.opts['gather_job_timeout'], ++ jid=self.batch_jid, ++ metadata=self.metadata) ++ self.event.io_loop.call_later(self.opts['timeout'], self.find_job, set(next_batch)) ++ self.active = self.active.union(next_batch) +diff --git a/salt/client/__init__.py b/salt/client/__init__.py +index 9f0903c7f0..8b37422cbf 100644 +--- a/salt/client/__init__.py ++++ b/salt/client/__init__.py +@@ -531,45 +531,14 @@ class LocalClient(object): + {'dave': {...}} + {'stewart': 
{...}} + ''' +- # We need to re-import salt.utils.args here +- # even though it has already been imported. +- # when cmd_batch is called via the NetAPI +- # the module is unavailable. +- import salt.utils.args +- + # Late import - not used anywhere else in this file + import salt.cli.batch ++ opts = salt.cli.batch.batch_get_opts( ++ tgt, fun, batch, self.opts, ++ arg=arg, tgt_type=tgt_type, ret=ret, kwarg=kwarg, **kwargs) ++ ++ eauth = salt.cli.batch.batch_get_eauth(kwargs) + +- arg = salt.utils.args.condition_input(arg, kwarg) +- opts = {'tgt': tgt, +- 'fun': fun, +- 'arg': arg, +- 'tgt_type': tgt_type, +- 'ret': ret, +- 'batch': batch, +- 'failhard': kwargs.get('failhard', False), +- 'raw': kwargs.get('raw', False)} +- +- if 'timeout' in kwargs: +- opts['timeout'] = kwargs['timeout'] +- if 'gather_job_timeout' in kwargs: +- opts['gather_job_timeout'] = kwargs['gather_job_timeout'] +- if 'batch_wait' in kwargs: +- opts['batch_wait'] = int(kwargs['batch_wait']) +- +- eauth = {} +- if 'eauth' in kwargs: +- eauth['eauth'] = kwargs.pop('eauth') +- if 'username' in kwargs: +- eauth['username'] = kwargs.pop('username') +- if 'password' in kwargs: +- eauth['password'] = kwargs.pop('password') +- if 'token' in kwargs: +- eauth['token'] = kwargs.pop('token') +- +- for key, val in six.iteritems(self.opts): +- if key not in opts: +- opts[key] = val + batch = salt.cli.batch.Batch(opts, eauth=eauth, quiet=True) + for ret in batch.run(): + yield ret +@@ -1732,7 +1701,8 @@ class LocalClient(object): + if listen and not self.event.connect_pub(timeout=timeout): + raise SaltReqTimeoutError() + payload = channel.send(payload_kwargs, timeout=timeout) +- except SaltReqTimeoutError: ++ except SaltReqTimeoutError as err: ++ log.error(err) + raise SaltReqTimeoutError( + 'Salt request timed out. The master is not responding. 
You ' + 'may need to run your command with `--async` in order to ' +diff --git a/salt/master.py b/salt/master.py +index 6881aae137..f08c126280 100644 +--- a/salt/master.py ++++ b/salt/master.py +@@ -32,6 +32,7 @@ import tornado.gen # pylint: disable=F0401 + + # Import salt libs + import salt.crypt ++import salt.cli.batch_async + import salt.client + import salt.client.ssh.client + import salt.exceptions +@@ -2039,6 +2040,27 @@ class ClearFuncs(object): + return False + return self.loadauth.get_tok(clear_load['token']) + ++ def publish_batch(self, clear_load, minions, missing): ++ batch_load = {} ++ batch_load.update(clear_load) ++ import salt.cli.batch_async ++ batch = salt.cli.batch_async.BatchAsync( ++ self.local.opts, ++ functools.partial(self._prep_jid, clear_load, {}), ++ batch_load ++ ) ++ ioloop = tornado.ioloop.IOLoop.current() ++ ioloop.add_callback(batch.start) ++ ++ return { ++ 'enc': 'clear', ++ 'load': { ++ 'jid': batch.batch_jid, ++ 'minions': minions, ++ 'missing': missing ++ } ++ } ++ + def publish(self, clear_load): + ''' + This method sends out publications to the minions, it can only be used +@@ -2130,6 +2152,9 @@ class ClearFuncs(object): + 'error': 'Master could not resolve minions for target {0}'.format(clear_load['tgt']) + } + } ++ if extra.get('batch', None): ++ return self.publish_batch(clear_load, minions, missing) ++ + jid = self._prep_jid(clear_load, extra) + if jid is None: + return {'enc': 'clear', +diff --git a/salt/netapi/__init__.py b/salt/netapi/__init__.py +index 95f6384889..43b6e943a7 100644 +--- a/salt/netapi/__init__.py ++++ b/salt/netapi/__init__.py +@@ -88,7 +88,8 @@ class NetapiClient(object): + :return: job ID + ''' + local = salt.client.get_local_client(mopts=self.opts) +- return local.run_job(*args, **kwargs) ++ ret = local.run_job(*args, **kwargs) ++ return ret + + def local(self, *args, **kwargs): + ''' +diff --git a/salt/transport/ipc.py b/salt/transport/ipc.py +index 40a172991d..8235f104ef 100644 +--- a/salt/transport/ipc.py ++++ b/salt/transport/ipc.py +@@ -669,6 +669,8 @@ class IPCMessageSubscriber(IPCClient): + self._sync_ioloop_running = False + self.saved_data = [] + self._sync_read_in_progress = Semaphore() ++ self.callbacks = set() ++ self.reading = False + + @tornado.gen.coroutine + def _read_sync(self, timeout): +@@ -756,6 +758,7 @@ class IPCMessageSubscriber(IPCClient): + while not self.stream.closed(): + try: + self._read_stream_future = self.stream.read_bytes(4096, partial=True) ++ self.reading = True + wire_bytes = yield self._read_stream_future + self._read_stream_future = None + self.unpacker.feed(wire_bytes) +@@ -768,8 +771,12 @@ class IPCMessageSubscriber(IPCClient): + except Exception as exc: + log.error('Exception occurred while Subscriber handling stream: %s', exc) + ++ def __run_callbacks(self, raw): ++ for callback in self.callbacks: ++ self.io_loop.spawn_callback(callback, raw) ++ + @tornado.gen.coroutine +- def read_async(self, callback): ++ def read_async(self): + ''' + Asynchronously read messages and invoke a callback when they are ready. 
+ +@@ -784,7 +791,7 @@ class IPCMessageSubscriber(IPCClient): + except Exception as exc: + log.error('Exception occurred while Subscriber connecting: %s', exc) + yield tornado.gen.sleep(1) +- yield self._read_async(callback) ++ yield self._read_async(self.__run_callbacks) + + def close(self): + ''' +diff --git a/salt/utils/event.py b/salt/utils/event.py +index 296a296084..d2700bd2a0 100644 +--- a/salt/utils/event.py ++++ b/salt/utils/event.py +@@ -863,6 +863,10 @@ class SaltEvent(object): + # Minion fired a bad retcode, fire an event + self._fire_ret_load_specific_fun(load) + ++ def remove_event_handler(self, event_handler): ++ if event_handler in self.subscriber.callbacks: ++ self.subscriber.callbacks.remove(event_handler) ++ + def set_event_handler(self, event_handler): + ''' + Invoke the event_handler callback each time an event arrives. +@@ -871,8 +875,11 @@ class SaltEvent(object): + + if not self.cpub: + self.connect_pub() +- # This will handle reconnects +- return self.subscriber.read_async(event_handler) ++ ++ self.subscriber.callbacks.add(event_handler) ++ if not self.subscriber.reading: ++ # This will handle reconnects ++ self.subscriber.read_async() + + def __del__(self): + # skip exceptions in destroy-- since destroy() doesn't cover interpreter +diff --git a/tests/unit/cli/test_batch_async.py b/tests/unit/cli/test_batch_async.py +new file mode 100644 +index 0000000000..f65b6a06c3 +--- /dev/null ++++ b/tests/unit/cli/test_batch_async.py +@@ -0,0 +1,351 @@ ++# -*- coding: utf-8 -*- ++ ++from __future__ import absolute_import ++ ++# Import Salt Libs ++from salt.cli.batch_async import BatchAsync ++ ++import tornado ++from tornado.testing import AsyncTestCase ++from tests.support.unit import skipIf, TestCase ++from tests.support.mock import MagicMock, patch, NO_MOCK, NO_MOCK_REASON ++ ++ ++@skipIf(NO_MOCK, NO_MOCK_REASON) ++class AsyncBatchTestCase(AsyncTestCase, TestCase): ++ ++ def setUp(self): ++ self.io_loop = self.get_new_ioloop() ++ opts = {'batch': '1', ++ 'conf_file': {}, ++ 'tgt': '*', ++ 'timeout': 5, ++ 'gather_job_timeout': 5, ++ 'batch_presence_ping_timeout': 1, ++ 'transport': None, ++ 'sock_dir': ''} ++ ++ with patch('salt.client.get_local_client', MagicMock(return_value=MagicMock())): ++ with patch('salt.cli.batch_async.batch_get_opts', ++ MagicMock(return_value=opts) ++ ): ++ self.batch = BatchAsync( ++ opts, ++ MagicMock(side_effect=['1234', '1235', '1236']), ++ { ++ 'tgt': '', ++ 'fun': '', ++ 'kwargs': { ++ 'batch': '', ++ 'batch_presence_ping_timeout': 1 ++ } ++ }) ++ ++ def test_ping_jid(self): ++ self.assertEqual(self.batch.ping_jid, '1234') ++ ++ def test_batch_jid(self): ++ self.assertEqual(self.batch.batch_jid, '1235') ++ ++ def test_find_job_jid(self): ++ self.assertEqual(self.batch.find_job_jid, '1236') ++ ++ def test_batch_size(self): ++ ''' ++ Tests passing batch value as a number ++ ''' ++ self.batch.opts = {'batch': '2', 'timeout': 5} ++ self.batch.minions = set(['foo', 'bar']) ++ self.batch.start_batch() ++ self.assertEqual(self.batch.batch_size, 2) ++ ++ @tornado.testing.gen_test ++ def test_batch_start_on_batch_presence_ping_timeout(self): ++ self.batch.event = MagicMock() ++ future = tornado.gen.Future() ++ future.set_result({'minions': ['foo', 'bar']}) ++ self.batch.local.run_job_async.return_value = future ++ ret = self.batch.start() ++ # assert start_batch is called later with batch_presence_ping_timeout as param ++ self.assertEqual( ++ self.batch.event.io_loop.call_later.call_args[0], ++ (self.batch.batch_presence_ping_timeout, 
self.batch.start_batch)) ++ # assert test.ping called ++ self.assertEqual( ++ self.batch.local.run_job_async.call_args[0], ++ ('*', 'test.ping', [], 'glob') ++ ) ++ # assert down_minions == all minions matched by tgt ++ self.assertEqual(self.batch.down_minions, set(['foo', 'bar'])) ++ ++ @tornado.testing.gen_test ++ def test_batch_start_on_gather_job_timeout(self): ++ self.batch.event = MagicMock() ++ future = tornado.gen.Future() ++ future.set_result({'minions': ['foo', 'bar']}) ++ self.batch.local.run_job_async.return_value = future ++ self.batch.batch_presence_ping_timeout = None ++ ret = self.batch.start() ++ # assert start_batch is called later with gather_job_timeout as param ++ self.assertEqual( ++ self.batch.event.io_loop.call_later.call_args[0], ++ (self.batch.opts['gather_job_timeout'], self.batch.start_batch)) ++ ++ def test_batch_fire_start_event(self): ++ self.batch.minions = set(['foo', 'bar']) ++ self.batch.opts = {'batch': '2', 'timeout': 5} ++ self.batch.event = MagicMock() ++ self.batch.metadata = {'mykey': 'myvalue'} ++ self.batch.start_batch() ++ self.assertEqual( ++ self.batch.event.fire_event.call_args[0], ++ ( ++ { ++ 'available_minions': set(['foo', 'bar']), ++ 'down_minions': set(), ++ 'metadata': self.batch.metadata ++ }, ++ "salt/batch/1235/start" ++ ) ++ ) ++ ++ @tornado.testing.gen_test ++ def test_start_batch_calls_next(self): ++ self.batch.schedule_next = MagicMock(return_value=MagicMock()) ++ self.batch.event = MagicMock() ++ future = tornado.gen.Future() ++ future.set_result(None) ++ self.batch.schedule_next = MagicMock(return_value=future) ++ self.batch.start_batch() ++ self.assertEqual(self.batch.initialized, True) ++ self.assertEqual(len(self.batch.schedule_next.mock_calls), 1) ++ ++ def test_batch_fire_done_event(self): ++ self.batch.minions = set(['foo', 'bar']) ++ self.batch.event = MagicMock() ++ self.batch.metadata = {'mykey': 'myvalue'} ++ self.batch.end_batch() ++ self.assertEqual( ++ self.batch.event.fire_event.call_args[0], ++ ( ++ { ++ 'available_minions': set(['foo', 'bar']), ++ 'done_minions': set(), ++ 'down_minions': set(), ++ 'timedout_minions': set(), ++ 'metadata': self.batch.metadata ++ }, ++ "salt/batch/1235/done" ++ ) ++ ) ++ self.assertEqual( ++ len(self.batch.event.remove_event_handler.mock_calls), 1) ++ ++ @tornado.testing.gen_test ++ def test_batch_next(self): ++ self.batch.event = MagicMock() ++ self.batch.opts['fun'] = 'my.fun' ++ self.batch.opts['arg'] = [] ++ self.batch._get_next = MagicMock(return_value={'foo', 'bar'}) ++ self.batch.batch_size = 2 ++ future = tornado.gen.Future() ++ future.set_result({'minions': ['foo', 'bar']}) ++ self.batch.local.run_job_async.return_value = future ++ ret = self.batch.schedule_next().result() ++ self.assertEqual( ++ self.batch.local.run_job_async.call_args[0], ++ ({'foo', 'bar'}, 'my.fun', [], 'list') ++ ) ++ self.assertEqual( ++ self.batch.event.io_loop.call_later.call_args[0], ++ (self.batch.opts['timeout'], self.batch.find_job, {'foo', 'bar'}) ++ ) ++ self.assertEqual(self.batch.active, {'bar', 'foo'}) ++ ++ def test_next_batch(self): ++ self.batch.minions = {'foo', 'bar'} ++ self.batch.batch_size = 2 ++ self.assertEqual(self.batch._get_next(), {'foo', 'bar'}) ++ ++ def test_next_batch_one_done(self): ++ self.batch.minions = {'foo', 'bar'} ++ self.batch.done_minions = {'bar'} ++ self.batch.batch_size = 2 ++ self.assertEqual(self.batch._get_next(), {'foo'}) ++ ++ def test_next_batch_one_done_one_active(self): ++ self.batch.minions = {'foo', 'bar', 'baz'} ++ self.batch.done_minions = 
{'bar'} ++ self.batch.active = {'baz'} ++ self.batch.batch_size = 2 ++ self.assertEqual(self.batch._get_next(), {'foo'}) ++ ++ def test_next_batch_one_done_one_active_one_timedout(self): ++ self.batch.minions = {'foo', 'bar', 'baz', 'faz'} ++ self.batch.done_minions = {'bar'} ++ self.batch.active = {'baz'} ++ self.batch.timedout_minions = {'faz'} ++ self.batch.batch_size = 2 ++ self.assertEqual(self.batch._get_next(), {'foo'}) ++ ++ def test_next_batch_bigger_size(self): ++ self.batch.minions = {'foo', 'bar'} ++ self.batch.batch_size = 3 ++ self.assertEqual(self.batch._get_next(), {'foo', 'bar'}) ++ ++ def test_next_batch_all_done(self): ++ self.batch.minions = {'foo', 'bar'} ++ self.batch.done_minions = {'foo', 'bar'} ++ self.batch.batch_size = 2 ++ self.assertEqual(self.batch._get_next(), set()) ++ ++ def test_next_batch_all_active(self): ++ self.batch.minions = {'foo', 'bar'} ++ self.batch.active = {'foo', 'bar'} ++ self.batch.batch_size = 2 ++ self.assertEqual(self.batch._get_next(), set()) ++ ++ def test_next_batch_all_timedout(self): ++ self.batch.minions = {'foo', 'bar'} ++ self.batch.timedout_minions = {'foo', 'bar'} ++ self.batch.batch_size = 2 ++ self.assertEqual(self.batch._get_next(), set()) ++ ++ def test_batch__event_handler_ping_return(self): ++ self.batch.down_minions = {'foo'} ++ self.batch.event = MagicMock( ++ unpack=MagicMock(return_value=('salt/job/1234/ret/foo', {'id': 'foo'}))) ++ self.batch.start() ++ self.assertEqual(self.batch.minions, set()) ++ self.batch._BatchAsync__event_handler(MagicMock()) ++ self.assertEqual(self.batch.minions, {'foo'}) ++ self.assertEqual(self.batch.done_minions, set()) ++ ++ def test_batch__event_handler_call_start_batch_when_all_pings_return(self): ++ self.batch.down_minions = {'foo'} ++ self.batch.event = MagicMock( ++ unpack=MagicMock(return_value=('salt/job/1234/ret/foo', {'id': 'foo'}))) ++ self.batch.start() ++ self.batch._BatchAsync__event_handler(MagicMock()) ++ self.assertEqual( ++ self.batch.event.io_loop.spawn_callback.call_args[0], ++ (self.batch.start_batch,)) ++ ++ def test_batch__event_handler_not_call_start_batch_when_not_all_pings_return(self): ++ self.batch.down_minions = {'foo', 'bar'} ++ self.batch.event = MagicMock( ++ unpack=MagicMock(return_value=('salt/job/1234/ret/foo', {'id': 'foo'}))) ++ self.batch.start() ++ self.batch._BatchAsync__event_handler(MagicMock()) ++ self.assertEqual( ++ len(self.batch.event.io_loop.spawn_callback.mock_calls), 0) ++ ++ def test_batch__event_handler_batch_run_return(self): ++ self.batch.event = MagicMock( ++ unpack=MagicMock(return_value=('salt/job/1235/ret/foo', {'id': 'foo'}))) ++ self.batch.start() ++ self.batch.active = {'foo'} ++ self.batch._BatchAsync__event_handler(MagicMock()) ++ self.assertEqual(self.batch.active, set()) ++ self.assertEqual(self.batch.done_minions, {'foo'}) ++ self.assertEqual( ++ self.batch.event.io_loop.call_later.call_args[0], ++ (self.batch.batch_delay, self.batch.schedule_next)) ++ ++ def test_batch__event_handler_find_job_return(self): ++ self.batch.event = MagicMock( ++ unpack=MagicMock(return_value=('salt/job/1236/ret/foo', {'id': 'foo'}))) ++ self.batch.start() ++ self.batch._BatchAsync__event_handler(MagicMock()) ++ self.assertEqual(self.batch.find_job_returned, {'foo'}) ++ ++ @tornado.testing.gen_test ++ def test_batch__event_handler_end_batch(self): ++ self.batch.event = MagicMock( ++ unpack=MagicMock(return_value=('salt/job/not-my-jid/ret/foo', {'id': 'foo'}))) ++ future = tornado.gen.Future() ++ future.set_result({'minions': ['foo', 'bar', 
'baz']})
++        self.batch.local.run_job_async.return_value = future
++        self.batch.start()
++        self.batch.initialized = True
++        self.assertEqual(self.batch.down_minions, {'foo', 'bar', 'baz'})
++        self.batch.end_batch = MagicMock()
++        self.batch.minions = {'foo', 'bar', 'baz'}
++        self.batch.done_minions = {'foo', 'bar'}
++        self.batch.timedout_minions = {'baz'}
++        self.batch._BatchAsync__event_handler(MagicMock())
++        self.assertEqual(len(self.batch.end_batch.mock_calls), 1)
++
++    @tornado.testing.gen_test
++    def test_batch_find_job(self):
++        self.batch.event = MagicMock()
++        future = tornado.gen.Future()
++        future.set_result({})
++        self.batch.local.run_job_async.return_value = future
++        self.batch.find_job({'foo', 'bar'})
++        self.assertEqual(
++            self.batch.event.io_loop.call_later.call_args[0],
++            (self.batch.opts['gather_job_timeout'], self.batch.check_find_job, {'foo', 'bar'})
++        )
++
++    @tornado.testing.gen_test
++    def test_batch_find_job_with_done_minions(self):
++        self.batch.done_minions = {'bar'}
++        self.batch.event = MagicMock()
++        future = tornado.gen.Future()
++        future.set_result({})
++        self.batch.local.run_job_async.return_value = future
++        self.batch.find_job({'foo', 'bar'})
++        self.assertEqual(
++            self.batch.event.io_loop.call_later.call_args[0],
++            (self.batch.opts['gather_job_timeout'], self.batch.check_find_job, {'foo'})
++        )
++
++    def test_batch_check_find_job_did_not_return(self):
++        self.batch.event = MagicMock()
++        self.batch.active = {'foo'}
++        self.batch.find_job_returned = set()
++        self.batch.check_find_job({'foo'})
++        self.assertEqual(self.batch.find_job_returned, set())
++        self.assertEqual(self.batch.active, set())
++        self.assertEqual(len(self.batch.event.io_loop.add_callback.mock_calls), 0)
++
++    def test_batch_check_find_job_did_return(self):
++        self.batch.event = MagicMock()
++        self.batch.find_job_returned = {'foo'}
++        self.batch.check_find_job({'foo'})
++        self.assertEqual(
++            self.batch.event.io_loop.add_callback.call_args[0],
++            (self.batch.find_job, {'foo'})
++        )
++
++    def test_batch_check_find_job_multiple_states(self):
++        self.batch.event = MagicMock()
++        # currently running minions
++        self.batch.active = {'foo', 'bar'}
++
++        # minion is running and find_job returns
++        self.batch.find_job_returned = {'foo'}
++
++        # minion started running but find_job did not return
++        self.batch.timedout_minions = {'faz'}
++
++        # minion finished
++        self.batch.done_minions = {'baz'}
++
++        # both not yet done but only 'foo' responded to find_job
++        not_done = {'foo', 'bar'}
++
++        self.batch.check_find_job(not_done)
++
++        # assert 'bar' removed from active
++        self.assertEqual(self.batch.active, {'foo'})
++
++        # assert 'bar' added to timedout_minions
++        self.assertEqual(self.batch.timedout_minions, {'bar', 'faz'})
++
++        # assert 'find_job' scheduled again only for 'foo'
++        self.assertEqual(
++            self.batch.event.io_loop.add_callback.call_args[0],
++            (self.batch.find_job, {'foo'})
++        )
+--
+2.20.1
+
+
diff --git a/avoid-incomprehensive-message-if-crashes.patch b/avoid-incomprehensive-message-if-crashes.patch
deleted file mode 100644
index 01679d8..0000000
--- a/avoid-incomprehensive-message-if-crashes.patch
+++ /dev/null
@@ -1,57 +0,0 @@
-From c4d9227b6da4407348e181f092445f17e3c14b51 Mon Sep 17 00:00:00 2001
-From: =?UTF-8?q?Pablo=20Su=C3=A1rez=20Hern=C3=A1ndez?=
-
-Date: Thu, 26 Jul 2018 16:42:10 +0100
-Subject: [PATCH] Avoid incomprehensive message if crashes
-
-Check dmidecoder executable on each call to avoid crashing
-
-Fix pylint issues
----
- salt/modules/smbios.py | 11
+++++++++++ - 1 file changed, 11 insertions(+) - -diff --git a/salt/modules/smbios.py b/salt/modules/smbios.py -index c8a0e54a5c..c0b94c2a65 100644 ---- a/salt/modules/smbios.py -+++ b/salt/modules/smbios.py -@@ -19,6 +19,7 @@ import re - - # Import salt libs - import salt.utils.path -+from salt.exceptions import CommandExecutionError - - # Solve the Chicken and egg problem where grains need to run before any - # of the modules are loaded and are generally available for any usage. -@@ -32,10 +33,16 @@ log = logging.getLogger(__name__) - DMIDECODER = salt.utils.path.which_bin(['dmidecode', 'smbios']) - - -+def _refresh_dmidecoder(): -+ global DMIDECODER -+ DMIDECODER = salt.utils.path.which_bin(['dmidecode', 'smbios']) -+ -+ - def __virtual__(): - ''' - Only work when dmidecode is installed. - ''' -+ _refresh_dmidecoder() - if DMIDECODER is None: - log.debug('SMBIOS: neither dmidecode nor smbios found!') - return (False, 'The smbios execution module failed to load: neither dmidecode nor smbios in the path.') -@@ -327,6 +334,10 @@ def _dmidecoder(args=None): - ''' - Call DMIdecode - ''' -+ _refresh_dmidecoder() -+ if DMIDECODER is None: -+ raise CommandExecutionError('SMBIOS: neither dmidecode nor smbios found!') -+ - if args is None: - return salt.modules.cmdmod._run_quiet(DMIDECODER) - else: --- -2.17.1 - - diff --git a/azurefs-gracefully-handle-attributeerror.patch b/azurefs-gracefully-handle-attributeerror.patch index fa03205..bcbf9c6 100644 --- a/azurefs-gracefully-handle-attributeerror.patch +++ b/azurefs-gracefully-handle-attributeerror.patch @@ -1,4 +1,4 @@ -From 326e649ef1f14b609916f0e9ce75e29a5e7f4d05 Mon Sep 17 00:00:00 2001 +From d914a1e952e393f3e72aee2cb8d9056533f490cc Mon Sep 17 00:00:00 2001 From: Robert Munteanu Date: Mon, 19 Nov 2018 17:52:34 +0100 Subject: [PATCH] azurefs: gracefully handle AttributeError @@ -13,7 +13,7 @@ Problem was encountered on openSUSE Tumbleweed. 
1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/salt/fileserver/azurefs.py b/salt/fileserver/azurefs.py -index c266323fbe..a42c10c594 100644 +index 547a681016..032739d160 100644 --- a/salt/fileserver/azurefs.py +++ b/salt/fileserver/azurefs.py @@ -68,7 +68,7 @@ try: @@ -26,6 +26,6 @@ index c266323fbe..a42c10c594 100644 # Import third party libs -- -2.20.1 +2.17.1 diff --git a/bugfix-any-unicode-string-of-length-16-will-raise-ty.patch b/bugfix-any-unicode-string-of-length-16-will-raise-ty.patch index aa453d5..1ba1462 100644 --- a/bugfix-any-unicode-string-of-length-16-will-raise-ty.patch +++ b/bugfix-any-unicode-string-of-length-16-will-raise-ty.patch @@ -1,4 +1,4 @@ -From e82dc4c556497b612d31b65e60b34c979c957424 Mon Sep 17 00:00:00 2001 +From 8fc3419db49497ca33f99d7bbc3a251d7b07ff09 Mon Sep 17 00:00:00 2001 From: Bo Maryniuk Date: Fri, 5 Oct 2018 12:02:08 +0200 Subject: [PATCH] Bugfix: any unicode string of length 16 will raise @@ -9,11 +9,11 @@ Subject: [PATCH] Bugfix: any unicode string of length 16 will raise 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/salt/_compat.py b/salt/_compat.py -index 0576210afc..71963a4ead 100644 +index 8628833dcf..98931c6cce 100644 --- a/salt/_compat.py +++ b/salt/_compat.py -@@ -192,7 +192,7 @@ class IPv6AddressScoped(ipaddress.IPv6Address): - if len(data) == 16 and ':' not in data: +@@ -191,7 +191,7 @@ class IPv6AddressScoped(ipaddress.IPv6Address): + if isinstance(data, bytes) and len(data) == 16 and b':' not in data: try: packed = bool(int(str(bytearray(data)).encode('hex'), 16)) - except ValueError: @@ -22,6 +22,6 @@ index 0576210afc..71963a4ead 100644 return packed -- -2.17.1 +2.20.1 diff --git a/change-stringio-import-in-python2-to-import-the-clas.patch b/change-stringio-import-in-python2-to-import-the-clas.patch deleted file mode 100644 index 8c43a7f..0000000 --- a/change-stringio-import-in-python2-to-import-the-clas.patch +++ /dev/null @@ -1,57 +0,0 @@ -From a0d5af98c8d2a22c5eb56943ff320ca287fa79ea Mon Sep 17 00:00:00 2001 -From: Florian Bergmann -Date: Tue, 11 Sep 2018 14:03:33 +0200 -Subject: [PATCH] Change StringIO import in python2 to import the class. - (#107) - -Instead of using StringIO in python3, use the correct BytesIO class instead. 
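For reference, the py2/py3 buffer pattern the removed patch describes, as a self-contained sketch (`six` is the stock six library; the byte payload is made up):

.. code-block:: python

    # base64.encode() writes bytes, so Python 3 needs an io.BytesIO buffer,
    # while on Python 2 the single StringIO class can hold bytes as well.
    import base64
    import six

    if six.PY2:
        from StringIO import StringIO
        BytesIO = StringIO
    else:
        from io import BytesIO, StringIO

    encoded = BytesIO()
    base64.encode(BytesIO(b'salted'), encoded)
    print(encoded.getvalue())  # c2FsdGVk, newline-terminated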
---- - salt/modules/hashutil.py | 11 ++++++----- - 1 file changed, 6 insertions(+), 5 deletions(-) - -diff --git a/salt/modules/hashutil.py b/salt/modules/hashutil.py -index 721957973d..5123cc7cd7 100644 ---- a/salt/modules/hashutil.py -+++ b/salt/modules/hashutil.py -@@ -17,9 +17,10 @@ import salt.utils.hashutils - import salt.utils.stringutils - - if six.PY2: -- import StringIO -+ from StringIO import StringIO -+ BytesIO = StringIO - elif six.PY3: -- from io import StringIO -+ from io import BytesIO, StringIO - - - def digest(instr, checksum='md5'): -@@ -155,13 +156,13 @@ def base64_encodefile(fname): - - salt '*' hashutil.base64_encodefile /path/to/binary_file - ''' -- encoded_f = StringIO.StringIO() -+ encoded_f = BytesIO() - - with salt.utils.files.fopen(fname, 'rb') as f: - base64.encode(f, encoded_f) - - encoded_f.seek(0) -- return encoded_f.read() -+ return salt.utils.stringutils.to_str(encoded_f.read()) - - - def base64_decodestring(instr): -@@ -192,7 +193,7 @@ def base64_decodefile(instr, outfile): - - salt '*' hashutil.base64_decodefile instr='Z2V0IHNhbHRlZAo=' outfile='/path/to/binary_file' - ''' -- encoded_f = StringIO.StringIO(instr) -+ encoded_f = StringIO(instr) - - with salt.utils.files.fopen(outfile, 'wb') as f: - base64.decode(encoded_f, f) --- -2.19.0 - - diff --git a/debian-info_installed-compatibility-50453.patch b/debian-info_installed-compatibility-50453.patch index 566f529..d29223c 100644 --- a/debian-info_installed-compatibility-50453.patch +++ b/debian-info_installed-compatibility-50453.patch @@ -1,4 +1,4 @@ -From 9b2473001dcf25c53dff469d3ffb38113e0402eb Mon Sep 17 00:00:00 2001 +From afdfd35222223d81c304854b5ae7af60f3820ed3 Mon Sep 17 00:00:00 2001 From: Bo Maryniuk Date: Tue, 20 Nov 2018 16:06:31 +0100 Subject: [PATCH] Debian info_installed compatibility (#50453) @@ -49,17 +49,17 @@ Adjust architecture getter according to the lowpkg info Fix wrong Git merge: missing function signature --- - salt/modules/aptpkg.py | 20 +++- - salt/modules/dpkg.py | 93 +++++++++++++-- - tests/unit/modules/test_aptpkg.py | 189 +++++++++++++++++------------- - tests/unit/modules/test_dpkg.py | 69 +++++++++++ - 4 files changed, 274 insertions(+), 97 deletions(-) + salt/modules/aptpkg.py | 20 +++- + salt/modules/dpkg_lowpkg.py | 93 +++++++++++++-- + tests/unit/modules/test_aptpkg.py | 151 ++++++++++++++++--------- + tests/unit/modules/test_dpkg_lowpkg.py | 69 +++++++++++ + 4 files changed, 267 insertions(+), 66 deletions(-) diff --git a/salt/modules/aptpkg.py b/salt/modules/aptpkg.py -index 90b99c44b9..dc27903230 100644 +index 6b3a921a82..64620647c2 100644 --- a/salt/modules/aptpkg.py +++ b/salt/modules/aptpkg.py -@@ -2800,6 +2800,15 @@ def info_installed(*names, **kwargs): +@@ -2776,6 +2776,15 @@ def info_installed(*names, **kwargs): .. versionadded:: 2016.11.3 @@ -75,7 +75,7 @@ index 90b99c44b9..dc27903230 100644 CLI example: .. 
code-block:: bash -@@ -2810,11 +2819,15 @@ def info_installed(*names, **kwargs): +@@ -2786,11 +2795,15 @@ def info_installed(*names, **kwargs): ''' kwargs = salt.utils.args.clean_kwargs(**kwargs) failhard = kwargs.pop('failhard', True) @@ -92,7 +92,7 @@ index 90b99c44b9..dc27903230 100644 t_nfo = dict() # Translate dpkg-specific keys to a common structure for key, value in pkg_nfo.items(): -@@ -2831,7 +2844,10 @@ def info_installed(*names, **kwargs): +@@ -2807,7 +2820,10 @@ def info_installed(*names, **kwargs): else: t_nfo[key] = value @@ -104,10 +104,10 @@ index 90b99c44b9..dc27903230 100644 return ret -diff --git a/salt/modules/dpkg.py b/salt/modules/dpkg.py +diff --git a/salt/modules/dpkg_lowpkg.py b/salt/modules/dpkg_lowpkg.py index 03be5f821a..26ca5dcf5a 100644 ---- a/salt/modules/dpkg.py -+++ b/salt/modules/dpkg.py +--- a/salt/modules/dpkg_lowpkg.py ++++ b/salt/modules/dpkg_lowpkg.py @@ -252,6 +252,38 @@ def file_dict(*packages): return {'errors': errors, 'packages': ret} @@ -257,26 +257,19 @@ index 03be5f821a..26ca5dcf5a 100644 return ret diff --git a/tests/unit/modules/test_aptpkg.py b/tests/unit/modules/test_aptpkg.py -index c0e26cfcd4..5352e39982 100644 +index 1e963ee5db..580b840197 100644 --- a/tests/unit/modules/test_aptpkg.py +++ b/tests/unit/modules/test_aptpkg.py -@@ -13,12 +13,14 @@ import copy - # Import Salt Testing Libs - from tests.support.mixins import LoaderModuleMockMixin - from tests.support.unit import TestCase, skipIf --from tests.support.mock import MagicMock, patch, NO_MOCK, NO_MOCK_REASON -+from tests.support.mock import Mock, MagicMock, patch, NO_MOCK, NO_MOCK_REASON - - # Import Salt Libs +@@ -20,6 +20,8 @@ from tests.support.mock import Mock, MagicMock, patch, NO_MOCK, NO_MOCK_REASON from salt.ext import six from salt.exceptions import CommandExecutionError, SaltInvocationError import salt.modules.aptpkg as aptpkg +import pytest +import textwrap - - APT_KEY_LIST = r''' -@@ -142,51 +144,39 @@ class AptPkgTestCase(TestCase, LoaderModuleMockMixin): + try: + import pytest +@@ -148,51 +150,39 @@ class AptPkgTestCase(TestCase, LoaderModuleMockMixin): def setup_loader_modules(self): return {aptpkg: {}} @@ -341,7 +334,7 @@ index c0e26cfcd4..5352e39982 100644 def test_get_repo_keys(self): ''' -@@ -199,35 +189,31 @@ class AptPkgTestCase(TestCase, LoaderModuleMockMixin): +@@ -205,35 +195,31 @@ class AptPkgTestCase(TestCase, LoaderModuleMockMixin): with patch.dict(aptpkg.__salt__, {'cmd.run_all': mock}): self.assertEqual(aptpkg.get_repo_keys(), REPO_KEYS) @@ -388,7 +381,7 @@ index c0e26cfcd4..5352e39982 100644 def test_info_installed(self): ''' Test - Return the information of the named package(s) installed on the system. 
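For orientation, a stand-alone sketch of the attribute filtering these hunks introduce and the tests below exercise; `filter_pkg_info` is a hypothetical helper, not Salt's `info_installed`:

```python
# Restrict per-package metadata to a comma-separated list of attributes,
# mirroring the new `attr` keyword: attr=None returns everything.
def filter_pkg_info(pkg_nfo, attr=None):
    # pkg_nfo: {'wget': {'version': '1.19.2', 'arch': 'amd64', ...}}
    if attr is None:
        return pkg_nfo
    wanted = [a.strip() for a in attr.split(',')]
    return {pkg: {k: v for k, v in info.items() if k in wanted}
            for pkg, info in pkg_nfo.items()}

print(filter_pkg_info({'wget': {'version': '1.19.2', 'arch': 'amd64'}},
                      attr='version'))
# -> {'wget': {'version': '1.19.2'}}
```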
-@@ -243,19 +229,72 @@ class AptPkgTestCase(TestCase, LoaderModuleMockMixin): +@@ -249,19 +235,72 @@ class AptPkgTestCase(TestCase, LoaderModuleMockMixin): if installed['wget'].get(names[name], False): installed['wget'][name] = installed['wget'].pop(names[name]) @@ -435,7 +428,7 @@ index c0e26cfcd4..5352e39982 100644 + assert isinstance(ret, dict) + assert 'wget' in ret + assert isinstance(ret['wget'], list) - ++ + pkgs = ret['wget'] + + assert len(pkgs) == 1 @@ -449,7 +442,7 @@ index c0e26cfcd4..5352e39982 100644 + for k in wget_pkg: + assert k in expected_pkg + assert wget_pkg[k] == expected_pkg[k] -+ + + @patch('salt.modules.aptpkg.__salt__', {'cmd.run_stdout': MagicMock(return_value='wget: /usr/bin/wget')}) def test_owner(self): ''' @@ -468,18 +461,9 @@ index c0e26cfcd4..5352e39982 100644 def test_refresh_db(self): ''' Test - Updates the APT database to latest packages based upon repositories. -@@ -267,26 +306,20 @@ class AptPkgTestCase(TestCase, LoaderModuleMockMixin): - 'http://security.ubuntu.com trusty-security/main amd64 Packages': True, - 'http://security.ubuntu.com trusty-security/main i386 Packages': True - } -- mock = MagicMock(return_value={ -- 'retcode': 0, -- 'stdout': APT_Q_UPDATE -- }) -- with patch('salt.utils.pkg.clear_rtag', MagicMock()): -- with patch.dict(aptpkg.__salt__, {'cmd.run_all': mock}): -- self.assertEqual(aptpkg.refresh_db(), refresh_db) -+ assert aptpkg.refresh_db() == refresh_db +@@ -281,6 +320,10 @@ class AptPkgTestCase(TestCase, LoaderModuleMockMixin): + with patch.dict(aptpkg.__salt__, {'cmd.run_all': mock, 'config.get': MagicMock(return_value=False)}): + self.assertEqual(aptpkg.refresh_db(), refresh_db) + @patch('salt.utils.pkg.clear_rtag', MagicMock()) + @patch('salt.modules.aptpkg.__salt__', {'cmd.run_all': MagicMock(return_value={'retcode': 0, @@ -488,25 +472,9 @@ index c0e26cfcd4..5352e39982 100644 def test_refresh_db_failed(self): ''' Test - Update the APT database using unreachable repositories. - ''' -- kwargs = {'failhard': True} -- mock = MagicMock(return_value={ -- 'retcode': 0, -- 'stdout': APT_Q_UPDATE_ERROR -- }) -- with patch('salt.utils.pkg.clear_rtag', MagicMock()): -- with patch.dict(aptpkg.__salt__, {'cmd.run_all': mock}): -- self.assertRaises(CommandExecutionError, aptpkg.refresh_db, **kwargs) -+ with pytest.raises(CommandExecutionError) as err: -+ aptpkg.refresh_db(failhard=True) -+ assert 'Error getting repos' in str(err) -+ assert 'http://security.ubuntu.com trusty InRelease, http://security.ubuntu.com trusty Release.gpg' in str(err) - - def test_autoremove(self): - ''' -@@ -306,38 +339,26 @@ class AptPkgTestCase(TestCase, LoaderModuleMockMixin): - self.assertEqual(aptpkg.autoremove(list_only=True), list()) - self.assertEqual(aptpkg.autoremove(list_only=True, purge=True), list()) +@@ -312,22 +355,24 @@ class AptPkgTestCase(TestCase, LoaderModuleMockMixin): + assert aptpkg.autoremove(list_only=True) == [] + assert aptpkg.autoremove(list_only=True, purge=True) == [] + @patch('salt.modules.aptpkg._uninstall', MagicMock(return_value=UNINSTALL)) def test_remove(self): @@ -535,27 +503,10 @@ index c0e26cfcd4..5352e39982 100644 def test_upgrade(self): ''' Test - Upgrades all packages. 
- ''' -- with patch('salt.utils.pkg.clear_rtag', MagicMock()): -- with patch('salt.modules.aptpkg.list_pkgs', -- MagicMock(return_value=UNINSTALL)): -- mock_cmd = MagicMock(return_value={ -- 'retcode': 0, -- 'stdout': UPGRADE -- }) -- patch_kwargs = { -- '__salt__': { -- 'config.get': MagicMock(return_value=True), -- 'cmd.run_all': mock_cmd -- } -- } -- with patch.multiple(aptpkg, **patch_kwargs): -- self.assertEqual(aptpkg.upgrade(), dict()) -+ assert aptpkg.upgrade() == {} -diff --git a/tests/unit/modules/test_dpkg.py b/tests/unit/modules/test_dpkg.py -index fcfa7caf77..1acfd89ccf 100644 ---- a/tests/unit/modules/test_dpkg.py -+++ b/tests/unit/modules/test_dpkg.py +diff --git a/tests/unit/modules/test_dpkg_lowpkg.py b/tests/unit/modules/test_dpkg_lowpkg.py +index bdcb7eec89..d16ce3cc1a 100644 +--- a/tests/unit/modules/test_dpkg_lowpkg.py ++++ b/tests/unit/modules/test_dpkg_lowpkg.py @@ -25,6 +25,30 @@ class DpkgTestCase(TestCase, LoaderModuleMockMixin): ''' Test cases for salt.modules.dpkg @@ -637,6 +588,6 @@ index fcfa7caf77..1acfd89ccf 100644 + assert ret['emacs']['license'] == 'BSD v3' + assert ret['emacs']['version'] == '46.1' -- -2.19.1 +2.20.1 diff --git a/decode-file-contents-for-python2-bsc-1102013.patch b/decode-file-contents-for-python2-bsc-1102013.patch deleted file mode 100644 index 50b90aa..0000000 --- a/decode-file-contents-for-python2-bsc-1102013.patch +++ /dev/null @@ -1,27 +0,0 @@ -From 58913b6801b92bd59374cd53fa48fa74171abb73 Mon Sep 17 00:00:00 2001 -From: Abid Mehmood -Date: Wed, 1 Aug 2018 17:19:11 +0200 -Subject: [PATCH] Decode file contents for python2(bsc#1102013) - ---- - salt/states/file.py | 3 +-- - 1 file changed, 1 insertion(+), 2 deletions(-) - -diff --git a/salt/states/file.py b/salt/states/file.py -index e1d247ae4f..db82098a33 100644 ---- a/salt/states/file.py -+++ b/salt/states/file.py -@@ -1105,8 +1105,7 @@ def _get_template_texts(source_list=None, - tmplines = None - with salt.utils.files.fopen(rndrd_templ_fn, 'rb') as fp_: - tmplines = fp_.read() -- if six.PY3: -- tmplines = tmplines.decode(__salt_system_encoding__) -+ tmplines = tmplines.decode(__salt_system_encoding__) - tmplines = tmplines.splitlines(True) - if not tmplines: - msg = 'Failed to read rendered template file {0} ({1})' --- -2.17.1 - - diff --git a/do-not-load-pip-state-if-there-is-no-3rd-party-depen.patch b/do-not-load-pip-state-if-there-is-no-3rd-party-depen.patch index 0149d8e..a8a1f2f 100644 --- a/do-not-load-pip-state-if-there-is-no-3rd-party-depen.patch +++ b/do-not-load-pip-state-if-there-is-no-3rd-party-depen.patch @@ -1,4 +1,4 @@ -From 7727ab13e3492b722b316469cc912d9dd64f063e Mon Sep 17 00:00:00 2001 +From ab7d69b3438c719f7ad6b4b346e56556e8a7bd10 Mon Sep 17 00:00:00 2001 From: Bo Maryniuk Date: Fri, 21 Sep 2018 17:31:39 +0200 Subject: [PATCH] Do not load pip state if there is no 3rd party @@ -6,12 +6,11 @@ Subject: [PATCH] Do not load pip state if there is no 3rd party Safe import 3rd party dependency --- - salt/modules/pip.py | 12 ++++++++++-- - salt/states/pip_state.py | 9 +++++---- - 2 files changed, 15 insertions(+), 6 deletions(-) + salt/modules/pip.py | 12 ++++++++++-- + 1 file changed, 10 insertions(+), 2 deletions(-) diff --git a/salt/modules/pip.py b/salt/modules/pip.py -index f1a2e42433..85844f098b 100644 +index eac40c719c..988ae695a7 100644 --- a/salt/modules/pip.py +++ b/salt/modules/pip.py @@ -79,7 +79,10 @@ from __future__ import absolute_import, print_function, unicode_literals @@ -40,34 +39,7 @@ index f1a2e42433..85844f098b 100644 def 
_clear_context(bin_env=None): -diff --git a/salt/states/pip_state.py b/salt/states/pip_state.py -index ab58fbd5fc..afe41d7fc8 100644 ---- a/salt/states/pip_state.py -+++ b/salt/states/pip_state.py -@@ -23,7 +23,10 @@ requisite to a pkg.installed state for the package which provides pip - from __future__ import absolute_import, print_function, unicode_literals - import re - import logging --import pkg_resources -+try: -+ import pkg_resources -+except ImportError: -+ pkg_resources = None - - # Import salt libs - import salt.utils.versions -@@ -71,9 +74,7 @@ def __virtual__(): - ''' - Only load if the pip module is available in __salt__ - ''' -- if 'pip.list' in __salt__: -- return __virtualname__ -- return False -+ return 'pip.list' in __salt__ and __virtualname__ or False - - - def _find_key(prefix, pip_list): -- -2.19.0 +2.17.1 diff --git a/do-not-override-jid-on-returners-only-sending-back-t.patch b/do-not-override-jid-on-returners-only-sending-back-t.patch deleted file mode 100644 index b80bab9..0000000 --- a/do-not-override-jid-on-returners-only-sending-back-t.patch +++ /dev/null @@ -1,38 +0,0 @@ -From 911d61d1479d89ed31b23b038874505b731c6d86 Mon Sep 17 00:00:00 2001 -From: =?UTF-8?q?Pablo=20Su=C3=A1rez=20Hern=C3=A1ndez?= - -Date: Fri, 4 May 2018 09:34:13 +0100 -Subject: [PATCH] Do not override jid on returners, only sending back to - master (bsc#1092373) - ---- - salt/utils/schedule.py | 12 +++++++----- - 1 file changed, 7 insertions(+), 5 deletions(-) - -diff --git a/salt/utils/schedule.py b/salt/utils/schedule.py -index 65c2e3fbda..32fdae9786 100644 ---- a/salt/utils/schedule.py -+++ b/salt/utils/schedule.py -@@ -755,11 +755,13 @@ class Schedule(object): - else: - # Send back to master so the job is included in the job list - mret = ret.copy() -- mret['jid'] = 'req' -- if data.get('return_job') == 'nocache': -- # overwrite 'req' to signal to master that -- # this job shouldn't be stored -- mret['jid'] = 'nocache' -+ # No returners defined, so we're only sending back to the master -+ if not data_returner and not self.schedule_returner: -+ mret['jid'] = 'req' -+ if data.get('return_job') == 'nocache': -+ # overwrite 'req' to signal to master that -+ # this job shouldn't be stored -+ mret['jid'] = 'nocache' - load = {'cmd': '_return', 'id': self.opts['id']} - for key, value in six.iteritems(mret): - load[key] = value --- -2.13.7 - - diff --git a/don-t-call-zypper-with-more-than-one-no-refresh.patch b/don-t-call-zypper-with-more-than-one-no-refresh.patch new file mode 100644 index 0000000..aaa65bf --- /dev/null +++ b/don-t-call-zypper-with-more-than-one-no-refresh.patch @@ -0,0 +1,42 @@ +From 1c3f8f32d475701e8b7fab64b8cb9dcd44b587d4 Mon Sep 17 00:00:00 2001 +From: =?UTF-8?q?C=C3=A9dric=20Bosdonnat?= +Date: Tue, 29 Jan 2019 09:44:03 +0100 +Subject: [PATCH] Don't call zypper with more than one --no-refresh + +Now zypper started being picky and errors out when --no-refresh is +passed twice. Make sure we won't hit this. 
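A minimal illustration of the guard, with hypothetical names; the real change lands in the `_Zypper` command assembly shown below:

```python
# Only append --no-refresh when the caller has not already passed it,
# so zypper never sees the flag twice.
def build_cmd(args, refresh=False):
    cmd = ['zypper', '--non-interactive']
    if not refresh and '--no-refresh' not in args:
        cmd.append('--no-refresh')
    return cmd + list(args)

assert build_cmd(['foo']) == ['zypper', '--non-interactive',
                              '--no-refresh', 'foo']
# A caller-supplied --no-refresh is kept, but not duplicated:
assert build_cmd(['--no-refresh', 'bar']).count('--no-refresh') == 1
```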
+--- + salt/modules/zypperpkg.py | 2 +- + tests/unit/modules/test_zypperpkg.py | 2 +- + 2 files changed, 2 insertions(+), 2 deletions(-) + +diff --git a/salt/modules/zypperpkg.py b/salt/modules/zypperpkg.py +index c442337c58..bab9e22dec 100644 +--- a/salt/modules/zypperpkg.py ++++ b/salt/modules/zypperpkg.py +@@ -291,7 +291,7 @@ class _Zypper(object): + self.__called = True + if self.__xml: + self.__cmd.append('--xmlout') +- if not self.__refresh: ++ if not self.__refresh and '--no-refresh' not in args: + self.__cmd.append('--no-refresh') + if self.__root: + self.__cmd.extend(['--root', self.__root]) +diff --git a/tests/unit/modules/test_zypperpkg.py b/tests/unit/modules/test_zypperpkg.py +index e7474ff777..9d109a431d 100644 +--- a/tests/unit/modules/test_zypperpkg.py ++++ b/tests/unit/modules/test_zypperpkg.py +@@ -141,7 +141,7 @@ class ZypperTestCase(TestCase, LoaderModuleMockMixin): + self.assertEqual(zypper.__zypper__.call('foo'), stdout_xml_snippet) + self.assertEqual(len(sniffer.calls), 1) + +- zypper.__zypper__.call('bar') ++ zypper.__zypper__.call('--no-refresh', 'bar') + self.assertEqual(len(sniffer.calls), 2) + self.assertEqual(sniffer.calls[0]['args'][0], ['zypper', '--non-interactive', '--no-refresh', 'foo']) + self.assertEqual(sniffer.calls[1]['args'][0], ['zypper', '--non-interactive', '--no-refresh', 'bar']) +-- +2.20.1 + + diff --git a/don-t-error-on-retcode-0-in-libcrypto.openssl_init_c.patch b/don-t-error-on-retcode-0-in-libcrypto.openssl_init_c.patch deleted file mode 100644 index 0a754c4..0000000 --- a/don-t-error-on-retcode-0-in-libcrypto.openssl_init_c.patch +++ /dev/null @@ -1,33 +0,0 @@ -From 34089db15e7d3a1e361789f04613d0a13138dea0 Mon Sep 17 00:00:00 2001 -From: rallytime -Date: Fri, 13 Jul 2018 12:42:46 -0400 -Subject: [PATCH] Don't error on retcode 0 in - libcrypto.OPENSSL_init_crypto call - -Fixes #46884 ---- - salt/utils/rsax931.py | 7 +++---- - 1 file changed, 3 insertions(+), 4 deletions(-) - -diff --git a/salt/utils/rsax931.py b/salt/utils/rsax931.py -index 168c02734b..6bfef41bd3 100644 ---- a/salt/utils/rsax931.py -+++ b/salt/utils/rsax931.py -@@ -71,10 +71,9 @@ def _init_libcrypto(): - libcrypto.RSA_public_decrypt.argtypes = (c_int, c_char_p, c_char_p, c_void_p, c_int) - - try: -- if libcrypto.OPENSSL_init_crypto(OPENSSL_INIT_NO_LOAD_CONFIG | -- OPENSSL_INIT_ADD_ALL_CIPHERS | -- OPENSSL_INIT_ADD_ALL_DIGESTS, None) != 1: -- raise OSError("Failed to initialize OpenSSL library (OPENSSL_init_crypto failed)") -+ libcrypto.OPENSSL_init_crypto(OPENSSL_INIT_NO_LOAD_CONFIG | -+ OPENSSL_INIT_ADD_ALL_CIPHERS | -+ OPENSSL_INIT_ADD_ALL_DIGESTS, None) - except AttributeError: - # Support for OpenSSL < 1.1 (OPENSSL_API_COMPAT < 0x10100000L) - libcrypto.OPENSSL_no_config() --- -2.19.2 - - diff --git a/fall-back-to-pymysql.patch b/fall-back-to-pymysql.patch index c135d4e..784bc24 100644 --- a/fall-back-to-pymysql.patch +++ b/fall-back-to-pymysql.patch @@ -1,4 +1,4 @@ -From 9e0c0bbc1b48fa7065a9d0f50bd7111789712e2d Mon Sep 17 00:00:00 2001 +From d3b2f157643845d2659a226ba72ce24ce1d2a73d Mon Sep 17 00:00:00 2001 From: Maximilian Meister Date: Thu, 5 Apr 2018 13:23:23 +0200 Subject: [PATCH] fall back to PyMySQL @@ -7,311 +7,32 @@ same is already done in modules (see #26803) Signed-off-by: Maximilian Meister --- - salt/auth/mysql.py | 25 ++++++++++++++++++++++--- - salt/cache/mysql_cache.py | 28 +++++++++++++++++++--------- - salt/modules/mysql.py | 22 ++++++++++------------ - salt/pillar/mysql.py | 21 ++++++++++++++++----- - salt/returners/mysql.py | 29 
+++++++++++++++++++++-------- - tests/unit/pillar/test_mysql.py | 2 +- - 6 files changed, 89 insertions(+), 38 deletions(-) + salt/modules/mysql.py | 4 ++-- + 1 file changed, 2 insertions(+), 2 deletions(-) -diff --git a/salt/auth/mysql.py b/salt/auth/mysql.py -index 8bc18a4101..86d00a4373 100644 ---- a/salt/auth/mysql.py -+++ b/salt/auth/mysql.py -@@ -55,10 +55,29 @@ import logging - log = logging.getLogger(__name__) - - try: -+ # Trying to import MySQLdb - import MySQLdb -- HAS_MYSQL = True -+ import MySQLdb.cursors -+ import MySQLdb.converters -+ from MySQLdb.connections import OperationalError - except ImportError: -- HAS_MYSQL = False -+ try: -+ # MySQLdb import failed, try to import PyMySQL -+ import pymysql -+ pymysql.install_as_MySQLdb() -+ import MySQLdb -+ import MySQLdb.cursors -+ import MySQLdb.converters -+ from MySQLdb.err import OperationalError -+ except ImportError: -+ MySQLdb = None -+ -+ -+def __virtual__(): -+ ''' -+ Confirm that a python mysql client is installed. -+ ''' -+ return bool(MySQLdb), 'No python mysql client installed.' if MySQLdb is None else '' - - - def __get_connection_info(): -@@ -95,7 +114,7 @@ def auth(username, password): - _info['username'], - _info['password'], - _info['database']) -- except MySQLdb.OperationalError as e: -+ except OperationalError as e: - log.error(e) - return False - -diff --git a/salt/cache/mysql_cache.py b/salt/cache/mysql_cache.py -index 9d6aa17987..8b0a942310 100644 ---- a/salt/cache/mysql_cache.py -+++ b/salt/cache/mysql_cache.py -@@ -46,11 +46,24 @@ value to ``mysql``: - from __future__ import absolute_import, print_function, unicode_literals - from time import sleep - import logging -+ - try: -+ # Trying to import MySQLdb - import MySQLdb -- HAS_MYSQL = True -+ import MySQLdb.cursors -+ import MySQLdb.converters -+ from MySQLdb.connections import OperationalError - except ImportError: -- HAS_MYSQL = False -+ try: -+ # MySQLdb import failed, try to import PyMySQL -+ import pymysql -+ pymysql.install_as_MySQLdb() -+ import MySQLdb -+ import MySQLdb.cursors -+ import MySQLdb.converters -+ from MySQLdb.err import OperationalError -+ except ImportError: -+ MySQLdb = None - - from salt.exceptions import SaltCacheError - -@@ -71,12 +84,9 @@ __func_alias__ = {'ls': 'list'} - - def __virtual__(): - ''' -- Confirm that python-mysql package is installed. -+ Confirm that a python mysql client is installed. - ''' -- if not HAS_MYSQL: -- return (False, "Please install python-mysql package to use mysql data " -- "cache driver") -- return __virtualname__ -+ return bool(MySQLdb), 'No python mysql client installed.' if MySQLdb is None else '' - - - def run_query(conn, query, retries=3): -@@ -84,13 +94,13 @@ def run_query(conn, query, retries=3): - Get a cursor and run a query. Reconnect up to `retries` times if - needed. 
- Returns: cursor, affected rows counter -- Raises: SaltCacheError, AttributeError, MySQLdb.OperationalError -+ Raises: SaltCacheError, AttributeError, OperationalError - ''' - try: - cur = conn.cursor() - out = cur.execute(query) - return cur, out -- except (AttributeError, MySQLdb.OperationalError) as e: -+ except (AttributeError, OperationalError) as e: - if retries == 0: - raise - # reconnect creating new client diff --git a/salt/modules/mysql.py b/salt/modules/mysql.py -index 833a766a97..a5965f3a25 100644 +index de8916f4f2..64c773f40a 100644 --- a/salt/modules/mysql.py +++ b/salt/modules/mysql.py -@@ -51,13 +51,14 @@ import salt.utils.stringutils - from salt.ext import six - # pylint: disable=import-error - from salt.ext.six.moves import range, zip # pylint: disable=no-name-in-module,redefined-builtin -+ - try: -- # Try to import MySQLdb -+ # Trying to import MySQLdb - import MySQLdb +@@ -58,7 +58,7 @@ try: import MySQLdb.cursors import MySQLdb.converters from MySQLdb.constants import FIELD_TYPE, FLAG -- HAS_MYSQLDB = True +- from MySQLdb import OperationalError + from MySQLdb.connections import OperationalError except ImportError: try: # MySQLdb import failed, try to import PyMySQL -@@ -67,10 +68,9 @@ except ImportError: +@@ -68,7 +68,7 @@ except ImportError: import MySQLdb.cursors import MySQLdb.converters from MySQLdb.constants import FIELD_TYPE, FLAG -- HAS_MYSQLDB = True +- from MySQLdb import OperationalError + from MySQLdb.err import OperationalError except ImportError: -- # No MySQL Connector installed, return False -- HAS_MYSQLDB = False -+ MySQLdb = None - - log = logging.getLogger(__name__) - -@@ -195,11 +195,9 @@ And theses could be mixed, in a like query value with args: 'f\_o\%%o`b\'a"r' - - def __virtual__(): - ''' -- Only load this module if the mysql libraries exist -+ Confirm that a python mysql client is installed. - ''' -- if HAS_MYSQLDB: -- return True -- return (False, 'The mysql execution module cannot be loaded: neither MySQLdb nor PyMySQL is available.') -+ return bool(MySQLdb), 'No python mysql client installed.' 
if MySQLdb is None else '' - - - def __check_table(name, table, **connection_args): -@@ -331,7 +329,7 @@ def _connect(**kwargs): - connargs.pop('passwd') - try: - dbc = MySQLdb.connect(**connargs) -- except MySQLdb.OperationalError as exc: -+ except OperationalError as exc: - err = 'MySQL Error {0}: {1}'.format(*exc) - __context__['mysql.error'] = err - log.error(err) -@@ -647,7 +645,7 @@ def query(database, query, **connection_args): - log.debug('Using db: %s to run query %s', database, query) - try: - affected = _execute(cur, query) -- except MySQLdb.OperationalError as exc: -+ except OperationalError as exc: - err = 'MySQL Error {0}: {1}'.format(*exc) - __context__['mysql.error'] = err - log.error(err) -@@ -772,7 +770,7 @@ def status(**connection_args): - qry = 'SHOW STATUS' - try: - _execute(cur, qry) -- except MySQLdb.OperationalError as exc: -+ except OperationalError as exc: - err = 'MySQL Error {0}: {1}'.format(*exc) - __context__['mysql.error'] = err - log.error(err) -diff --git a/salt/pillar/mysql.py b/salt/pillar/mysql.py -index 8029e5c197..d3f9619ad5 100644 ---- a/salt/pillar/mysql.py -+++ b/salt/pillar/mysql.py -@@ -59,16 +59,27 @@ log = logging.getLogger(__name__) - - # Import third party libs - try: -+ # Trying to import MySQLdb - import MySQLdb -- HAS_MYSQL = True -+ import MySQLdb.cursors -+ import MySQLdb.converters - except ImportError: -- HAS_MYSQL = False -+ try: -+ # MySQLdb import failed, try to import PyMySQL -+ import pymysql -+ pymysql.install_as_MySQLdb() -+ import MySQLdb -+ import MySQLdb.cursors -+ import MySQLdb.converters -+ except ImportError: -+ MySQLdb = None - - - def __virtual__(): -- if not HAS_MYSQL: -- return False -- return True -+ ''' -+ Confirm that a python mysql client is installed. -+ ''' -+ return bool(MySQLdb), 'No python mysql client installed.' if MySQLdb is None else '' - - - class MySQLExtPillar(SqlBaseExtPillar): -diff --git a/salt/returners/mysql.py b/salt/returners/mysql.py -index af6698142b..85892cb06c 100644 ---- a/salt/returners/mysql.py -+++ b/salt/returners/mysql.py -@@ -155,11 +155,24 @@ import salt.exceptions - - # Import 3rd-party libs - from salt.ext import six -+ - try: -+ # Trying to import MySQLdb - import MySQLdb -- HAS_MYSQL = True -+ import MySQLdb.cursors -+ import MySQLdb.converters -+ from MySQLdb.connections import OperationalError - except ImportError: -- HAS_MYSQL = False -+ try: -+ # MySQLdb import failed, try to import PyMySQL -+ import pymysql -+ pymysql.install_as_MySQLdb() -+ import MySQLdb -+ import MySQLdb.cursors -+ import MySQLdb.converters -+ from MySQLdb.err import OperationalError -+ except ImportError: -+ MySQLdb = None - - log = logging.getLogger(__name__) - -@@ -168,10 +181,10 @@ __virtualname__ = 'mysql' - - - def __virtual__(): -- if not HAS_MYSQL: -- return False, 'Could not import mysql returner; ' \ -- 'mysql python client is not installed.' -- return True -+ ''' -+ Confirm that a python mysql client is installed. -+ ''' -+ return bool(MySQLdb), 'No python mysql client installed.' 
if MySQLdb is None else '' - - - def _get_options(ret=None): -@@ -228,7 +241,7 @@ def _get_serv(ret=None, commit=False): - conn = __context__['mysql_returner_conn'] - conn.ping() - connect = False -- except MySQLdb.connections.OperationalError as exc: -+ except OperationalError as exc: - log.debug('OperationalError on ping: %s', exc) - - if connect: -@@ -254,7 +267,7 @@ def _get_serv(ret=None, commit=False): - __context__['mysql_returner_conn'] = conn - except TypeError: - pass -- except MySQLdb.connections.OperationalError as exc: -+ except OperationalError as exc: - raise salt.exceptions.SaltMasterError('MySQL returner could not connect to database: {exc}'.format(exc=exc)) - - cursor = conn.cursor() -diff --git a/tests/unit/pillar/test_mysql.py b/tests/unit/pillar/test_mysql.py -index a242eac1a1..f6a2d0a44b 100644 ---- a/tests/unit/pillar/test_mysql.py -+++ b/tests/unit/pillar/test_mysql.py -@@ -12,7 +12,7 @@ import salt.pillar.mysql as mysql - - - @skipIf(NO_MOCK, NO_MOCK_REASON) --@skipIf(not mysql.HAS_MYSQL, 'MySQL-python module not installed') -+@skipIf(mysql.MySQLdb is None, 'MySQL-python module not installed') - class MysqlPillarTestCase(TestCase): - maxDiff = None + MySQLdb = None -- -2.13.7 +2.17.1 diff --git a/feat-add-grain-for-all-fqdns.patch b/feat-add-grain-for-all-fqdns.patch deleted file mode 100644 index f604e76..0000000 --- a/feat-add-grain-for-all-fqdns.patch +++ /dev/null @@ -1,88 +0,0 @@ -From 6e5f0fbbe3c232c7d5212d4fddfe52b5a5a71597 Mon Sep 17 00:00:00 2001 -From: Michele Bologna -Date: Thu, 14 Dec 2017 18:20:02 +0100 -Subject: [PATCH] Feat: add grain for all FQDNs - -This PR adds a grain named fqdns to the grains. -fqdns represents all the FQDNs known for the system on all available interfaces (excluding lo). - -Note: hostname != FQDN - -hostname is the UNIX name of the machine. A machine can have one and only one hostname. -FQDN is host.domain that resolves to an IP address that the machines is answering to. -A machine can have 1+ FQDNs. - -Upstream PR: -https://github.com/saltstack/salt/pull/45060 ---- - salt/grains/core.py | 27 +++++++++++++++++++++++++++ - tests/integration/modules/test_grains.py | 1 + - tests/unit/grains/test_core.py | 1 + - 3 files changed, 29 insertions(+) - -diff --git a/salt/grains/core.py b/salt/grains/core.py -index 8545d4368c..24de3cff6b 100644 ---- a/salt/grains/core.py -+++ b/salt/grains/core.py -@@ -1886,6 +1886,33 @@ def append_domain(): - return grain - - -+def fqdns(): -+ ''' -+ Return all known FQDNs for the system by enumerating all interfaces and -+ then trying to reverse resolve them (excluding 'lo' interface). 
-+ ''' -+ # Provides: -+ # fqdns -+ -+ grains = {} -+ fqdns = set() -+ -+ addresses = salt.utils.network.ip_addrs(include_loopback=False, -+ interface_data=_INTERFACES) -+ addresses.extend(salt.utils.network.ip_addrs6(include_loopback=False, -+ interface_data=_INTERFACES)) -+ -+ for ip in addresses: -+ try: -+ fqdns.add(socket.gethostbyaddr(ip)[0]) -+ except (socket.error, socket.herror, -+ socket.gaierror, socket.timeout) as e: -+ log.error("Exception during resolving address: " + str(e)) -+ -+ grains['fqdns'] = list(fqdns) -+ return grains -+ -+ - def ip_fqdn(): - ''' - Return ip address and FQDN grains -diff --git a/tests/integration/modules/test_grains.py b/tests/integration/modules/test_grains.py -index 616e07d455..dfa70afa03 100644 ---- a/tests/integration/modules/test_grains.py -+++ b/tests/integration/modules/test_grains.py -@@ -51,6 +51,7 @@ class TestModulesGrains(ModuleCase): - 'cpuarch', - 'domain', - 'fqdn', -+ 'fqdns', - 'gid', - 'groupname', - 'host', -diff --git a/tests/unit/grains/test_core.py b/tests/unit/grains/test_core.py -index 54c8293dcf..616c62e658 100644 ---- a/tests/unit/grains/test_core.py -+++ b/tests/unit/grains/test_core.py -@@ -7,6 +7,7 @@ - from __future__ import absolute_import, print_function, unicode_literals - import logging - import os -+import socket - - # Import Salt Testing Libs - try: --- -2.13.7 - - diff --git a/fix-async-call-to-process-manager.patch b/fix-async-call-to-process-manager.patch deleted file mode 100644 index ed37284..0000000 --- a/fix-async-call-to-process-manager.patch +++ /dev/null @@ -1,35 +0,0 @@ -From b276ee7373e88d05c01912a9d9d3a44a5d17bab6 Mon Sep 17 00:00:00 2001 -From: Daniel Wallace -Date: Mon, 13 Aug 2018 13:55:37 -0500 -Subject: [PATCH] fix async call to process manager - ---- - salt/minion.py | 4 ++-- - 1 file changed, 2 insertions(+), 2 deletions(-) - -diff --git a/salt/minion.py b/salt/minion.py -index 9c05a646ea..8b8fd797d1 100644 ---- a/salt/minion.py -+++ b/salt/minion.py -@@ -923,7 +923,7 @@ class MinionManager(MinionBase): - install_zmq() - self.io_loop = ZMQDefaultLoop.current() - self.process_manager = ProcessManager(name='MultiMinionProcessManager') -- self.io_loop.spawn_callback(self.process_manager.run, **{'async': True}) # Tornado backward compat -+ self.io_loop.spawn_callback(self.process_manager.run, **{'asynchronous': True}) # Tornado backward compat - - def __del__(self): - self.destroy() -@@ -1120,7 +1120,7 @@ class Minion(MinionBase): - time.sleep(sleep_time) - - self.process_manager = ProcessManager(name='MinionProcessManager') -- self.io_loop.spawn_callback(self.process_manager.run, **{'async': True}) -+ self.io_loop.spawn_callback(self.process_manager.run, **{'asynchronous': True}) - # We don't have the proxy setup yet, so we can't start engines - # Engines need to be able to access __proxy__ - if not salt.utils.platform.is_proxy(): --- -2.17.1 - - diff --git a/fix-decrease-loglevel-when-unable-to-resolve-addr.patch b/fix-decrease-loglevel-when-unable-to-resolve-addr.patch deleted file mode 100644 index 114e1f3..0000000 --- a/fix-decrease-loglevel-when-unable-to-resolve-addr.patch +++ /dev/null @@ -1,72 +0,0 @@ -From 5d12b612b1f7b05a13e7b8da02e50ec471a72187 Mon Sep 17 00:00:00 2001 -From: Michele Bologna -Date: Tue, 20 Mar 2018 19:27:36 +0100 -Subject: [PATCH] Fix: decrease loglevel when unable to resolve addr - -Upstream PR: https://github.com/saltstack/salt/pull/46575 - -It occurs that when the machine has multiple interfaces without an associated FQDN, Salt logs are polluted by this error. 
-Some examples: - -``` -caasp-admin:~ # uptime - 09:08am up 0:13, 2 users, load average: 1.30, 1.37, 0.98 -caasp-admin:~ # docker logs $(docker ps | grep salt-master | awk '{print $1}') 2>&1 | grep "Exception during resolving address" | wc -l -528 -``` - -``` -caasp-admin:~ # docker exec -it $(docker ps | grep salt-master | awk '{print $1}') salt '*' cmd.run uptime -b24f41eb4cc94624862ca0c9e8afcd15: - 09:08am up 0:11, 0 users, load average: 1.26, 0.83, 0.40 -admin: - 09:08am up 0:13, 2 users, load average: 1.33, 1.37, 0.99 -ba8c76af029043a39ba917f7ab2af796: - 09:08am up 0:12, 0 users, load average: 0.84, 0.63, 0.32 -7b7aa52158524556a0c46ae57569ce93: - 09:08am up 0:11, 1 user, load average: 1.05, 0.77, 0.38 -5ab0e18cbd084e9088a928a17edb86cb: - 09:08am up 0:10, 0 users, load average: 0.12, 0.25, 0.20 -1756c9cd9a9a402b91d8636400d1e512: - 09:08am up 0:09, 0 users, load average: 0.12, 0.23, 0.14 -ca: - 09:08am up 0:13, 0 users, load average: 1.33, 1.37, 0.99 -caasp-admin:~ # docker exec -it $(docker ps | grep salt-master | awk '{print $1}') salt '*' cmd.run "bash -c 'cat /var/log/salt/minion | grep \"Exception during resolving address\" | wc -l'" -admin: - 63 -ba8c76af029043a39ba917f7ab2af796: - 47 -5ab0e18cbd084e9088a928a17edb86cb: - 55 -7b7aa52158524556a0c46ae57569ce93: - 59 -b24f41eb4cc94624862ca0c9e8afcd15: - 47 -1756c9cd9a9a402b91d8636400d1e512: - 59 -ca: - 25 -``` - -This patch changes the log level of the exception to INFO, since the resolve-unable problem is not blocking. ---- - salt/grains/core.py | 2 +- - 1 file changed, 1 insertion(+), 1 deletion(-) - -diff --git a/salt/grains/core.py b/salt/grains/core.py -index 24de3cff6b..c166a43d7c 100644 ---- a/salt/grains/core.py -+++ b/salt/grains/core.py -@@ -1907,7 +1907,7 @@ def fqdns(): - fqdns.add(socket.gethostbyaddr(ip)[0]) - except (socket.error, socket.herror, - socket.gaierror, socket.timeout) as e: -- log.error("Exception during resolving address: " + str(e)) -+ log.info("Exception during resolving address: " + str(e)) - - grains['fqdns'] = list(fqdns) - return grains --- -2.13.7 - - diff --git a/fix-deprecation-warning-bsc-1095507.patch b/fix-deprecation-warning-bsc-1095507.patch deleted file mode 100644 index 4ba68ee..0000000 --- a/fix-deprecation-warning-bsc-1095507.patch +++ /dev/null @@ -1,26 +0,0 @@ -From 9289e1607ebf6f397c027d4a6edcf35c59bd600c Mon Sep 17 00:00:00 2001 -From: Mihai Dinca -Date: Wed, 6 Jun 2018 15:47:45 +0200 -Subject: [PATCH] Fix deprecation warning (bsc#1095507) - ---- - salt/utils/thin.py | 2 +- - 1 file changed, 1 insertion(+), 1 deletion(-) - -diff --git a/salt/utils/thin.py b/salt/utils/thin.py -index e4b878eb19..b99e407583 100644 ---- a/salt/utils/thin.py -+++ b/salt/utils/thin.py -@@ -546,7 +546,7 @@ def thin_sum(cachedir, form='sha1'): - thintar = gen_thin(cachedir) - code_checksum_path = os.path.join(cachedir, 'thin', 'code-checksum') - if os.path.isfile(code_checksum_path): -- with salt.utils.fopen(code_checksum_path, 'r') as fh: -+ with salt.utils.files.fopen(code_checksum_path, 'r') as fh: - code_checksum = "'{0}'".format(fh.read().strip()) - else: - code_checksum = "'0'" --- -2.13.7 - - diff --git a/fix-diffing-binary-files-in-file.get_diff-bsc-109839.patch b/fix-diffing-binary-files-in-file.get_diff-bsc-109839.patch deleted file mode 100644 index 4eb2a40..0000000 --- a/fix-diffing-binary-files-in-file.get_diff-bsc-109839.patch +++ /dev/null @@ -1,27 +0,0 @@ -From 7bda1dcd4f14da55abe38b1739b1e46ad0f5213c Mon Sep 17 00:00:00 2001 -From: Erik Johnson -Date: Fri, 13 Apr 2018 11:25:24 -0500 -Subject: 
[PATCH] Fix diffing binary files in file.get_diff (bsc#1098394) - ---- - salt/modules/file.py | 3 +-- - 1 file changed, 1 insertion(+), 2 deletions(-) - -diff --git a/salt/modules/file.py b/salt/modules/file.py -index 1b4b7e0e46..95bca7fb1b 100644 ---- a/salt/modules/file.py -+++ b/salt/modules/file.py -@@ -5008,8 +5008,7 @@ def get_diff(file1, - *salt.utils.data.decode(args) - ) - ) -- return ret -- return '' -+ return ret - - - def manage_file(name, --- -2.13.7 - - diff --git a/fix-for-ec2-rate-limit-failures.patch b/fix-for-ec2-rate-limit-failures.patch deleted file mode 100644 index e736067..0000000 --- a/fix-for-ec2-rate-limit-failures.patch +++ /dev/null @@ -1,66 +0,0 @@ -From 88a99b5beeaa51eaf646eb92d8f546f65f654008 Mon Sep 17 00:00:00 2001 -From: Daniel Wallace -Date: Wed, 25 Apr 2018 11:13:15 -0500 -Subject: [PATCH] Fix for EC2 Rate Limit Failures - -Fix for ec2 rate limit failures described here: https://bugzilla.suse.com/show_bug.cgi?id=1088888 ---- - salt/utils/aws.py | 22 ++++++++++++++++------ - 1 file changed, 16 insertions(+), 6 deletions(-) - -diff --git a/salt/utils/aws.py b/salt/utils/aws.py -index 059450e7ca..912f1466ba 100644 ---- a/salt/utils/aws.py -+++ b/salt/utils/aws.py -@@ -20,6 +20,7 @@ import hmac - import logging - import salt.config - import re -+import random - from salt.ext import six - - # Import Salt libs -@@ -442,8 +443,9 @@ def query(params=None, setname=None, requesturl=None, location=None, - ) - headers = {} - -- attempts = 5 -- while attempts > 0: -+ MAX_RETRIES = 6 -+ attempts = 0 -+ while attempts < MAX_RETRIES: - log.debug('AWS Request: %s', requesturl) - log.trace('AWS Request Parameters: %s', params_with_headers) - try: -@@ -461,15 +463,23 @@ def query(params=None, setname=None, requesturl=None, location=None, - - # check to see if we should retry the query - err_code = data.get('Errors', {}).get('Error', {}).get('Code', '') -- if attempts > 0 and err_code and err_code in AWS_RETRY_CODES: -- attempts -= 1 -+ if attempts < MAX_RETRIES and err_code and err_code in AWS_RETRY_CODES: -+ attempts += 1 - log.error( - 'AWS Response Status Code and Error: [%s %s] %s; ' - 'Attempts remaining: %s', - exc.response.status_code, exc, data, attempts - ) -- # Wait a bit before continuing to prevent throttling -- time.sleep(2) -+ # backoff an exponential amount of time to throttle requests -+ # during "API Rate Exceeded" failures as suggested by the AWS documentation here: -+ # https://docs.aws.amazon.com/AWSEC2/latest/APIReference/query-api-troubleshooting.html -+ # and also here: -+ # https://docs.aws.amazon.com/general/latest/gr/api-retries.html -+ # Failure to implement this approach results in a failure rate of >30% when using salt-cloud with -+ # "--parallel" when creating 50 or more instances with a fixed delay of 2 seconds. -+ # A failure rate of >10% is observed when using the salt-api with an asyncronous client -+ # specified (runner_async). 
-+ time.sleep(random.uniform(1, 2**attempts)) - continue - - log.error( --- -2.13.7 - - diff --git a/fix-for-errno-0-resolver-error-0-no-error-bsc-108758.patch b/fix-for-errno-0-resolver-error-0-no-error-bsc-108758.patch deleted file mode 100644 index 9fce2ae..0000000 --- a/fix-for-errno-0-resolver-error-0-no-error-bsc-108758.patch +++ /dev/null @@ -1,81 +0,0 @@ -From 826194be2a036fee80d3ca546822023416ac3a7d Mon Sep 17 00:00:00 2001 -From: Bo Maryniuk -Date: Wed, 21 Mar 2018 11:10:23 +0100 -Subject: [PATCH] Fix for [Errno 0] Resolver Error 0 (no error) - (bsc#1087581) - - * Lintfix: PEP8 ident - * Use proper levels of the error handling, use proper log formatting. - * Fix unit test for reversed fqdns return data ---- - salt/grains/core.py | 19 ++++++++++++------- - tests/unit/grains/test_core.py | 22 ++++++++++++++++++++++ - 2 files changed, 34 insertions(+), 7 deletions(-) - -diff --git a/salt/grains/core.py b/salt/grains/core.py -index c166a43d7c..dc472a6c0a 100644 ---- a/salt/grains/core.py -+++ b/salt/grains/core.py -@@ -1898,16 +1898,21 @@ def fqdns(): - fqdns = set() - - addresses = salt.utils.network.ip_addrs(include_loopback=False, -- interface_data=_INTERFACES) -+ interface_data=_INTERFACES) - addresses.extend(salt.utils.network.ip_addrs6(include_loopback=False, -- interface_data=_INTERFACES)) -- -+ interface_data=_INTERFACES)) -+ err_message = 'Exception during resolving address: %s' - for ip in addresses: - try: -- fqdns.add(socket.gethostbyaddr(ip)[0]) -- except (socket.error, socket.herror, -- socket.gaierror, socket.timeout) as e: -- log.info("Exception during resolving address: " + str(e)) -+ fqdns.add(socket.getfqdn(socket.gethostbyaddr(ip)[0])) -+ except socket.herror as err: -+ if err.errno == 0: -+ # No FQDN for this IP address, so we don't need to know this all the time. -+ log.debug("Unable to resolve address %s: %s", ip, err) -+ else: -+ log.error(err_message, err) -+ except (socket.error, socket.gaierror, socket.timeout) as err: -+ log.error(err_message, err) - - grains['fqdns'] = list(fqdns) - return grains -diff --git a/tests/unit/grains/test_core.py b/tests/unit/grains/test_core.py -index 616c62e658..dd7d5b06f8 100644 ---- a/tests/unit/grains/test_core.py -+++ b/tests/unit/grains/test_core.py -@@ -877,3 +877,25 @@ SwapTotal: 4789244 kB''' - osdata = {'kernel': 'test', } - ret = core._virtual(osdata) - self.assertEqual(ret['virtual'], virt) -+ -+ @skipIf(not salt.utils.platform.is_linux(), 'System is not Linux') -+ @patch.object(salt.utils, 'is_windows', MagicMock(return_value=False)) -+ @patch('salt.utils.network.ip_addrs', MagicMock(return_value=['1.2.3.4', '5.6.7.8'])) -+ @patch('salt.utils.network.ip_addrs6', -+ MagicMock(return_value=['fe80::a8b2:93ff:fe00:0', 'fe80::a8b2:93ff:dead:beef'])) -+ @patch('salt.utils.network.socket.getfqdn', MagicMock(side_effect=lambda v: v)) # Just pass-through -+ def test_fqdns_return(self): -+ ''' -+ test the return for a dns grain. 
test for issue: -+ https://github.com/saltstack/salt/issues/41230 -+ ''' -+ reverse_resolv_mock = [('foo.bar.baz', [], ['1.2.3.4']), -+ ('rinzler.evil-corp.com', [], ['5.6.7.8']), -+ ('foo.bar.baz', [], ['fe80::a8b2:93ff:fe00:0']), -+ ('bluesniff.foo.bar', [], ['fe80::a8b2:93ff:dead:beef'])] -+ ret = {'fqdns': ['bluesniff.foo.bar', 'foo.bar.baz', 'rinzler.evil-corp.com']} -+ with patch.object(socket, 'gethostbyaddr', side_effect=reverse_resolv_mock): -+ fqdns = core.fqdns() -+ self.assertIn('fqdns', fqdns) -+ self.assertEqual(len(fqdns['fqdns']), len(ret['fqdns'])) -+ self.assertEqual(set(fqdns['fqdns']), set(ret['fqdns'])) --- -2.13.7 - - diff --git a/fix-for-sorting-of-multi-version-packages-bsc-109717.patch b/fix-for-sorting-of-multi-version-packages-bsc-109717.patch deleted file mode 100644 index bbdc35f..0000000 --- a/fix-for-sorting-of-multi-version-packages-bsc-109717.patch +++ /dev/null @@ -1,35 +0,0 @@ -From f0f63dc8dd5979b51db71cf759d4350da1078383 Mon Sep 17 00:00:00 2001 -From: Jochen Breuer -Date: Wed, 13 Jun 2018 17:51:13 +0200 -Subject: [PATCH] Fix for sorting of multi-version packages (bsc#1097174 - and bsc#1097413) - ---- - salt/modules/rpm.py | 3 ++- - 1 file changed, 2 insertions(+), 1 deletion(-) - -diff --git a/salt/modules/rpm.py b/salt/modules/rpm.py -index 3683234f59..8e71992f81 100644 ---- a/salt/modules/rpm.py -+++ b/salt/modules/rpm.py -@@ -9,6 +9,7 @@ import logging - import os - import re - import datetime -+from distutils.version import LooseVersion - - # Import Salt libs - import salt.utils.decorators.path -@@ -609,7 +610,7 @@ def info(*packages, **kwargs): - # pick only latest versions - # (in case multiple packages installed, e.g. kernel) - ret = dict() -- for pkg_data in reversed(sorted(_ret, key=lambda x: x['edition'])): -+ for pkg_data in reversed(sorted(_ret, key=lambda x: LooseVersion(x['edition']))): - pkg_name = pkg_data.pop('name') - # Filter out GPG public keys packages - if pkg_name.startswith('gpg-pubkey'): --- -2.17.1 - - diff --git a/fix-for-suse-expanded-support-detection.patch b/fix-for-suse-expanded-support-detection.patch index 0cd6f18..5b08d54 100644 --- a/fix-for-suse-expanded-support-detection.patch +++ b/fix-for-suse-expanded-support-detection.patch @@ -1,4 +1,4 @@ -From 1c9cba3a397d53e399b82320507fb5141234c67f Mon Sep 17 00:00:00 2001 +From 616750ad4b2b2b8d55d19b81500dbd4f0aba1f74 Mon Sep 17 00:00:00 2001 From: Jochen Breuer Date: Thu, 6 Sep 2018 17:15:18 +0200 Subject: [PATCH] Fix for SUSE Expanded Support detection @@ -10,20 +10,17 @@ CentOS installation, this lead to SUSE ES being detected as CentOS. This change also adds a check for redhat-release and then marks the 'lsb_distrib_id' as RedHat. --- - salt/grains/core.py | 13 +++++++++++-- - 1 file changed, 11 insertions(+), 2 deletions(-) + salt/grains/core.py | 9 +++++++++ + 1 file changed, 9 insertions(+) diff --git a/salt/grains/core.py b/salt/grains/core.py -index dc472a6c0a..a5c3a6a8cf 100644 +index f0f1bd17c4..b2c1d475b0 100644 --- a/salt/grains/core.py +++ b/salt/grains/core.py -@@ -1593,8 +1593,17 @@ def os_data(): - grains['lsb_distrib_codename'] = \ - comps[3].replace('(', '').replace(')', '') - elif os.path.isfile('/etc/centos-release'): -- # CentOS Linux -- grains['lsb_distrib_id'] = 'CentOS' -+ log.trace('Parsing distrib info from /etc/centos-release') +@@ -1821,6 +1821,15 @@ def os_data(): + log.trace('Parsing distrib info from /etc/centos-release') + # CentOS Linux + grains['lsb_distrib_id'] = 'CentOS' + # Maybe CentOS Linux; could also be SUSE Expanded Support. 
+ # SUSE ES has both, centos-release and redhat-release. + if os.path.isfile('/etc/redhat-release'): @@ -33,11 +30,10 @@ index dc472a6c0a..a5c3a6a8cf 100644 + # This is a SUSE Expanded Support Rhel installation + grains['lsb_distrib_id'] = 'RedHat' + break -+ grains.setdefault('lsb_distrib_id', 'CentOS') with salt.utils.files.fopen('/etc/centos-release') as ifile: for line in ifile: # Need to pull out the version and codename -- -2.19.0 +2.17.1 diff --git a/fix-git_pillar-merging-across-multiple-__env__-repos.patch b/fix-git_pillar-merging-across-multiple-__env__-repos.patch index 91b790c..fc2378c 100644 --- a/fix-git_pillar-merging-across-multiple-__env__-repos.patch +++ b/fix-git_pillar-merging-across-multiple-__env__-repos.patch @@ -1,4 +1,4 @@ -From 49f8f296edf4655e2be7e564745931692ae939b7 Mon Sep 17 00:00:00 2001 +From 6747243babde058762428f9bdb0e3ef16402eadd Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Pablo=20Su=C3=A1rez=20Hern=C3=A1ndez?= Date: Tue, 6 Nov 2018 16:38:54 +0000 @@ -11,85 +11,29 @@ Test git ext_pillar across multiple repos using __env__ Remove unicode references --- - salt/utils/gitfs.py | 2 +- - tests/integration/pillar/test_git_pillar.py | 144 ++++++++++++++++++++ - tests/support/gitfs.py | 66 ++++++++- - 3 files changed, 209 insertions(+), 3 deletions(-) + tests/integration/pillar/test_git_pillar.py | 45 +++++++++++++++++++++ + 1 file changed, 45 insertions(+) -diff --git a/salt/utils/gitfs.py b/salt/utils/gitfs.py -index 6963f40226..11af741e35 100644 ---- a/salt/utils/gitfs.py -+++ b/salt/utils/gitfs.py -@@ -2975,7 +2975,7 @@ class GitPillar(GitBase): - if repo.env: - env = repo.env - else: -- env = 'base' if repo.branch == repo.base else repo.branch -+ env = 'base' if repo.branch == repo.base else repo.get_checkout_target() - if repo._mountpoint: - if self.link_mountpoint(repo): - self.pillar_dirs[repo.linkdir] = env diff --git a/tests/integration/pillar/test_git_pillar.py b/tests/integration/pillar/test_git_pillar.py -index e97e720bab..e052782311 100644 +index 5d9a374f6e..4a9553d1a1 100644 --- a/tests/integration/pillar/test_git_pillar.py +++ b/tests/integration/pillar/test_git_pillar.py -@@ -358,6 +358,38 @@ class GitPythonMixin(object): - "available on the salt master"]} +@@ -1361,6 +1361,51 @@ class TestPygit2SSH(GitPillarSSHTestBase): + 'nested_dict': {'master': True}}} ) -+ def test_includes_enabled_solves___env___with_mountpoint(self): -+ ''' -+ Test with git_pillar_includes enabled and using "__env__" as the branch -+ name for the configured repositories. -+ The "gitinfo" repository contains top.sls file with a local reference -+ and also referencing external "nowhere.foo" which is provided by "webinfo" -+ repository mounted as "nowhere". -+ ''' -+ ret = self.get_pillar('''\ -+ file_ignore_regex: [] -+ file_ignore_glob: [] -+ git_pillar_provider: gitpython -+ cachedir: {cachedir} -+ extension_modules: {extmods} -+ ext_pillar: -+ - git: -+ - __env__ {url_extra_repo}: -+ - name: gitinfo -+ - __env__ {url}: -+ - name: webinfo -+ - mountpoint: nowhere -+ ''') -+ self.assertEqual( -+ ret, -+ {'branch': 'master', -+ 'motd': 'The force will be with you. 
Always.', -+ 'mylist': ['master'], -+ 'mydict': {'master': True, -+ 'nested_list': ['master'], -+ 'nested_dict': {'master': True}}} -+ ) + - - @destructiveTest - @skipIf(NO_MOCK, NO_MOCK_REASON) -@@ -413,7 +445,12 @@ class TestGitPythonAuthenticatedHTTP(TestGitPythonHTTP, GitPythonMixin): - username=cls.username, - password=cls.password, - port=cls.nginx_port) -+ cls.url_extra_repo = 'http://{username}:{password}@127.0.0.1:{port}/extra_repo.git'.format( -+ username=cls.username, -+ password=cls.password, -+ port=cls.nginx_port) - cls.ext_opts['url'] = cls.url -+ cls.ext_opts['url_extra_repo'] = cls.url_extra_repo - cls.ext_opts['username'] = cls.username - cls.ext_opts['password'] = cls.password - -@@ -1192,6 +1229,40 @@ class TestPygit2SSH(GitPillarSSHTestBase): - ''') - self.assertEqual(ret, expected) - -+ def test_includes_enabled_solves___env___with_mountpoint(self): ++@skipIf(NO_MOCK, NO_MOCK_REASON) ++@skipIf(_windows_or_mac(), 'minion is windows or mac') ++@skip_if_not_root ++@skipIf(not HAS_PYGIT2, 'pygit2 >= {0} and libgit2 >= {1} required'.format(PYGIT2_MINVER, LIBGIT2_MINVER)) ++@skipIf(not HAS_NGINX, 'nginx not present') ++@skipIf(not HAS_VIRTUALENV, 'virtualenv not present') ++class TestPygit2HTTP(GitPillarHTTPTestBase): ++ ''' ++ Test git_pillar with pygit2 using SSH authentication ++ ''' ++ def test_single_source(self): + ''' + Test with git_pillar_includes enabled and using "__env__" as the branch + name for the configured repositories. @@ -123,205 +67,9 @@ index e97e720bab..e052782311 100644 + 'nested_dict': {'master': True}}} + ) + - - @skipIf(NO_MOCK, NO_MOCK_REASON) - @skipIf(_windows_or_mac(), 'minion is windows or mac') -@@ -1439,6 +1510,38 @@ class TestPygit2HTTP(GitPillarHTTPTestBase): - ''') - self.assertEqual(ret, expected) - -+ def test_includes_enabled_solves___env___with_mountpoint(self): -+ ''' -+ Test with git_pillar_includes enabled and using "__env__" as the branch -+ name for the configured repositories. -+ The "gitinfo" repository contains top.sls file with a local reference -+ and also referencing external "nowhere.foo" which is provided by "webinfo" -+ repository mounted as "nowhere". -+ ''' -+ ret = self.get_pillar('''\ -+ file_ignore_regex: [] -+ file_ignore_glob: [] -+ git_pillar_provider: pygit2 -+ cachedir: {cachedir} -+ extension_modules: {extmods} -+ ext_pillar: -+ - git: -+ - __env__ {url_extra_repo}: -+ - name: gitinfo -+ - __env__ {url}: -+ - name: webinfo -+ - mountpoint: nowhere -+ ''') -+ self.assertEqual( -+ ret, -+ {'branch': 'master', -+ 'motd': 'The force will be with you. Always.', -+ 'mylist': ['master'], -+ 'mydict': {'master': True, -+ 'nested_list': ['master'], -+ 'nested_dict': {'master': True}}} -+ ) -+ - - @skipIf(NO_MOCK, NO_MOCK_REASON) - @skipIf(_windows_or_mac(), 'minion is windows or mac') -@@ -1887,3 +1990,44 @@ class TestPygit2AuthenticatedHTTP(GitPillarHTTPTestBase): - - env: base - ''') - self.assertEqual(ret, expected) -+ -+ def test_includes_enabled_solves___env___with_mountpoint(self): -+ ''' -+ Test with git_pillar_includes enabled and using "__env__" as the branch -+ name for the configured repositories. -+ The "gitinfo" repository contains top.sls file with a local reference -+ and also referencing external "nowhere.foo" which is provided by "webinfo" -+ repository mounted as "nowhere". 
-+ ''' -+ ret = self.get_pillar('''\ -+ file_ignore_regex: [] -+ file_ignore_glob: [] -+ git_pillar_provider: pygit2 -+ git_pillar_user: {user} -+ git_pillar_password: {password} -+ git_pillar_insecure_auth: True -+ cachedir: {cachedir} -+ extension_modules: {extmods} -+ ext_pillar: -+ - git: -+ - __env__ {url_extra_repo}: -+ - name: gitinfo -+ - user: {user} -+ - password: {password} -+ - insecure_auth: True -+ - __env__ {url}: -+ - name: webinfo -+ - mountpoint: nowhere -+ - user: {user} -+ - password: {password} -+ - insecure_auth: True -+ ''') -+ self.assertEqual( -+ ret, -+ {'branch': 'master', -+ 'motd': 'The force will be with you. Always.', -+ 'mylist': ['master'], -+ 'mydict': {'master': True, -+ 'nested_list': ['master'], -+ 'nested_dict': {'master': True}}} -+ ) -diff --git a/tests/support/gitfs.py b/tests/support/gitfs.py -index 2afd31539d..e645c50a86 100644 ---- a/tests/support/gitfs.py -+++ b/tests/support/gitfs.py -@@ -133,9 +133,13 @@ class SSHDMixin(ModuleCase, ProcessManager, SaltReturnAssertsMixin): - cls.url = 'ssh://{username}@127.0.0.1:{port}/~/repo.git'.format( - username=cls.username, - port=cls.sshd_port) -+ cls.url_extra_repo = 'ssh://{username}@127.0.0.1:{port}/~/extra_repo.git'.format( -+ username=cls.username, -+ port=cls.sshd_port) - home = '/root/.ssh' - cls.ext_opts = { - 'url': cls.url, -+ 'url_extra_repo': cls.url_extra_repo, - 'privkey_nopass': os.path.join(home, cls.id_rsa_nopass), - 'pubkey_nopass': os.path.join(home, cls.id_rsa_nopass + '.pub'), - 'privkey_withpass': os.path.join(home, cls.id_rsa_withpass), -@@ -193,7 +197,8 @@ class WebserverMixin(ModuleCase, ProcessManager, SaltReturnAssertsMixin): - # get_unused_localhost_port() return identical port numbers. - cls.uwsgi_port = get_unused_localhost_port() - cls.url = 'http://127.0.0.1:{port}/repo.git'.format(port=cls.nginx_port) -- cls.ext_opts = {'url': cls.url} -+ cls.url_extra_repo = 'http://127.0.0.1:{port}/extra_repo.git'.format(port=cls.nginx_port) -+ cls.ext_opts = {'url': cls.url, 'url_extra_repo': cls.url_extra_repo} - # Add auth params if present (if so this will trigger the spawned - # server to turn on HTTP basic auth). - for credential_param in ('user', 'password'): -@@ -250,7 +255,7 @@ class GitTestBase(ModuleCase): - Base class for all gitfs/git_pillar tests. Must be subclassed and paired - with either SSHDMixin or WebserverMixin to provide the server. 
- ''' -- case = port = bare_repo = admin_repo = None -+ case = port = bare_repo = base_extra_repo = admin_repo = admin_extra_repo = None - maxDiff = None - git_opts = '-c user.name="Foo Bar" -c user.email=foo@bar.com' - ext_opts = {} -@@ -465,6 +470,61 @@ class GitPillarTestBase(GitTestBase, LoaderModuleMockMixin): - ''')) - _push('top_only', 'add top_only branch') - -+ def make_extra_repo(self, root_dir, user='root'): -+ self.bare_extra_repo = os.path.join(root_dir, 'extra_repo.git') -+ self.admin_extra_repo = os.path.join(root_dir, 'admin_extra') -+ -+ for dirname in (self.bare_extra_repo, self.admin_extra_repo): -+ shutil.rmtree(dirname, ignore_errors=True) -+ -+ # Create bare extra repo -+ self.run_function( -+ 'git.init', -+ [self.bare_extra_repo], -+ user=user, -+ bare=True) -+ -+ # Clone bare repo -+ self.run_function( -+ 'git.clone', -+ [self.admin_extra_repo], -+ url=self.bare_extra_repo, -+ user=user) -+ -+ def _push(branch, message): -+ self.run_function( -+ 'git.add', -+ [self.admin_extra_repo, '.'], -+ user=user) -+ self.run_function( -+ 'git.commit', -+ [self.admin_extra_repo, message], -+ user=user, -+ git_opts=self.git_opts, -+ ) -+ self.run_function( -+ 'git.push', -+ [self.admin_extra_repo], -+ remote='origin', -+ ref=branch, -+ user=user, -+ ) -+ -+ with salt.utils.files.fopen( -+ os.path.join(self.admin_extra_repo, 'top.sls'), 'w') as fp_: -+ fp_.write(textwrap.dedent('''\ -+ "{{saltenv}}": -+ '*': -+ - motd -+ - nowhere.foo -+ ''')) -+ with salt.utils.files.fopen( -+ os.path.join(self.admin_extra_repo, 'motd.sls'), 'w') as fp_: -+ fp_.write(textwrap.dedent('''\ -+ motd: The force will be with you. Always. -+ ''')) -+ _push('master', 'initial commit') -+ - - class GitPillarSSHTestBase(GitPillarTestBase, SSHDMixin): - ''' -@@ -533,6 +593,7 @@ class GitPillarSSHTestBase(GitPillarTestBase, SSHDMixin): - ) - ) - self.make_repo(root_dir, user=self.username) -+ self.make_extra_repo(root_dir, user=self.username) - - def get_pillar(self, ext_pillar_conf): + @requires_system_grains + def test_root_parameter(self, grains): ''' -@@ -579,3 +640,4 @@ class GitPillarHTTPTestBase(GitPillarTestBase, WebserverMixin): - self.spawn_server() # pylint: disable=E1120 - - self.make_repo(self.repo_dir) -+ self.make_extra_repo(self.repo_dir) -- 2.17.1 diff --git a/fix-index-error-when-running-on-python-3.patch b/fix-index-error-when-running-on-python-3.patch deleted file mode 100644 index 0d15705..0000000 --- a/fix-index-error-when-running-on-python-3.patch +++ /dev/null @@ -1,37 +0,0 @@ -From 5502f05fac89330ab26d04e29d3aa6d36ab928c5 Mon Sep 17 00:00:00 2001 -From: =?UTF-8?q?Pablo=20Su=C3=A1rez=20Hern=C3=A1ndez?= - -Date: Thu, 20 Sep 2018 11:51:58 +0100 -Subject: [PATCH] Fix index error when running on Python 3 - -Fix wrong queryformat for zypper list_provides ---- - salt/modules/zypper.py | 4 ++-- - 1 file changed, 2 insertions(+), 2 deletions(-) - -diff --git a/salt/modules/zypper.py b/salt/modules/zypper.py -index 695bce4f4e..e4423cf1fc 100644 ---- a/salt/modules/zypper.py -+++ b/salt/modules/zypper.py -@@ -2314,7 +2314,7 @@ def list_provides(**kwargs): - ''' - ret = __context__.get('pkg.list_provides') - if not ret: -- cmd = ['rpm', '-qa', '--queryformat', '[%{PROVIDES}_|-%{NAME}\n]'] -+ cmd = ['rpm', '-qa', '--queryformat', '%{PROVIDES}_|-%{NAME}\n'] - ret = dict() - for line in __salt__['cmd.run'](cmd, output_loglevel='trace', python_shell=False).splitlines(): - provide, realname = line.split('_|-') -@@ -2379,7 +2379,7 @@ def resolve_capabilities(pkgs, refresh, **kwargs): - try: - result 
= search(name, provides=True, match='exact') - if len(result) == 1: -- name = result.keys()[0] -+ name = next(iter(result.keys())) - elif len(result) > 1: - log.warn("Found ambiguous match for capability '%s'.", pkg) - except CommandExecutionError as exc: --- -2.17.1 - - diff --git a/fix-ipv6-scope-bsc-1108557.patch b/fix-ipv6-scope-bsc-1108557.patch index 1381712..58715cb 100644 --- a/fix-ipv6-scope-bsc-1108557.patch +++ b/fix-ipv6-scope-bsc-1108557.patch @@ -1,4 +1,4 @@ -From 0509f0b0f1e880e7651e2a33cf5b70ef1930a3ff Mon Sep 17 00:00:00 2001 +From b6d47a2ca7f1bed902dfc6574e6fe91d3034aa29 Mon Sep 17 00:00:00 2001 From: Bo Maryniuk Date: Fri, 28 Sep 2018 15:22:33 +0200 Subject: [PATCH] Fix IPv6 scope (bsc#1108557) @@ -69,296 +69,17 @@ Lintfix: W0611 Reverse skipping tests: if no ipaddress --- - salt/_compat.py | 287 +++++++++++++++++++++++------ - salt/cloud/clouds/saltify.py | 5 +- - salt/cloud/clouds/vagrant.py | 9 +- - salt/ext/win_inet_pton.py | 2 +- - salt/minion.py | 5 +- - salt/modules/ipset.py | 5 +- - salt/modules/network.py | 5 +- - salt/modules/vagrant.py | 6 +- - salt/utils/dns.py | 11 +- - salt/utils/minions.py | 5 +- - tests/unit/grains/test_core.py | 5 +- - tests/unit/modules/test_network.py | 15 +- - 12 files changed, 245 insertions(+), 115 deletions(-) + salt/_compat.py | 74 +++++++++++++++++++++++++++++++++++++++++++++++++ + 1 file changed, 74 insertions(+) diff --git a/salt/_compat.py b/salt/_compat.py -index 9b10646ace..0576210afc 100644 +index c10b82c0c2..8628833dcf 100644 --- a/salt/_compat.py +++ b/salt/_compat.py -@@ -2,18 +2,21 @@ - ''' - Salt compatibility code - ''' --# pylint: disable=import-error,unused-import,invalid-name -+# pylint: disable=import-error,unused-import,invalid-name,W0231,W0233 - - # Import python libs --from __future__ import absolute_import -+from __future__ import absolute_import, unicode_literals, print_function - import sys - import types -+import logging - - # Import 3rd-party libs --from salt.ext.six import binary_type, string_types, text_type -+from salt.exceptions import SaltException -+from salt.ext.six import binary_type, string_types, text_type, integer_types - from salt.ext.six.moves import cStringIO, StringIO - --HAS_XML = True -+log = logging.getLogger(__name__) -+ - try: - # Python >2.5 - import xml.etree.cElementTree as ElementTree -@@ -31,11 +34,10 @@ except Exception: - import elementtree.ElementTree as ElementTree - except Exception: - ElementTree = None -- HAS_XML = False +@@ -229,7 +229,81 @@ class IPv6InterfaceScoped(ipaddress.IPv6Interface, IPv6AddressScoped): + self.hostmask = self.network.hostmask - # True if we are running on Python 3. 
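Aside (not part of the patch set): a minimal Python 3 sketch of the scope handling this patch introduces. The link-local address is hypothetical; the point is that the zone index ('%eth0') has to be split off before the stock ipaddress module will validate the address.

import ipaddress

address = 'fe80::d210:52ff:fea8:1c3f%eth0'  # hypothetical scoped address
if '%' in address:
    address, scope = address.split('%', 1)  # keep the zone index separately
else:
    scope = None
print(ipaddress.ip_address(address), scope)  # fe80::d210:52ff:fea8:1c3f eth0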
--PY3 = sys.version_info[0] == 3 -+PY3 = sys.version_info.major == 3 - - - if PY3: -@@ -45,13 +47,12 @@ else: - import exceptions - - --if HAS_XML: -+if ElementTree is not None: - if not hasattr(ElementTree, 'ParseError'): - class ParseError(Exception): - ''' - older versions of ElementTree do not have ParseError - ''' -- pass - - ElementTree.ParseError = ParseError - -@@ -61,9 +62,7 @@ def text_(s, encoding='latin-1', errors='strict'): - If ``s`` is an instance of ``binary_type``, return - ``s.decode(encoding, errors)``, otherwise return ``s`` - ''' -- if isinstance(s, binary_type): -- return s.decode(encoding, errors) -- return s -+ return s.decode(encoding, errors) if isinstance(s, binary_type) else s - - - def bytes_(s, encoding='latin-1', errors='strict'): -@@ -71,57 +70,37 @@ def bytes_(s, encoding='latin-1', errors='strict'): - If ``s`` is an instance of ``text_type``, return - ``s.encode(encoding, errors)``, otherwise return ``s`` - ''' -- if isinstance(s, text_type): -- return s.encode(encoding, errors) -- return s -+ return s.encode(encoding, errors) if isinstance(s, text_type) else s - - --if PY3: -- def ascii_native_(s): -- if isinstance(s, text_type): -- s = s.encode('ascii') -- return str(s, 'ascii', 'strict') --else: -- def ascii_native_(s): -- if isinstance(s, text_type): -- s = s.encode('ascii') -- return str(s) -+def ascii_native_(s): -+ ''' -+ Python 3: If ``s`` is an instance of ``text_type``, return -+ ``s.encode('ascii')``, otherwise return ``str(s, 'ascii', 'strict')`` - --ascii_native_.__doc__ = ''' --Python 3: If ``s`` is an instance of ``text_type``, return --``s.encode('ascii')``, otherwise return ``str(s, 'ascii', 'strict')`` -+ Python 2: If ``s`` is an instance of ``text_type``, return -+ ``s.encode('ascii')``, otherwise return ``str(s)`` -+ ''' -+ if isinstance(s, text_type): -+ s = s.encode('ascii') - --Python 2: If ``s`` is an instance of ``text_type``, return --``s.encode('ascii')``, otherwise return ``str(s)`` --''' -+ return str(s, 'ascii', 'strict') if PY3 else s - - --if PY3: -- def native_(s, encoding='latin-1', errors='strict'): -- ''' -- If ``s`` is an instance of ``text_type``, return -- ``s``, otherwise return ``str(s, encoding, errors)`` -- ''' -- if isinstance(s, text_type): -- return s -- return str(s, encoding, errors) --else: -- def native_(s, encoding='latin-1', errors='strict'): -- ''' -- If ``s`` is an instance of ``text_type``, return -- ``s.encode(encoding, errors)``, otherwise return ``str(s)`` -- ''' -- if isinstance(s, text_type): -- return s.encode(encoding, errors) -- return str(s) -+def native_(s, encoding='latin-1', errors='strict'): -+ ''' -+ Python 3: If ``s`` is an instance of ``text_type``, return ``s``, otherwise -+ return ``str(s, encoding, errors)`` - --native_.__doc__ = ''' --Python 3: If ``s`` is an instance of ``text_type``, return ``s``, otherwise --return ``str(s, encoding, errors)`` -+ Python 2: If ``s`` is an instance of ``text_type``, return -+ ``s.encode(encoding, errors)``, otherwise return ``str(s)`` -+ ''' -+ if PY3: -+ out = s if isinstance(s, text_type) else str(s, encoding, errors) -+ else: -+ out = s.encode(encoding, errors) if isinstance(s, text_type) else str(s) - --Python 2: If ``s`` is an instance of ``text_type``, return --``s.encode(encoding, errors)``, otherwise return ``str(s)`` --''' -+ return out - - - def string_io(data=None): # cStringIO can't handle unicode -@@ -133,7 +112,199 @@ def string_io(data=None): # cStringIO can't handle unicode - except (UnicodeEncodeError, TypeError): - return 
StringIO(data) - --if PY3: -- import ipaddress --else: -- import salt.ext.ipaddress as ipaddress -+ -+try: -+ if PY3: -+ import ipaddress -+ else: -+ import salt.ext.ipaddress as ipaddress -+except ImportError: -+ ipaddress = None -+ -+ -+class IPv6AddressScoped(ipaddress.IPv6Address): -+ ''' -+ Represent and manipulate single IPv6 Addresses. -+ Scope-aware version -+ ''' -+ def __init__(self, address): -+ ''' -+ Instantiate a new IPv6 address object. Scope is moved to an attribute 'scope'. -+ -+ Args: -+ address: A string or integer representing the IP -+ -+ Additionally, an integer can be passed, so -+ IPv6Address('2001:db8::') == IPv6Address(42540766411282592856903984951653826560) -+ or, more generally -+ IPv6Address(int(IPv6Address('2001:db8::'))) == IPv6Address('2001:db8::') -+ -+ Raises: -+ AddressValueError: If address isn't a valid IPv6 address. -+ -+ :param address: -+ ''' -+ # pylint: disable-all -+ if not hasattr(self, '_is_packed_binary'): -+ # This method (below) won't be around for some Python 3 versions -+ # and we need check this differently anyway -+ self._is_packed_binary = lambda p: isinstance(p, bytes) -+ # pylint: enable-all -+ -+ if isinstance(address, string_types) and '%' in address: -+ buff = address.split('%') -+ if len(buff) != 2: -+ raise SaltException('Invalid IPv6 address: "{}"'.format(address)) -+ address, self.__scope = buff -+ else: -+ self.__scope = None -+ -+ if sys.version_info.major == 2: -+ ipaddress._BaseAddress.__init__(self, address) -+ ipaddress._BaseV6.__init__(self, address) -+ else: -+ # Python 3.4 fix. Versions higher are simply not affected -+ # https://github.com/python/cpython/blob/3.4/Lib/ipaddress.py#L543-L544 -+ self._version = 6 -+ self._max_prefixlen = ipaddress.IPV6LENGTH -+ -+ # Efficient constructor from integer. -+ if isinstance(address, integer_types): -+ self._check_int_address(address) -+ self._ip = address -+ elif self._is_packed_binary(address): -+ self._check_packed_address(address, 16) -+ self._ip = ipaddress._int_from_bytes(address, 'big') -+ else: -+ address = str(address) -+ if '/' in address: -+ raise ipaddress.AddressValueError("Unexpected '/' in {}".format(address)) -+ self._ip = self._ip_int_from_string(address) -+ -+ def _is_packed_binary(self, data): -+ ''' -+ Check if data is hexadecimal packed -+ -+ :param data: -+ :return: -+ ''' -+ packed = False -+ if len(data) == 16 and ':' not in data: -+ try: -+ packed = bool(int(str(bytearray(data)).encode('hex'), 16)) -+ except ValueError: -+ pass -+ -+ return packed -+ -+ @property -+ def scope(self): -+ ''' -+ Return scope of IPv6 address. -+ -+ :return: -+ ''' -+ return self.__scope -+ -+ def __str__(self): -+ return text_type(self._string_from_ip_int(self._ip) + -+ ('%' + self.scope if self.scope is not None else '')) -+ -+ -+class IPv6InterfaceScoped(ipaddress.IPv6Interface, IPv6AddressScoped): -+ ''' -+ Update -+ ''' -+ def __init__(self, address): -+ if isinstance(address, (bytes, int)): -+ IPv6AddressScoped.__init__(self, address) -+ self.network = ipaddress.IPv6Network(self._ip) -+ self._prefixlen = self._max_prefixlen -+ return -+ -+ addr = ipaddress._split_optional_netmask(address) -+ IPv6AddressScoped.__init__(self, addr[0]) -+ self.network = ipaddress.IPv6Network(address, strict=False) -+ self.netmask = self.network.netmask -+ self._prefixlen = self.network._prefixlen -+ self.hostmask = self.network.hostmask -+ -+ +def ip_address(address): + """Take an IP string/int and return an object of the correct type. 
+ @@ -431,229 +152,13 @@ index 9b10646ace..0576210afc 100644 + raise ValueError('{} does not appear to be an IPv4 or IPv6 interface'.format(address)) + + -+if ipaddress: -+ ipaddress.IPv6Address = IPv6AddressScoped -+ if sys.version_info.major == 2: -+ ipaddress.IPv6Interface = IPv6InterfaceScoped + if ipaddress: + ipaddress.IPv6Address = IPv6AddressScoped + if sys.version_info.major == 2: + ipaddress.IPv6Interface = IPv6InterfaceScoped + ipaddress.ip_address = ip_address + ipaddress.ip_interface = ip_interface -diff --git a/salt/cloud/clouds/saltify.py b/salt/cloud/clouds/saltify.py -index c9cc281b42..e0e56349a0 100644 ---- a/salt/cloud/clouds/saltify.py -+++ b/salt/cloud/clouds/saltify.py -@@ -27,10 +27,7 @@ import salt.utils.cloud - import salt.config as config - import salt.client - import salt.ext.six as six --if six.PY3: -- import ipaddress --else: -- import salt.ext.ipaddress as ipaddress -+from salt._compat import ipaddress - - from salt.exceptions import SaltCloudException, SaltCloudSystemExit - -diff --git a/salt/cloud/clouds/vagrant.py b/salt/cloud/clouds/vagrant.py -index a24170c78a..0fe410eb91 100644 ---- a/salt/cloud/clouds/vagrant.py -+++ b/salt/cloud/clouds/vagrant.py -@@ -25,13 +25,8 @@ import tempfile - import salt.utils - import salt.config as config - import salt.client --import salt.ext.six as six --if six.PY3: -- import ipaddress --else: -- import salt.ext.ipaddress as ipaddress --from salt.exceptions import SaltCloudException, SaltCloudSystemExit, \ -- SaltInvocationError -+from salt._compat import ipaddress -+from salt.exceptions import SaltCloudException, SaltCloudSystemExit, SaltInvocationError - - # Get logging started - log = logging.getLogger(__name__) -diff --git a/salt/ext/win_inet_pton.py b/salt/ext/win_inet_pton.py -index 1204bede10..89aba14ce9 100644 ---- a/salt/ext/win_inet_pton.py -+++ b/salt/ext/win_inet_pton.py -@@ -9,7 +9,7 @@ from __future__ import absolute_import - import socket - import ctypes - import os --import ipaddress -+from salt._compat import ipaddress - import salt.ext.six as six - - -diff --git a/salt/minion.py b/salt/minion.py -index 17e11c0ebe..9c05a646ea 100644 ---- a/salt/minion.py -+++ b/salt/minion.py -@@ -26,10 +26,7 @@ from binascii import crc32 - # Import Salt Libs - # pylint: disable=import-error,no-name-in-module,redefined-builtin - from salt.ext import six --if six.PY3: -- import ipaddress --else: -- import salt.ext.ipaddress as ipaddress -+from salt._compat import ipaddress - from salt.ext.six.moves import range - from salt.utils.zeromq import zmq, ZMQDefaultLoop, install_zmq, ZMQ_VERSION_INFO - -diff --git a/salt/modules/ipset.py b/salt/modules/ipset.py -index 7047e84c29..1a0fa0044d 100644 ---- a/salt/modules/ipset.py -+++ b/salt/modules/ipset.py -@@ -13,10 +13,7 @@ from salt.ext.six.moves import map, range - import salt.utils.path - - # Import third-party libs --if six.PY3: -- import ipaddress --else: -- import salt.ext.ipaddress as ipaddress -+from salt._compat import ipaddress - - # Set up logging - log = logging.getLogger(__name__) -diff --git a/salt/modules/network.py b/salt/modules/network.py -index 92893572a6..60f586f6bc 100644 ---- a/salt/modules/network.py -+++ b/salt/modules/network.py -@@ -26,10 +26,7 @@ from salt.exceptions import CommandExecutionError - # Import 3rd-party libs - from salt.ext import six - from salt.ext.six.moves import range # pylint: disable=import-error,no-name-in-module,redefined-builtin --if six.PY3: -- import ipaddress --else: -- import salt.ext.ipaddress as ipaddress -+from salt._compat 
import ipaddress - - - log = logging.getLogger(__name__) -diff --git a/salt/modules/vagrant.py b/salt/modules/vagrant.py -index 0592dede55..0f518c2602 100644 ---- a/salt/modules/vagrant.py -+++ b/salt/modules/vagrant.py -@@ -39,11 +39,7 @@ import salt.utils.path - import salt.utils.stringutils - from salt.exceptions import CommandExecutionError, SaltInvocationError - import salt.ext.six as six -- --if six.PY3: -- import ipaddress --else: -- import salt.ext.ipaddress as ipaddress -+from salt._compat import ipaddress - - log = logging.getLogger(__name__) - -diff --git a/salt/utils/dns.py b/salt/utils/dns.py -index db08bcb7ac..40011016fd 100644 ---- a/salt/utils/dns.py -+++ b/salt/utils/dns.py -@@ -1029,18 +1029,13 @@ def parse_resolv(src='/etc/resolv.conf'): - try: - (directive, arg) = (line[0].lower(), line[1:]) - # Drop everything after # or ; (comments) -- arg = list(itertools.takewhile( -- lambda x: x[0] not in ('#', ';'), arg)) -- -+ arg = list(itertools.takewhile(lambda x: x[0] not in ('#', ';'), arg)) - if directive == 'nameserver': -- # Split the scope (interface) if it is present -- addr, scope = arg[0].split('%', 1) if '%' in arg[0] else (arg[0], '') -+ addr = arg[0] - try: - ip_addr = ipaddress.ip_address(addr) - version = ip_addr.version -- # Rejoin scope after address validation -- if scope: -- ip_addr = '%'.join((str(ip_addr), scope)) -+ ip_addr = str(ip_addr) - if ip_addr not in nameservers: - nameservers.append(ip_addr) - if version == 4 and ip_addr not in ip4_nameservers: -diff --git a/salt/utils/minions.py b/salt/utils/minions.py -index bb0cbaa589..f282464eee 100644 ---- a/salt/utils/minions.py -+++ b/salt/utils/minions.py -@@ -26,10 +26,7 @@ import salt.cache - from salt.ext import six - - # Import 3rd-party libs --if six.PY3: -- import ipaddress --else: -- import salt.ext.ipaddress as ipaddress -+from salt._compat import ipaddress - HAS_RANGE = False - try: - import seco.range # pylint: disable=import-error -diff --git a/tests/unit/grains/test_core.py b/tests/unit/grains/test_core.py -index dd7d5b06f8..e973428add 100644 ---- a/tests/unit/grains/test_core.py -+++ b/tests/unit/grains/test_core.py -@@ -32,10 +32,7 @@ import salt.grains.core as core - - # Import 3rd-party libs - from salt.ext import six --if six.PY3: -- import ipaddress --else: -- import salt.ext.ipaddress as ipaddress -+from salt._compat import ipaddress - - log = logging.getLogger(__name__) - -diff --git a/tests/unit/modules/test_network.py b/tests/unit/modules/test_network.py -index 865f15f3e3..50fa629276 100644 ---- a/tests/unit/modules/test_network.py -+++ b/tests/unit/modules/test_network.py -@@ -20,20 +20,11 @@ from tests.support.mock import ( - ) - - # Import Salt Libs --from salt.ext import six - import salt.utils.network - import salt.utils.path - import salt.modules.network as network - from salt.exceptions import CommandExecutionError --if six.PY2: -- import salt.ext.ipaddress as ipaddress -- HAS_IPADDRESS = True --else: -- try: -- import ipaddress -- HAS_IPADDRESS = True -- except ImportError: -- HAS_IPADDRESS = False -+from salt._compat import ipaddress - - - @skipIf(NO_MOCK, NO_MOCK_REASON) -@@ -278,7 +269,7 @@ class NetworkTestCase(TestCase, LoaderModuleMockMixin): - self.assertDictEqual(network.connect('host', 'port'), - {'comment': ret, 'result': True}) - -- @skipIf(HAS_IPADDRESS is False, 'unable to import \'ipaddress\'') -+ @skipIf(not bool(ipaddress), 'unable to import \'ipaddress\'') - def test_is_private(self): - ''' - Test for Check if the given IP address is a private address -@@ 
-290,7 +281,7 @@ class NetworkTestCase(TestCase, LoaderModuleMockMixin): - return_value=True): - self.assertTrue(network.is_private('::1')) - -- @skipIf(HAS_IPADDRESS is False, 'unable to import \'ipaddress\'') -+ @skipIf(not bool(ipaddress), 'unable to import \'ipaddress\'') - def test_is_loopback(self): - ''' - Test for Check if the given IP address is a loopback address -- -2.19.0 +2.20.1 diff --git a/fix-issue-2068-test.patch b/fix-issue-2068-test.patch index c70d265..00cb130 100644 --- a/fix-issue-2068-test.patch +++ b/fix-issue-2068-test.patch @@ -1,4 +1,4 @@ -From 2916f2f3e7c6af07148863281ffaf07df21f21da Mon Sep 17 00:00:00 2001 +From 3be2bb0043f15af468f1db33b1aa1cc6f2e5797d Mon Sep 17 00:00:00 2001 From: Bo Maryniuk Date: Wed, 9 Jan 2019 16:08:19 +0100 Subject: [PATCH] Fix issue #2068 test @@ -13,7 +13,7 @@ Minor update: more correct is-dict check. 1 file changed, 6 insertions(+), 3 deletions(-) diff --git a/salt/state.py b/salt/state.py -index b4b2a00601..815ebaec24 100644 +index 91985c8edc..01ec1faf8b 100644 --- a/salt/state.py +++ b/salt/state.py @@ -25,6 +25,7 @@ import traceback @@ -24,7 +24,7 @@ index b4b2a00601..815ebaec24 100644 # Import salt libs import salt.loader -@@ -2743,16 +2744,18 @@ class State(object): +@@ -2776,16 +2777,18 @@ class State(object): ''' for chunk in high: state = high[chunk] diff --git a/fix-latin1-encoding-problems-on-file-module-bsc-1116.patch b/fix-latin1-encoding-problems-on-file-module-bsc-1116.patch deleted file mode 100644 index 5f9c966..0000000 --- a/fix-latin1-encoding-problems-on-file-module-bsc-1116.patch +++ /dev/null @@ -1,941 +0,0 @@ -From 140388e51e5b5b7ee33b776269bce67046cce32f Mon Sep 17 00:00:00 2001 -From: =?UTF-8?q?Pablo=20Su=C3=A1rez=20Hern=C3=A1ndez?= - -Date: Tue, 4 Dec 2018 16:16:18 +0000 -Subject: [PATCH] Fix latin1 encoding problems on file module - (bsc#1116837) - -_get_line_indent renamed to _set_line_indent - -_regex_to_static refactored to work on lists - -line function refactored to work on list - -Added _set_line_eol and _get_eol functions - -Setting end of line - -Make tests green - -test_line_insert_end fixed - -/sr.* pattern should raise exception - -file.line function refactored - -Make integration green. Added test for mode ensure insert before first line - -Fixed file permissions - -Removed regex compilation - -Comprehensions converting to unicode replaced by salt.utils.data.decode_list - -Empty match on delete or replace not causing IndexError exception - -List comprehension replaced - -Added comments - -Add get_diff to salt.utils.stringutils - -Make to_unicode/to_str/to_bytes helpers attempt latin-1 - -Also allow for multiple encodings to be passed - -Use new get_diff helper in file module - -Use BASE_FILES instead of redundant STATE_DIR - -Add integration test for latin-1 file diffs - -PY3 scoping fix - -In PY3 the caught exceptions now drop out of scope when leaving the for -loop. - -Add unit test for latin-1 fallback, multi-encoding - -Skip pylint false-positives - -Fix incorrect use of __salt__ when __utils__ is needed - -Add stringutils.get_diff to mocks - -Only try latin-1 from get_diff instead of by default - -Fix to_unicode test - -Since latin-1 is not being automatically decoded, we need to explicitly -pass it on the test. - -Revert "Use BASE_FILES instead of redundant STATE_DIR" - -This reverts commit ba524c81b6ae6091259157cec1259f5a7fb776c0. 
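As a reading aid, a self-contained sketch (not Salt code; the function name and sample bytes are illustrative) of the multi-encoding fallback described above: each candidate encoding is tried in order, and the last failure is re-raised only if none succeeds.

def decode_with_fallback(data, encodings=('utf-8', 'latin-1'), errors='strict'):
    exc = None
    for enc in encodings:
        try:
            return data.decode(enc, errors)
        except UnicodeDecodeError as err:
            exc = err  # remember the failure and try the next encoding
    raise exc

# latin-1 bytes are not valid UTF-8, so the second encoding kicks in:
print(decode_with_fallback(u'r\xe4ksm\xf6rg\xe5s'.encode('latin-1')))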
---- - salt/modules/file.py | 224 +++++++++--------- - salt/modules/win_file.py | 14 +- - salt/utils/stringutils.py | 118 ++++++--- - .../files/file/base/issue-48777/new.html | 5 + - .../files/file/base/issue-48777/old.html | 4 + - tests/integration/states/test_file.py | 23 ++ - tests/unit/modules/test_file.py | 102 +++++++- - tests/unit/utils/test_stringutils.py | 14 ++ - 8 files changed, 348 insertions(+), 156 deletions(-) - create mode 100644 tests/integration/files/file/base/issue-48777/new.html - create mode 100644 tests/integration/files/file/base/issue-48777/old.html - -diff --git a/salt/modules/file.py b/salt/modules/file.py -index 1b4b7e0e46..1ad0fef1ea 100644 ---- a/salt/modules/file.py -+++ b/salt/modules/file.py -@@ -12,7 +12,6 @@ from __future__ import absolute_import, print_function, unicode_literals - - # Import python libs - import datetime --import difflib - import errno - import fileinput - import fnmatch -@@ -61,6 +60,7 @@ import salt.utils.stringutils - import salt.utils.templates - import salt.utils.url - import salt.utils.user -+import salt.utils.data - from salt.exceptions import CommandExecutionError, MinionError, SaltInvocationError, get_error_message as _get_error_message - from salt.utils.files import HASHES, HASHES_REVMAP - -@@ -1570,7 +1570,7 @@ def comment_line(path, - check_perms(path, None, pre_user, pre_group, pre_mode) - - # Return a diff using the two dictionaries -- return ''.join(difflib.unified_diff(orig_file, new_file)) -+ return __utils__['stringutils.get_diff'](orig_file, new_file) - - - def _get_flags(flags): -@@ -1722,18 +1722,19 @@ def _regex_to_static(src, regex): - return None - - try: -- src = re.search(regex, src, re.M) -+ compiled = re.compile(regex, re.DOTALL) -+ src = [line for line in src if compiled.search(line) or line.count(regex)] - except Exception as ex: - raise CommandExecutionError("{0}: '{1}'".format(_get_error_message(ex), regex)) - -- return src and src.group().rstrip('\r') or regex -+ return src and src or [] - - --def _assert_occurrence(src, probe, target, amount=1): -+def _assert_occurrence(probe, target, amount=1): - ''' - Raise an exception, if there are different amount of specified occurrences in src. - ''' -- occ = src.count(probe) -+ occ = len(probe) - if occ > amount: - msg = 'more than' - elif occ < amount: -@@ -1749,7 +1750,7 @@ def _assert_occurrence(src, probe, target, amount=1): - return occ - - --def _get_line_indent(src, line, indent): -+def _set_line_indent(src, line, indent): - ''' - Indent the line with the source line. - ''' -@@ -1762,7 +1763,36 @@ def _get_line_indent(src, line, indent): - break - idt.append(c) - -- return ''.join(idt) + line.strip() -+ return ''.join(idt) + line.lstrip() -+ -+ -+def _get_eol(line): -+ match = re.search('((? 
-1 and not file_line == content) else file_line) -- for file_line in body.split(os.linesep)]) -+ body = [] -+ elif mode == 'delete' and match: -+ body = [line for line in body if line != match[0]] -+ elif mode == 'replace' and match: -+ idx = body.index(match[0]) -+ file_line = body.pop(idx) -+ body.insert(idx, _set_line_indent(file_line, content, indent)) - elif mode == 'insert': - if not location and not before and not after: - raise CommandExecutionError('On insert must be defined either "location" or "before/after" conditions.') - - if not location: - if before and after: -- _assert_occurrence(body, before, 'before') -- _assert_occurrence(body, after, 'after') -+ _assert_occurrence(before, 'before') -+ _assert_occurrence(after, 'after') -+ - out = [] -- lines = body.split(os.linesep) - in_range = False -- for line in lines: -- if line.find(after) > -1: -+ for line in body: -+ if line == after[0]: - in_range = True -- elif line.find(before) > -1 and in_range: -- out.append(_get_line_indent(line, content, indent)) -+ elif line == before[0] and in_range: -+ cnd = _set_line_indent(line, content, indent) -+ out.append(cnd) - out.append(line) -- body = os.linesep.join(out) -+ body = out - - if before and not after: -- _assert_occurrence(body, before, 'before') -- out = [] -- lines = body.split(os.linesep) -- for idx in range(len(lines)): -- _line = lines[idx] -- if _line.find(before) > -1: -- cnd = _get_line_indent(_line, content, indent) -- if not idx or (idx and _starts_till(lines[idx - 1], cnd) < 0): # Job for replace instead -- out.append(cnd) -- out.append(_line) -- body = os.linesep.join(out) -+ _assert_occurrence(before, 'before') -+ -+ idx = body.index(before[0]) -+ body = _insert_line_before(idx, body, content, indent) - - elif after and not before: -- _assert_occurrence(body, after, 'after') -- out = [] -- lines = body.split(os.linesep) -- for idx, _line in enumerate(lines): -- out.append(_line) -- cnd = _get_line_indent(_line, content, indent) -- # No duplicates or append, if "after" is the last line -- if (_line.find(after) > -1 and -- (lines[((idx + 1) < len(lines)) and idx + 1 or idx].strip() != cnd or -- idx + 1 == len(lines))): -- out.append(cnd) -- body = os.linesep.join(out) -+ _assert_occurrence(after, 'after') -+ -+ idx = body.index(after[0]) -+ body = _insert_line_after(idx, body, content, indent) - - else: - if location == 'start': -- body = os.linesep.join((content, body)) -+ if body: -+ body.insert(0, _set_line_eol(body[0], content)) -+ else: -+ body.append(content + os.linesep) - elif location == 'end': -- body = os.linesep.join((body, _get_line_indent(body[-1], content, indent) if body else content)) -+ body.append(_set_line_indent(body[-1], content, indent) if body else content) - - elif mode == 'ensure': -- after = after and after.strip() -- before = before and before.strip() - - if before and after: -- _assert_occurrence(body, before, 'before') -- _assert_occurrence(body, after, 'after') -+ _assert_occurrence(before, 'before') -+ _assert_occurrence(after, 'after') - -- is_there = bool(body.count(content)) -+ is_there = bool([l for l in body if l.count(content)]) - if not is_there: -- out = [] -- body = body.split(os.linesep) -- for idx, line in enumerate(body): -- out.append(line) -- if line.find(content) > -1: -- is_there = True -- if not is_there: -- if idx < (len(body) - 1) and line.find(after) > -1 and body[idx + 1].find(before) > -1: -- out.append(content) -- elif line.find(after) > -1: -- raise CommandExecutionError('Found more than one line between ' -- 
'boundaries "before" and "after".') -- body = os.linesep.join(out) -+ idx = body.index(after[0]) -+ if idx < (len(body) - 1) and body[idx + 1] == before[0]: -+ cnd = _set_line_indent(body[idx], content, indent) -+ body.insert(idx + 1, cnd) -+ else: -+ raise CommandExecutionError('Found more than one line between ' -+ 'boundaries "before" and "after".') - - elif before and not after: -- _assert_occurrence(body, before, 'before') -- body = body.split(os.linesep) -- out = [] -- for idx in range(len(body)): -- if body[idx].find(before) > -1: -- prev = (idx > 0 and idx or 1) - 1 -- out.append(_get_line_indent(body[idx], content, indent)) -- if _starts_till(out[prev], content) > -1: -- del out[prev] -- out.append(body[idx]) -- body = os.linesep.join(out) -+ _assert_occurrence(before, 'before') -+ -+ idx = body.index(before[0]) -+ body = _insert_line_before(idx, body, content, indent) - - elif not before and after: -- _assert_occurrence(body, after, 'after') -- body = body.split(os.linesep) -- skip = None -- out = [] -- for idx in range(len(body)): -- if skip != body[idx]: -- out.append(body[idx]) -- -- if body[idx].find(after) > -1: -- next_line = idx + 1 < len(body) and body[idx + 1] or None -- if next_line is not None and _starts_till(next_line, content) > -1: -- skip = next_line -- out.append(_get_line_indent(body[idx], content, indent)) -- body = os.linesep.join(out) -+ _assert_occurrence(after, 'after') -+ -+ idx = body.index(after[0]) -+ body = _insert_line_after(idx, body, content, indent) - - else: - raise CommandExecutionError("Wrong conditions? " - "Unable to ensure line without knowing " - "where to put it before and/or after.") - -- changed = body_before != hashlib.sha256(salt.utils.stringutils.to_bytes(body)).hexdigest() -+ if body: -+ for idx, line in enumerate(body): -+ if not _get_eol(line) and idx+1 < len(body): -+ prev = idx and idx-1 or 1 -+ body[idx] = _set_line_eol(body[prev], line) -+ # We do not need empty line at the end anymore -+ if '' == body[-1]: -+ body.pop() -+ -+ changed = body_before != hashlib.sha256(salt.utils.stringutils.to_bytes(''.join(body))).hexdigest() - - if backup and changed and __opts__['test'] is False: - try: -@@ -2032,20 +2040,15 @@ def line(path, content=None, match=None, mode=None, location=None, - if changed: - if show_changes: - with salt.utils.files.fopen(path, 'r') as fp_: -- path_content = [salt.utils.stringutils.to_unicode(x) -- for x in fp_.read().splitlines(True)] -- changes_diff = ''.join(difflib.unified_diff( -- path_content, -- [salt.utils.stringutils.to_unicode(x) -- for x in body.splitlines(True)] -- )) -+ path_content = salt.utils.data.decode_list(fp_.read().splitlines(True)) -+ changes_diff = __utils__['stringutils.get_diff'](path_content, body) - if __opts__['test'] is False: - fh_ = None - try: - # Make sure we match the file mode from salt.utils.files.fopen - mode = 'wb' if six.PY2 and salt.utils.platform.is_windows() else 'w' - fh_ = salt.utils.atomicfile.atomic_open(path, mode) -- fh_.write(body) -+ fh_.write(''.join(body)) - finally: - if fh_: - fh_.close() -@@ -2419,18 +2422,15 @@ def replace(path, - if not dry_run and not salt.utils.platform.is_windows(): - check_perms(path, None, pre_user, pre_group, pre_mode) - -- def get_changes(): -- orig_file_as_str = [salt.utils.stringutils.to_unicode(x) for x in orig_file] -- new_file_as_str = [salt.utils.stringutils.to_unicode(x) for x in new_file] -- return ''.join(difflib.unified_diff(orig_file_as_str, new_file_as_str)) -+ differences = __utils__['stringutils.get_diff'](orig_file, 
new_file) - - if show_changes: -- return get_changes() -+ return differences - - # We may have found a regex line match but don't need to change the line - # (for situations where the pattern also matches the repl). Revert the - # has_changes flag to False if the final result is unchanged. -- if not get_changes(): -+ if not differences: - has_changes = False - - return has_changes -@@ -2684,7 +2684,7 @@ def blockreplace(path, - ) - - if block_found: -- diff = ''.join(difflib.unified_diff(orig_file, new_file)) -+ diff = __utils__['stringutils.get_diff'](orig_file, new_file) - has_changes = diff is not '' - if has_changes and not dry_run: - # changes detected -@@ -5003,11 +5003,7 @@ def get_diff(file1, - else: - if show_filenames: - args.extend(files) -- ret = ''.join( -- difflib.unified_diff( -- *salt.utils.data.decode(args) -- ) -- ) -+ ret = __utils__['stringutils.get_diff'](*args) - return ret - return '' - -diff --git a/salt/modules/win_file.py b/salt/modules/win_file.py -index d321bd538e..0f5c908c8f 100644 ---- a/salt/modules/win_file.py -+++ b/salt/modules/win_file.py -@@ -58,8 +58,9 @@ from salt.modules.file import (check_hash, # pylint: disable=W0611 - RE_FLAG_TABLE, blockreplace, prepend, seek_read, seek_write, rename, - lstat, path_exists_glob, write, pardir, join, HASHES, HASHES_REVMAP, - comment, uncomment, _add_flags, comment_line, _regex_to_static, -- _get_line_indent, apply_template_on_contents, dirname, basename, -- list_backups_dir, _assert_occurrence, _starts_till) -+ _set_line_indent, apply_template_on_contents, dirname, basename, -+ list_backups_dir, _assert_occurrence, _starts_till, _set_line_eol, _get_eol, -+ _insert_line_after, _insert_line_before) - from salt.modules.file import normpath as normpath_ - - from salt.utils.functools import namespaced_function as _namespaced_function -@@ -116,8 +117,9 @@ def __virtual__(): - global blockreplace, prepend, seek_read, seek_write, rename, lstat - global write, pardir, join, _add_flags, apply_template_on_contents - global path_exists_glob, comment, uncomment, _mkstemp_copy -- global _regex_to_static, _get_line_indent, dirname, basename -+ global _regex_to_static, _set_line_indent, dirname, basename - global list_backups_dir, normpath_, _assert_occurrence, _starts_till -+ global _insert_line_before, _insert_line_after, _set_line_eol, _get_eol - - replace = _namespaced_function(replace, globals()) - search = _namespaced_function(search, globals()) -@@ -172,7 +174,11 @@ def __virtual__(): - uncomment = _namespaced_function(uncomment, globals()) - comment_line = _namespaced_function(comment_line, globals()) - _regex_to_static = _namespaced_function(_regex_to_static, globals()) -- _get_line_indent = _namespaced_function(_get_line_indent, globals()) -+ _set_line_indent = _namespaced_function(_set_line_indent, globals()) -+ _set_line_eol = _namespaced_function(_set_line_eol, globals()) -+ _get_eol = _namespaced_function(_get_eol, globals()) -+ _insert_line_after = _namespaced_function(_insert_line_after, globals()) -+ _insert_line_before = _namespaced_function(_insert_line_before, globals()) - _mkstemp_copy = _namespaced_function(_mkstemp_copy, globals()) - _add_flags = _namespaced_function(_add_flags, globals()) - apply_template_on_contents = _namespaced_function(apply_template_on_contents, globals()) -diff --git a/salt/utils/stringutils.py b/salt/utils/stringutils.py -index 2909d4aebe..f84fda70a5 100644 ---- a/salt/utils/stringutils.py -+++ b/salt/utils/stringutils.py -@@ -6,6 +6,7 @@ Functions for manipulating or otherwise 
processing strings - # Import Python libs - from __future__ import absolute_import, print_function, unicode_literals - import base64 -+import difflib - import errno - import fnmatch - import logging -@@ -31,21 +32,32 @@ def to_bytes(s, encoding=None, errors='strict'): - Given bytes, bytearray, str, or unicode (python 2), return bytes (str for - python 2) - ''' -+ if encoding is None: -+ # Try utf-8 first, and fall back to detected encoding -+ encoding = ('utf-8', __salt_system_encoding__) -+ if not isinstance(encoding, (tuple, list)): -+ encoding = (encoding,) -+ -+ if not encoding: -+ raise ValueError('encoding cannot be empty') -+ -+ exc = None - if six.PY3: - if isinstance(s, bytes): - return s - if isinstance(s, bytearray): - return bytes(s) - if isinstance(s, six.string_types): -- if encoding: -- return s.encode(encoding, errors) -- else: -+ for enc in encoding: - try: -- # Try UTF-8 first -- return s.encode('utf-8', errors) -- except UnicodeEncodeError: -- # Fall back to detected encoding -- return s.encode(__salt_system_encoding__, errors) -+ return s.encode(enc, errors) -+ except UnicodeEncodeError as err: -+ exc = err -+ continue -+ # The only way we get this far is if a UnicodeEncodeError was -+ # raised, otherwise we would have already returned (or raised some -+ # other exception). -+ raise exc # pylint: disable=raising-bad-type - raise TypeError('expected bytes, bytearray, or str') - else: - return to_str(s, encoding, errors) -@@ -61,35 +73,48 @@ def to_str(s, encoding=None, errors='strict', normalize=False): - except TypeError: - return s - -+ if encoding is None: -+ # Try utf-8 first, and fall back to detected encoding -+ encoding = ('utf-8', __salt_system_encoding__) -+ if not isinstance(encoding, (tuple, list)): -+ encoding = (encoding,) -+ -+ if not encoding: -+ raise ValueError('encoding cannot be empty') -+ - # This shouldn't be six.string_types because if we're on PY2 and we already - # have a string, we should just return it. - if isinstance(s, str): - return _normalize(s) -+ -+ exc = None - if six.PY3: - if isinstance(s, (bytes, bytearray)): -- if encoding: -- return _normalize(s.decode(encoding, errors)) -- else: -+ for enc in encoding: - try: -- # Try UTF-8 first -- return _normalize(s.decode('utf-8', errors)) -- except UnicodeDecodeError: -- # Fall back to detected encoding -- return _normalize(s.decode(__salt_system_encoding__, errors)) -+ return _normalize(s.decode(enc, errors)) -+ except UnicodeDecodeError as err: -+ exc = err -+ continue -+ # The only way we get this far is if a UnicodeDecodeError was -+ # raised, otherwise we would have already returned (or raised some -+ # other exception). -+ raise exc # pylint: disable=raising-bad-type - raise TypeError('expected str, bytes, or bytearray not {}'.format(type(s))) - else: - if isinstance(s, bytearray): - return str(s) # future lint: disable=blacklisted-function - if isinstance(s, unicode): # pylint: disable=incompatible-py3-code,undefined-variable -- if encoding: -- return _normalize(s).encode(encoding, errors) -- else: -+ for enc in encoding: - try: -- # Try UTF-8 first -- return _normalize(s).encode('utf-8', errors) -- except UnicodeEncodeError: -- # Fall back to detected encoding -- return _normalize(s).encode(__salt_system_encoding__, errors) -+ return _normalize(s).encode(enc, errors) -+ except UnicodeEncodeError as err: -+ exc = err -+ continue -+ # The only way we get this far is if a UnicodeDecodeError was -+ # raised, otherwise we would have already returned (or raised some -+ # other exception). 
-+ raise exc # pylint: disable=raising-bad-type - raise TypeError('expected str, bytearray, or unicode') - - -@@ -100,6 +125,16 @@ def to_unicode(s, encoding=None, errors='strict', normalize=False): - def _normalize(s): - return unicodedata.normalize('NFC', s) if normalize else s - -+ if encoding is None: -+ # Try utf-8 first, and fall back to detected encoding -+ encoding = ('utf-8', __salt_system_encoding__) -+ if not isinstance(encoding, (tuple, list)): -+ encoding = (encoding,) -+ -+ if not encoding: -+ raise ValueError('encoding cannot be empty') -+ -+ exc = None - if six.PY3: - if isinstance(s, str): - return _normalize(s) -@@ -113,15 +148,16 @@ def to_unicode(s, encoding=None, errors='strict', normalize=False): - if isinstance(s, unicode): # pylint: disable=incompatible-py3-code - return _normalize(s) - elif isinstance(s, (str, bytearray)): -- if encoding: -- return _normalize(s.decode(encoding, errors)) -- else: -+ for enc in encoding: - try: -- # Try UTF-8 first -- return _normalize(s.decode('utf-8', errors)) -- except UnicodeDecodeError: -- # Fall back to detected encoding -- return _normalize(s.decode(__salt_system_encoding__, errors)) -+ return _normalize(s.decode(enc, errors)) -+ except UnicodeDecodeError as err: -+ exc = err -+ continue -+ # The only way we get this far is if a UnicodeDecodeError was -+ # raised, otherwise we would have already returned (or raised some -+ # other exception). -+ raise exc # pylint: disable=raising-bad-type - raise TypeError('expected str or bytearray') - - -@@ -513,3 +549,21 @@ def get_context(template, line, num_lines=5, marker=None): - buf[error_line_in_context] += marker - - return '---\n{0}\n---'.format('\n'.join(buf)) -+ -+ -+def get_diff(a, b, *args, **kwargs): -+ ''' -+ Perform diff on two iterables containing lines from two files, and return -+ the diff as as string. Lines are normalized to str types to avoid issues -+ with unicode on PY2. 
-+ ''' -+ encoding = ('utf-8', 'latin-1', __salt_system_encoding__) -+ # Late import to avoid circular import -+ import salt.utils.data -+ return ''.join( -+ difflib.unified_diff( -+ salt.utils.data.decode_list(a, encoding=encoding), -+ salt.utils.data.decode_list(b, encoding=encoding), -+ *args, **kwargs -+ ) -+ ) -diff --git a/tests/integration/files/file/base/issue-48777/new.html b/tests/integration/files/file/base/issue-48777/new.html -new file mode 100644 -index 0000000000..2d5c1ae744 ---- /dev/null -+++ b/tests/integration/files/file/base/issue-48777/new.html -@@ -0,0 +1,5 @@ -+ -+ -+rksmrgs -+ -+ -diff --git a/tests/integration/files/file/base/issue-48777/old.html b/tests/integration/files/file/base/issue-48777/old.html -new file mode 100644 -index 0000000000..7879e1ce9f ---- /dev/null -+++ b/tests/integration/files/file/base/issue-48777/old.html -@@ -0,0 +1,4 @@ -+ -+ -+ -+ -diff --git a/tests/integration/states/test_file.py b/tests/integration/states/test_file.py -index 9064ba7cc1..30ad39de6b 100644 ---- a/tests/integration/states/test_file.py -+++ b/tests/integration/states/test_file.py -@@ -656,6 +656,29 @@ class FileTest(ModuleCase, SaltReturnAssertsMixin): - self.assertIn( - 'does not exist', ret['comment']) - -+ def test_managed_latin1_diff(self): -+ ''' -+ Tests that latin-1 file contents are represented properly in the diff -+ ''' -+ name = os.path.join(TMP, 'local_latin1_diff') -+ # Lay down the initial file -+ ret = self.run_state( -+ 'file.managed', -+ name=name, -+ source='salt://issue-48777/old.html') -+ ret = ret[next(iter(ret))] -+ assert ret['result'] is True, ret -+ -+ # Replace it with the new file and check the diff -+ ret = self.run_state( -+ 'file.managed', -+ name=name, -+ source='salt://issue-48777/new.html') -+ ret = ret[next(iter(ret))] -+ assert ret['result'] is True, ret -+ diff_lines = ret['changes']['diff'].split('\n') -+ assert '+räksmörgås' in diff_lines, diff_lines -+ - def test_directory(self): - ''' - file.directory -diff --git a/tests/unit/modules/test_file.py b/tests/unit/modules/test_file.py -index b157a577e5..66acaf9cb6 100644 ---- a/tests/unit/modules/test_file.py -+++ b/tests/unit/modules/test_file.py -@@ -57,7 +57,10 @@ class FileReplaceTestCase(TestCase, LoaderModuleMockMixin): - 'grains': {}, - }, - '__grains__': {'kernel': 'Linux'}, -- '__utils__': {'files.is_text': MagicMock(return_value=True)}, -+ '__utils__': { -+ 'files.is_text': MagicMock(return_value=True), -+ 'stringutils.get_diff': salt.utils.stringutils.get_diff, -+ }, - } - } - -@@ -235,7 +238,12 @@ class FileBlockReplaceTestCase(TestCase, LoaderModuleMockMixin): - 'grains': {}, - }, - '__grains__': {'kernel': 'Linux'}, -- '__utils__': {'files.is_text': MagicMock(return_value=True)}, -+ '__utils__': { -+ 'files.is_binary': MagicMock(return_value=False), -+ 'files.is_text': MagicMock(return_value=True), -+ 'files.get_encoding': MagicMock(return_value='utf-8'), -+ 'stringutils.get_diff': salt.utils.stringutils.get_diff, -+ }, - } - } - -@@ -528,7 +536,10 @@ class FileModuleTestCase(TestCase, LoaderModuleMockMixin): - 'cachedir': 'tmp', - 'grains': {}, - }, -- '__grains__': {'kernel': 'Linux'} -+ '__grains__': {'kernel': 'Linux'}, -+ '__utils__': { -+ 'stringutils.get_diff': salt.utils.stringutils.get_diff, -+ }, - } - } - -@@ -907,7 +918,10 @@ class FilemodLineTests(TestCase, LoaderModuleMockMixin): - 'cachedir': 'tmp', - 'grains': {}, - }, -- '__grains__': {'kernel': 'Linux'} -+ '__grains__': {'kernel': 'Linux'}, -+ '__utils__': { -+ 'stringutils.get_diff': 
salt.utils.stringutils.get_diff, -+ }, - } - } - -@@ -930,6 +944,29 @@ class FilemodLineTests(TestCase, LoaderModuleMockMixin): - self.assertIn('Cannot find text to {0}'.format(mode), - _log.warning.call_args_list[0][0][0]) - -+ @patch('os.path.realpath', MagicMock()) -+ @patch('os.path.isfile', MagicMock(return_value=True)) -+ @patch('os.stat', MagicMock()) -+ def test_line_delete_no_match(self): -+ ''' -+ Tests that when calling file.line with ``mode=delete``, -+ with not matching pattern to delete returns False -+ :return: -+ ''' -+ file_content = os.linesep.join([ -+ 'file_roots:', -+ ' base:', -+ ' - /srv/salt', -+ ' - /srv/custom' -+ ]) -+ match = 'not matching' -+ for mode in ['delete', 'replace']: -+ files_fopen = mock_open(read_data=file_content) -+ with patch('salt.utils.files.fopen', files_fopen): -+ atomic_opener = mock_open() -+ with patch('salt.utils.atomicfile.atomic_open', atomic_opener): -+ self.assertFalse(filemod.line('foo', content='foo', match=match, mode=mode)) -+ - @patch('os.path.realpath', MagicMock()) - @patch('os.path.isfile', MagicMock(return_value=True)) - def test_line_modecheck_failure(self): -@@ -1082,7 +1119,7 @@ class FilemodLineTests(TestCase, LoaderModuleMockMixin): - ' - /srv/sugar' - ]) - cfg_content = '- /srv/custom' -- for before_line in ['/srv/salt', '/srv/sa.*t', '/sr.*']: -+ for before_line in ['/srv/salt', '/srv/sa.*t']: - files_fopen = mock_open(read_data=file_content) - with patch('salt.utils.files.fopen', files_fopen): - atomic_opener = mock_open() -@@ -1092,6 +1129,32 @@ class FilemodLineTests(TestCase, LoaderModuleMockMixin): - self.assertEqual(atomic_opener().write.call_args_list[0][0][0], - file_modified) - -+ @patch('os.path.realpath', MagicMock()) -+ @patch('os.path.isfile', MagicMock(return_value=True)) -+ @patch('os.stat', MagicMock()) -+ def test_line_assert_exception_pattern(self): -+ ''' -+ Test for file.line for exception on insert with too general pattern. 
-+ -+ :return: -+ ''' -+ file_content = os.linesep.join([ -+ 'file_roots:', -+ ' base:', -+ ' - /srv/salt', -+ ' - /srv/sugar' -+ ]) -+ cfg_content = '- /srv/custom' -+ for before_line in ['/sr.*']: -+ files_fopen = mock_open(read_data=file_content) -+ with patch('salt.utils.files.fopen', files_fopen): -+ atomic_opener = mock_open() -+ with patch('salt.utils.atomicfile.atomic_open', atomic_opener): -+ with self.assertRaises(CommandExecutionError) as cm: -+ filemod.line('foo', content=cfg_content, before=before_line, mode='insert') -+ self.assertEqual(cm.exception.strerror, -+ 'Found more than expected occurrences in "before" expression') -+ - @patch('os.path.realpath', MagicMock()) - @patch('os.path.isfile', MagicMock(return_value=True)) - @patch('os.stat', MagicMock()) -@@ -1179,7 +1242,7 @@ class FilemodLineTests(TestCase, LoaderModuleMockMixin): - ' base:', - ' - /srv/salt', - ' - /srv/sugar', -- cfg_content -+ ' ' + cfg_content - ]) - files_fopen = mock_open(read_data=file_content) - with patch('salt.utils.files.fopen', files_fopen): -@@ -1273,6 +1336,33 @@ class FilemodLineTests(TestCase, LoaderModuleMockMixin): - self.assertEqual(atomic_opener().write.call_args_list[0][0][0], - file_modified) - -+ @patch('os.path.realpath', MagicMock()) -+ @patch('os.path.isfile', MagicMock(return_value=True)) -+ @patch('os.stat', MagicMock()) -+ def test_line_insert_ensure_before_first_line(self): -+ ''' -+ Test for file.line for insertion ensuring the line is before first line -+ :return: -+ ''' -+ cfg_content = '#!/bin/bash' -+ file_content = os.linesep.join([ -+ '/etc/init.d/someservice restart', -+ 'exit 0' -+ ]) -+ file_modified = os.linesep.join([ -+ cfg_content, -+ '/etc/init.d/someservice restart', -+ 'exit 0' -+ ]) -+ files_fopen = mock_open(read_data=file_content) -+ with patch('salt.utils.files.fopen', files_fopen): -+ atomic_opener = mock_open() -+ with patch('salt.utils.atomicfile.atomic_open', atomic_opener): -+ filemod.line('foo', content=cfg_content, before='/etc/init.d/someservice restart', mode='ensure') -+ self.assertEqual(len(atomic_opener().write.call_args_list), 1) -+ self.assertEqual(atomic_opener().write.call_args_list[0][0][0], -+ file_modified) -+ - @patch('os.path.realpath', MagicMock()) - @patch('os.path.isfile', MagicMock(return_value=True)) - @patch('os.stat', MagicMock()) -diff --git a/tests/unit/utils/test_stringutils.py b/tests/unit/utils/test_stringutils.py -index 9c8fd4f7c3..852f558793 100644 ---- a/tests/unit/utils/test_stringutils.py -+++ b/tests/unit/utils/test_stringutils.py -@@ -18,6 +18,9 @@ STR = BYTES = UNICODE.encode('utf-8') - # code points. Do not modify it. 
- EGGS = '\u044f\u0438\u0306\u0446\u0430' - -+LATIN1_UNICODE = 'räksmörgås' -+LATIN1_BYTES = LATIN1_UNICODE.encode('latin-1') -+ - - class StringutilsTestCase(TestCase): - def test_contains_whitespace(self): -@@ -134,6 +137,13 @@ class StringutilsTestCase(TestCase): - 'яйца' - ) - -+ self.assertEqual( -+ salt.utils.stringutils.to_unicode( -+ LATIN1_BYTES, encoding='latin-1' -+ ), -+ LATIN1_UNICODE -+ ) -+ - if six.PY3: - self.assertEqual(salt.utils.stringutils.to_unicode('plugh'), 'plugh') - self.assertEqual(salt.utils.stringutils.to_unicode('áéíóúý'), 'áéíóúý') -@@ -150,6 +160,10 @@ class StringutilsTestCase(TestCase): - with patch.object(builtins, '__salt_system_encoding__', 'CP1252'): - self.assertEqual(salt.utils.stringutils.to_unicode('Ψ'.encode('utf-8')), 'Ψ') - -+ def test_to_unicode_multi_encoding(self): -+ result = salt.utils.stringutils.to_unicode(LATIN1_BYTES, encoding=('utf-8', 'latin1')) -+ assert result == LATIN1_UNICODE -+ - def test_build_whitespace_split_regex(self): - expected_regex = '(?m)^(?:[\\s]+)?Lorem(?:[\\s]+)?ipsum(?:[\\s]+)?dolor(?:[\\s]+)?sit(?:[\\s]+)?amet\\,' \ - '(?:[\\s]+)?$' --- -2.17.1 - - diff --git a/fix-mine.get-not-returning-data-workaround-for-48020.patch b/fix-mine.get-not-returning-data-workaround-for-48020.patch deleted file mode 100644 index 5d36f29..0000000 --- a/fix-mine.get-not-returning-data-workaround-for-48020.patch +++ /dev/null @@ -1,34 +0,0 @@ -From f8c0811c3a05ef334eef1943a906fe01b13c1afc Mon Sep 17 00:00:00 2001 -From: Federico Ceratto -Date: Wed, 25 Jul 2018 10:33:09 +0000 -Subject: [PATCH] Fix mine.get not returning data (Workaround for #48020) - ---- - salt/utils/minions.py | 8 ++++---- - 1 file changed, 4 insertions(+), 4 deletions(-) - -diff --git a/salt/utils/minions.py b/salt/utils/minions.py -index c3acc6ba90..bb0cbaa589 100644 ---- a/salt/utils/minions.py -+++ b/salt/utils/minions.py -@@ -239,12 +239,12 @@ class CkMinions(object): - Retreive complete minion list from PKI dir. - Respects cache if configured - ''' -- if self.opts.get('__role') == 'master' and self.opts.get('__cli') == 'salt-run': -- # Compiling pillar directly on the master, just return the master's -- # ID as that is the only one that is available. 
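Aside (hypothetical path; not part of the patch): the hunk below pre-creates the key-cache directory and swallows OSError; a sketch of the slightly stricter standard idiom that predates os.makedirs(exist_ok=True):

import errno
import os

def ensure_dir(path):
    try:
        os.makedirs(path)
    except OSError as exc:
        if exc.errno != errno.EEXIST:  # ignore "already exists" only
            raise

ensure_dir('/tmp/pki/minions')  # safe to call repeatedly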
-- return [self.opts['id']] - minions = [] - pki_cache_fn = os.path.join(self.opts['pki_dir'], self.acc, '.key_cache') -+ try: -+ os.makedirs(os.path.dirname(pki_cache_fn)) -+ except OSError: -+ pass - try: - if self.opts['key_cache'] and os.path.exists(pki_cache_fn): - log.debug('Returning cached minion list') --- -2.17.1 - - diff --git a/fix-unboundlocalerror-in-file.get_diff.patch b/fix-unboundlocalerror-in-file.get_diff.patch deleted file mode 100644 index 5720bcf..0000000 --- a/fix-unboundlocalerror-in-file.get_diff.patch +++ /dev/null @@ -1,32 +0,0 @@ -From 854ffed98894b8aa8b51973c0ba13fb75093e715 Mon Sep 17 00:00:00 2001 -From: Erik Johnson -Date: Mon, 30 Apr 2018 10:25:40 -0500 -Subject: [PATCH] Fix UnboundLocalError in file.get_diff - -This was only in 2018.3 head and not part of a release - -Add unit test for file.get_diff - -Use a lambda instead of defining a one-line function ---- - salt/modules/file.py | 3 ++- - 1 file changed, 2 insertions(+), 1 deletion(-) - -diff --git a/salt/modules/file.py b/salt/modules/file.py -index 95bca7fb1b..1b4b7e0e46 100644 ---- a/salt/modules/file.py -+++ b/salt/modules/file.py -@@ -5008,7 +5008,8 @@ def get_diff(file1, - *salt.utils.data.decode(args) - ) - ) -- return ret -+ return ret -+ return '' - - - def manage_file(name, --- -2.13.7 - - diff --git a/fix-zypper.list_pkgs-to-be-aligned-with-pkg-state.patch b/fix-zypper.list_pkgs-to-be-aligned-with-pkg-state.patch index 3aa3ba5..3f21b30 100644 --- a/fix-zypper.list_pkgs-to-be-aligned-with-pkg-state.patch +++ b/fix-zypper.list_pkgs-to-be-aligned-with-pkg-state.patch @@ -1,4 +1,4 @@ -From 997edb18b81d73933324b299da36474c24630b42 Mon Sep 17 00:00:00 2001 +From 3b5803d31a93d2f619246d48691f52f6c65d52ee Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Pablo=20Su=C3=A1rez=20Hern=C3=A1ndez?= Date: Mon, 25 Jun 2018 13:06:40 +0100 @@ -10,102 +10,14 @@ Add unit test coverage for multiple version packages on Zypper Fix '_find_remove_targets' after aligning Zypper with pkg state --- - salt/modules/zypper.py | 57 +++++++++++++--------- - salt/states/pkg.py | 21 -------- - tests/unit/modules/test_zypper.py | 100 +++++++++++++++++++++++++------------- - 3 files changed, 102 insertions(+), 76 deletions(-) + salt/states/pkg.py | 21 --------------------- + 1 file changed, 21 deletions(-) -diff --git a/salt/modules/zypper.py b/salt/modules/zypper.py -index 05ba3d86c9..4689f84926 100644 ---- a/salt/modules/zypper.py -+++ b/salt/modules/zypper.py -@@ -38,6 +38,7 @@ import salt.utils.files - import salt.utils.functools - import salt.utils.path - import salt.utils.pkg -+import salt.utils.pkg.rpm - import salt.utils.stringutils - import salt.utils.systemd - from salt.utils.versions import LooseVersion -@@ -714,24 +715,44 @@ def list_pkgs(versions_as_list=False, **kwargs): - contextkey = 'pkg.list_pkgs' - - if contextkey not in __context__: -- -- cmd = ['rpm', '-qa', '--queryformat', ( -- "%{NAME}_|-%{VERSION}_|-%{RELEASE}_|-%{ARCH}_|-" -- "%|EPOCH?{%{EPOCH}}:{}|_|-%{INSTALLTIME}\\n")] - ret = {} -- for line in __salt__['cmd.run'](cmd, output_loglevel='trace', python_shell=False).splitlines(): -- name, pkgver, rel, arch, epoch, install_time = line.split('_|-') -- install_date = datetime.datetime.utcfromtimestamp(int(install_time)).isoformat() + "Z" -- install_date_time_t = int(install_time) -- -- all_attr = {'epoch': epoch, 'version': pkgver, 'release': rel, 'arch': arch, -- 'install_date': install_date, 'install_date_time_t': install_date_time_t} -- __salt__['pkg_resource.add_pkg'](ret, name, all_attr) -+ cmd = ['rpm', '-qa', 
'--queryformat', -+ salt.utils.pkg.rpm.QUERYFORMAT.replace('%{REPOID}', '(none)') + '\n'] -+ output = __salt__['cmd.run'](cmd, -+ python_shell=False, -+ output_loglevel='trace') -+ for line in output.splitlines(): -+ pkginfo = salt.utils.pkg.rpm.parse_pkginfo( -+ line, -+ osarch=__grains__['osarch'] -+ ) -+ if pkginfo is not None: -+ # see rpm version string rules available at https://goo.gl/UGKPNd -+ pkgver = pkginfo.version -+ epoch = '' -+ release = '' -+ if ':' in pkgver: -+ epoch, pkgver = pkgver.split(":", 1) -+ if '-' in pkgver: -+ pkgver, release = pkgver.split("-", 1) -+ all_attr = { -+ 'epoch': epoch, -+ 'version': pkgver, -+ 'release': release, -+ 'arch': pkginfo.arch, -+ 'install_date': pkginfo.install_date, -+ 'install_date_time_t': pkginfo.install_date_time_t -+ } -+ __salt__['pkg_resource.add_pkg'](ret, pkginfo.name, all_attr) - -+ _ret = {} - for pkgname in ret: -- ret[pkgname] = sorted(ret[pkgname], key=lambda d: d['version']) -+ # Filter out GPG public keys packages -+ if pkgname.startswith('gpg-pubkey'): -+ continue -+ _ret[pkgname] = sorted(ret[pkgname], key=lambda d: d['version']) - -- __context__[contextkey] = ret -+ __context__[contextkey] = _ret - - return __salt__['pkg_resource.format_pkg_list']( - __context__[contextkey], -@@ -1350,14 +1371,6 @@ def install(name=None, - - _clean_cache() - new = list_pkgs(attr=diff_attr) if not downloadonly else list_downloaded() -- -- # Handle packages which report multiple new versions -- # (affects only kernel packages at this point) -- for pkg_name in new: -- pkg_data = new[pkg_name] -- if isinstance(pkg_data, six.string_types): -- new[pkg_name] = pkg_data.split(',')[-1] -- - ret = salt.utils.data.compare_dicts(old, new) - - if errors: diff --git a/salt/states/pkg.py b/salt/states/pkg.py -index ed405cb6b5..aad87e3278 100644 +index 2034262b23..0aca1e0af8 100644 --- a/salt/states/pkg.py +++ b/salt/states/pkg.py -@@ -415,16 +415,6 @@ def _find_remove_targets(name=None, +@@ -455,16 +455,6 @@ def _find_remove_targets(name=None, if __grains__['os'] == 'FreeBSD' and origin: cver = [k for k, v in six.iteritems(cur_pkgs) if v['origin'] == pkgname] @@ -122,7 +34,7 @@ index ed405cb6b5..aad87e3278 100644 else: cver = cur_pkgs.get(pkgname, []) -@@ -854,17 +844,6 @@ def _verify_install(desired, new_pkgs, ignore_epoch=False, new_caps=None): +@@ -871,17 +861,6 @@ def _verify_install(desired, new_pkgs, ignore_epoch=False, new_caps=None): cver = new_pkgs.get(pkgname.split('%')[0]) elif __grains__['os_family'] == 'Debian': cver = new_pkgs.get(pkgname.split('=')[0]) @@ -140,172 +52,7 @@ index ed405cb6b5..aad87e3278 100644 else: cver = new_pkgs.get(pkgname) if not cver and pkgname in new_caps: -diff --git a/tests/unit/modules/test_zypper.py b/tests/unit/modules/test_zypper.py -index 6eccee568b..bb15aca11a 100644 ---- a/tests/unit/modules/test_zypper.py -+++ b/tests/unit/modules/test_zypper.py -@@ -475,7 +475,7 @@ class ZypperTestCase(TestCase, LoaderModuleMockMixin): - with patch('salt.modules.zypper.list_pkgs', MagicMock(side_effect=[ - {"kernel-default": "3.12.49-11.1"}, {"kernel-default": "3.12.49-11.1,3.12.51-60.20.2"}])): - ret = zypper.install('kernel-default', '--auto-agree-with-licenses') -- self.assertDictEqual(ret, {"kernel-default": {"old": "3.12.49-11.1", "new": "3.12.51-60.20.2"}}) -+ self.assertDictEqual(ret, {"kernel-default": {"old": "3.12.49-11.1", "new": "3.12.49-11.1,3.12.51-60.20.2"}}) - - def test_upgrade_failure(self): - ''' -@@ -540,27 +540,36 @@ Repository 'DUMMY' not found by its alias, number, or URI. 
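Aside (the sample record mirrors the fixtures below): each rpm_out entry is one '_|-'-separated record in the QUERYFORMAT layout of name, epoch, version, release, arch, repoid and install time, which list_pkgs splits back apart.

line = 'kernel-default_|-(none)_|-4.4.138_|-94.39.1_|-x86_64_|-(none)_|-1529936067'
name, epoch, version, release, arch, repoid, install_time = line.split('_|-')
assert (name, version, release, arch) == \
    ('kernel-default', '4.4.138', '94.39.1', 'x86_64')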
- data.setdefault(key, []).append(value) - - rpm_out = [ -- 'protobuf-java_|-2.6.1_|-3.1.develHead_|-noarch_|-_|-1499257756', -- 'yast2-ftp-server_|-3.1.8_|-8.1_|-x86_64_|-_|-1499257798', -- 'jose4j_|-0.4.4_|-2.1.develHead_|-noarch_|-_|-1499257756', -- 'apache-commons-cli_|-1.2_|-1.233_|-noarch_|-_|-1498636510', -- 'jakarta-commons-discovery_|-0.4_|-129.686_|-noarch_|-_|-1498636511', -- 'susemanager-build-keys-web_|-12.0_|-5.1.develHead_|-noarch_|-_|-1498636510', -+ 'protobuf-java_|-(none)_|-2.6.1_|-3.1.develHead_|-noarch_|-(none)_|-1499257756', -+ 'yast2-ftp-server_|-(none)_|-3.1.8_|-8.1_|-x86_64_|-(none)_|-1499257798', -+ 'jose4j_|-(none)_|-0.4.4_|-2.1.develHead_|-noarch_|-(none)_|-1499257756', -+ 'apache-commons-cli_|-(none)_|-1.2_|-1.233_|-noarch_|-(none)_|-1498636510', -+ 'jakarta-commons-discovery_|-(none)_|-0.4_|-129.686_|-noarch_|-(none)_|-1498636511', -+ 'susemanager-build-keys-web_|-(none)_|-12.0_|-5.1.develHead_|-noarch_|-(none)_|-1498636510', -+ 'gpg-pubkey_|-(none)_|-39db7c82_|-5847eb1f_|-(none)_|-(none)_|-1519203802', -+ 'gpg-pubkey_|-(none)_|-8a7c64f9_|-5aaa93ca_|-(none)_|-(none)_|-1529925595', -+ 'kernel-default_|-(none)_|-4.4.138_|-94.39.1_|-x86_64_|-(none)_|-1529936067', -+ 'kernel-default_|-(none)_|-4.4.73_|-5.1_|-x86_64_|-(none)_|-1503572639', -+ 'perseus-dummy_|-(none)_|-1.1_|-1.1_|-i586_|-(none)_|-1529936062', - ] -- with patch.dict(zypper.__salt__, {'cmd.run': MagicMock(return_value=os.linesep.join(rpm_out))}), \ -+ with patch.dict(zypper.__grains__, {'osarch': 'x86_64'}), \ -+ patch.dict(zypper.__salt__, {'cmd.run': MagicMock(return_value=os.linesep.join(rpm_out))}), \ - patch.dict(zypper.__salt__, {'pkg_resource.add_pkg': _add_data}), \ - patch.dict(zypper.__salt__, {'pkg_resource.format_pkg_list': pkg_resource.format_pkg_list}), \ - patch.dict(zypper.__salt__, {'pkg_resource.stringify': MagicMock()}): - pkgs = zypper.list_pkgs(versions_as_list=True) -+ self.assertFalse(pkgs.get('gpg-pubkey', False)) - for pkg_name, pkg_version in { -- 'jakarta-commons-discovery': '0.4-129.686', -- 'yast2-ftp-server': '3.1.8-8.1', -- 'protobuf-java': '2.6.1-3.1.develHead', -- 'susemanager-build-keys-web': '12.0-5.1.develHead', -- 'apache-commons-cli': '1.2-1.233', -- 'jose4j': '0.4.4-2.1.develHead'}.items(): -+ 'jakarta-commons-discovery': ['0.4-129.686'], -+ 'yast2-ftp-server': ['3.1.8-8.1'], -+ 'protobuf-java': ['2.6.1-3.1.develHead'], -+ 'susemanager-build-keys-web': ['12.0-5.1.develHead'], -+ 'apache-commons-cli': ['1.2-1.233'], -+ 'kernel-default': ['4.4.138-94.39.1', '4.4.73-5.1'], -+ 'perseus-dummy.i586': ['1.1-1.1'], -+ 'jose4j': ['0.4.4-2.1.develHead']}.items(): - self.assertTrue(pkgs.get(pkg_name)) -- self.assertEqual(pkgs[pkg_name], [pkg_version]) -+ self.assertEqual(pkgs[pkg_name], pkg_version) - - def test_list_pkgs_with_attr(self): - ''' -@@ -572,57 +581,82 @@ Repository 'DUMMY' not found by its alias, number, or URI. 
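Aside (minimal made-up data): the updated expectations below keep every installed version of a package as a list and drop gpg-pubkey pseudo-packages, roughly:

pkgs = {
    'kernel-default': ['4.4.73-5.1', '4.4.138-94.39.1'],  # both versions kept
    'gpg-pubkey': ['39db7c82-5847eb1f'],                  # pseudo-package
}
cleaned = dict((name, sorted(vers)) for name, vers in pkgs.items()
               if not name.startswith('gpg-pubkey'))
assert list(cleaned) == ['kernel-default']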
- data.setdefault(key, []).append(value) - - rpm_out = [ -- 'protobuf-java_|-2.6.1_|-3.1.develHead_|-noarch_|-_|-1499257756', -- 'yast2-ftp-server_|-3.1.8_|-8.1_|-x86_64_|-_|-1499257798', -- 'jose4j_|-0.4.4_|-2.1.develHead_|-noarch_|-_|-1499257756', -- 'apache-commons-cli_|-1.2_|-1.233_|-noarch_|-_|-1498636510', -- 'jakarta-commons-discovery_|-0.4_|-129.686_|-noarch_|-_|-1498636511', -- 'susemanager-build-keys-web_|-12.0_|-5.1.develHead_|-noarch_|-_|-1498636510', -+ 'protobuf-java_|-(none)_|-2.6.1_|-3.1.develHead_|-noarch_|-(none)_|-1499257756', -+ 'yast2-ftp-server_|-(none)_|-3.1.8_|-8.1_|-x86_64_|-(none)_|-1499257798', -+ 'jose4j_|-(none)_|-0.4.4_|-2.1.develHead_|-noarch_|-(none)_|-1499257756', -+ 'apache-commons-cli_|-(none)_|-1.2_|-1.233_|-noarch_|-(none)_|-1498636510', -+ 'jakarta-commons-discovery_|-(none)_|-0.4_|-129.686_|-noarch_|-(none)_|-1498636511', -+ 'susemanager-build-keys-web_|-(none)_|-12.0_|-5.1.develHead_|-noarch_|-(none)_|-1498636510', -+ 'gpg-pubkey_|-(none)_|-39db7c82_|-5847eb1f_|-(none)_|-(none)_|-1519203802', -+ 'gpg-pubkey_|-(none)_|-8a7c64f9_|-5aaa93ca_|-(none)_|-(none)_|-1529925595', -+ 'kernel-default_|-(none)_|-4.4.138_|-94.39.1_|-x86_64_|-(none)_|-1529936067', -+ 'kernel-default_|-(none)_|-4.4.73_|-5.1_|-x86_64_|-(none)_|-1503572639', -+ 'perseus-dummy_|-(none)_|-1.1_|-1.1_|-i586_|-(none)_|-1529936062', - ] - with patch.dict(zypper.__salt__, {'cmd.run': MagicMock(return_value=os.linesep.join(rpm_out))}), \ -+ patch.dict(zypper.__grains__, {'osarch': 'x86_64'}), \ - patch.dict(zypper.__salt__, {'pkg_resource.add_pkg': _add_data}), \ - patch.dict(zypper.__salt__, {'pkg_resource.format_pkg_list': pkg_resource.format_pkg_list}), \ - patch.dict(zypper.__salt__, {'pkg_resource.stringify': MagicMock()}): - pkgs = zypper.list_pkgs(attr=['epoch', 'release', 'arch', 'install_date_time_t']) -+ self.assertFalse(pkgs.get('gpg-pubkey', False)) - for pkg_name, pkg_attr in { -- 'jakarta-commons-discovery': { -+ 'jakarta-commons-discovery': [{ - 'version': '0.4', - 'release': '129.686', - 'arch': 'noarch', - 'install_date_time_t': 1498636511, -- }, -- 'yast2-ftp-server': { -+ }], -+ 'yast2-ftp-server': [{ - 'version': '3.1.8', - 'release': '8.1', - 'arch': 'x86_64', - 'install_date_time_t': 1499257798, -- }, -- 'protobuf-java': { -+ }], -+ 'protobuf-java': [{ - 'version': '2.6.1', - 'release': '3.1.develHead', - 'install_date_time_t': 1499257756, - 'arch': 'noarch', -- }, -- 'susemanager-build-keys-web': { -+ }], -+ 'susemanager-build-keys-web': [{ - 'version': '12.0', - 'release': '5.1.develHead', - 'arch': 'noarch', - 'install_date_time_t': 1498636510, -- }, -- 'apache-commons-cli': { -+ }], -+ 'apache-commons-cli': [{ - 'version': '1.2', - 'release': '1.233', - 'arch': 'noarch', - 'install_date_time_t': 1498636510, -+ }], -+ 'kernel-default': [{ -+ 'version': '4.4.138', -+ 'release': '94.39.1', -+ 'arch': 'x86_64', -+ 'install_date_time_t': 1529936067 - }, -- 'jose4j': { -+ { -+ 'version': '4.4.73', -+ 'release': '5.1', -+ 'arch': 'x86_64', -+ 'install_date_time_t': 1503572639, -+ }], -+ 'perseus-dummy.i586': [{ -+ 'version': '1.1', -+ 'release': '1.1', -+ 'arch': 'i586', -+ 'install_date_time_t': 1529936062, -+ }], -+ 'jose4j': [{ - 'arch': 'noarch', - 'version': '0.4.4', - 'release': '2.1.develHead', - 'install_date_time_t': 1499257756, -- }}.items(): -+ }]}.items(): - self.assertTrue(pkgs.get(pkg_name)) -- self.assertEqual(pkgs[pkg_name], [pkg_attr]) -+ self.assertEqual(pkgs[pkg_name], pkg_attr) - - def test_list_patches(self): - ''' -- -2.13.7 +2.17.1 diff --git 
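The reworked hunks above extend pkg.list_pkgs to parse the richer rpm query output, which now carries an explicit epoch column and '(none)' placeholders, and to drop gpg-pubkey pseudo-packages from the result. A minimal standalone sketch of that parsing step; the field order and the '_|-' separator follow the rpm_out fixtures above, while everything else (function name, dict shape) is illustrative rather than Salt's actual API:

def parse_rpm_line(line, sep='_|-'):
    # Fixture shape: name_|-epoch_|-version_|-release_|-arch_|-repo_|-install_time
    fields = line.split(sep)
    if len(fields) != 7:
        return None
    name, epoch, version, release, arch, repo, itime = [
        None if field == '(none)' else field for field in fields
    ]
    if name.startswith('gpg-pubkey'):
        # GPG public keys show up as entries in the rpm database but are
        # filtered out of the package list, as the test assertions expect.
        return None
    return {'name': name, 'epoch': epoch, 'version': version,
            'release': release, 'arch': arch,
            'install_date_time_t': int(itime) if itime else None}

# parse_rpm_line('kernel-default_|-(none)_|-4.4.138_|-94.39.1_|-x86_64_|-(none)_|-1529936067')
# -> {'name': 'kernel-default', 'epoch': None, 'version': '4.4.138', ...}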
a/fixed-usage-of-ipaddress.patch b/fixed-usage-of-ipaddress.patch deleted file mode 100644 index 1e71632..0000000 --- a/fixed-usage-of-ipaddress.patch +++ /dev/null @@ -1,28 +0,0 @@ -From 8ae052fbb07d7506492b430579308e4611e51754 Mon Sep 17 00:00:00 2001 -From: Jochen Breuer -Date: Sun, 22 Apr 2018 23:11:11 +0200 -Subject: [PATCH] Fixed usage of ipaddress - -ipaddress is imported either directly or from salt.ext. If we -use it, we shouldn't address it with salt.ext.ipaddress. ---- - salt/modules/network.py | 2 +- - 1 file changed, 1 insertion(+), 1 deletion(-) - -diff --git a/salt/modules/network.py b/salt/modules/network.py -index f188fd7954..92893572a6 100644 ---- a/salt/modules/network.py -+++ b/salt/modules/network.py -@@ -1140,7 +1140,7 @@ def convert_cidr(cidr): - ret = {'network': None, - 'netmask': None} - cidr = calc_net(cidr) -- network_info = salt.ext.ipaddress.ip_network(cidr) -+ network_info = ipaddress.ip_network(cidr) - ret['network'] = six.text_type(network_info.network_address) - ret['netmask'] = six.text_type(network_info.netmask) - return ret --- -2.13.7 - - diff --git a/fixes-cve-2018-15750-cve-2018-15751.patch b/fixes-cve-2018-15750-cve-2018-15751.patch index f9ef657..063c58d 100644 --- a/fixes-cve-2018-15750-cve-2018-15751.patch +++ b/fixes-cve-2018-15750-cve-2018-15751.patch @@ -1,4 +1,4 @@ -From 43b1f8fb6608c944812bc5bcd9da407624409ac7 Mon Sep 17 00:00:00 2001 +From b10ca8ee857e14915ac83a8614521495b42b5d2b Mon Sep 17 00:00:00 2001 From: Erik Johnson Date: Fri, 24 Aug 2018 10:35:55 -0500 Subject: [PATCH] Fixes: CVE-2018-15750, CVE-2018-15751 @@ -11,132 +11,43 @@ Handle Auth exceptions in run_job Update tornado test to correct authentication message --- - salt/client/__init__.py | 8 ++++ - salt/netapi/rest_cherrypy/app.py | 13 ++++++- - .../netapi/rest_cherrypy/test_app.py | 39 +++++++++++++++++++ - .../netapi/rest_tornado/test_app.py | 2 +- - 4 files changed, 60 insertions(+), 2 deletions(-) + salt/netapi/rest_cherrypy/app.py | 7 ------- + tests/integration/netapi/rest_tornado/test_app.py | 4 ++-- + 2 files changed, 2 insertions(+), 9 deletions(-) -diff --git a/salt/client/__init__.py b/salt/client/__init__.py -index dcbc1473e1..77f2a963f7 100644 ---- a/salt/client/__init__.py -+++ b/salt/client/__init__.py -@@ -349,6 +349,10 @@ class LocalClient(object): - raise SaltClientError( - 'The salt master could not be contacted. Is master running?' - ) -+ except AuthenticationError as err: -+ raise AuthenticationError(err) -+ except AuthorizationError as err: -+ raise AuthorizationError(err) - except Exception as general_exception: - # Convert to generic client error and pass along message - raise SaltClientError(general_exception) -@@ -415,6 +419,10 @@ class LocalClient(object): - raise SaltClientError( - 'The salt master could not be contacted. Is master running?' 
- ) -+ except AuthenticationError as err: -+ raise AuthenticationError(err) -+ except AuthorizationError as err: -+ raise AuthorizationError(err) - except Exception as general_exception: - # Convert to generic client error and pass along message - raise SaltClientError(general_exception) diff --git a/salt/netapi/rest_cherrypy/app.py b/salt/netapi/rest_cherrypy/app.py -index 78ea3c3fef..c272674146 100644 +index 40ee976b25..f9ca908115 100644 --- a/salt/netapi/rest_cherrypy/app.py +++ b/salt/netapi/rest_cherrypy/app.py -@@ -1167,6 +1167,13 @@ class LowDataAdapter(object): - if token: - chunk['token'] = token +@@ -1174,13 +1174,6 @@ class LowDataAdapter(object): + except (TypeError, ValueError): + raise cherrypy.HTTPError(401, 'Invalid token') -+ if 'token' in chunk: -+ # Make sure that auth token is hex -+ try: -+ int(chunk['token'], 16) -+ except (TypeError, ValueError): -+ raise cherrypy.HTTPError(401, 'Invalid token') -+ +- if 'token' in chunk: +- # Make sure that auth token is hex +- try: +- int(chunk['token'], 16) +- except (TypeError, ValueError): +- raise cherrypy.HTTPError(401, 'Invalid token') +- if client: chunk['client'] = client -@@ -2167,7 +2174,11 @@ class Events(object): - - :return bool: True if valid, False if not valid. - ''' -- if auth_token is None: -+ # Make sure that auth token is hex. If it's None, or something other -+ # than hex, this will raise a ValueError. -+ try: -+ int(auth_token, 16) -+ except ValueError: - return False - - # First check if the given token is in our session table; if so it's a -diff --git a/tests/integration/netapi/rest_cherrypy/test_app.py b/tests/integration/netapi/rest_cherrypy/test_app.py -index 000b7418bf..5865510fd7 100644 ---- a/tests/integration/netapi/rest_cherrypy/test_app.py -+++ b/tests/integration/netapi/rest_cherrypy/test_app.py -@@ -124,6 +124,45 @@ class TestRun(cptc.BaseRestCherryPyTest): - }) - self.assertEqual(response.status, '401 Unauthorized') - -+ def test_run_empty_token(self): -+ ''' -+ Test the run URL with empty token -+ ''' -+ cmd = dict(self.low, **{'token': ''}) -+ body = urlencode(cmd) -+ -+ request, response = self.request('/run', method='POST', body=body, -+ headers={ -+ 'content-type': 'application/x-www-form-urlencoded' -+ }) -+ assert response.status == '401 Unauthorized' -+ -+ def test_run_empty_token_upercase(self): -+ ''' -+ Test the run URL with empty token with upercase characters -+ ''' -+ cmd = dict(self.low, **{'ToKen': ''}) -+ body = urlencode(cmd) -+ -+ request, response = self.request('/run', method='POST', body=body, -+ headers={ -+ 'content-type': 'application/x-www-form-urlencoded' -+ }) -+ assert response.status == '401 Unauthorized' -+ -+ def test_run_wrong_token(self): -+ ''' -+ Test the run URL with incorrect token -+ ''' -+ cmd = dict(self.low, **{'token': 'bad'}) -+ body = urlencode(cmd) -+ -+ request, response = self.request('/run', method='POST', body=body, -+ headers={ -+ 'content-type': 'application/x-www-form-urlencoded' -+ }) -+ assert response.status == '401 Unauthorized' -+ - - class TestWebhookDisableAuth(cptc.BaseRestCherryPyTest): - diff --git a/tests/integration/netapi/rest_tornado/test_app.py b/tests/integration/netapi/rest_tornado/test_app.py -index beb085db1e..01abd354a7 100644 +index a6829bdd4f..da96012b41 100644 --- a/tests/integration/netapi/rest_tornado/test_app.py +++ b/tests/integration/netapi/rest_tornado/test_app.py -@@ -237,7 +237,7 @@ class TestSaltAPIHandler(_SaltnadoIntegrationTestCase): - self.assertEqual(len(ret), 3) # make sure we got 3 responses +@@ -240,8 
+240,8 @@ class TestSaltAPIHandler(_SaltnadoIntegrationTestCase): self.assertIn('jid', ret[0]) # the first 2 are regular returns self.assertIn('jid', ret[1]) -- self.assertIn('Authentication error occurred.', ret[2]) # bad auth -+ self.assertIn('Failed to authenticate', ret[2]) # bad auth - self.assertEqual(ret[0]['minions'], sorted(['minion', 'sub_minion', 'localhost'])) - self.assertEqual(ret[1]['minions'], sorted(['minion', 'sub_minion', 'localhost'])) + self.assertIn('Failed to authenticate', ret[2]) # bad auth +- self.assertEqual(ret[0]['minions'], sorted(['minion', 'sub_minion'])) +- self.assertEqual(ret[1]['minions'], sorted(['minion', 'sub_minion'])) ++ self.assertEqual(ret[0]['minions'], sorted(['minion', 'sub_minion', 'localhost'])) ++ self.assertEqual(ret[1]['minions'], sorted(['minion', 'sub_minion', 'localhost'])) + def test_simple_local_async_post_no_tgt(self): + low = [{'client': 'local_async', -- 2.17.1 diff --git a/fixing-issue-when-a-valid-token-is-generated-even-wh.patch b/fixing-issue-when-a-valid-token-is-generated-even-wh.patch deleted file mode 100644 index 7f2d8b4..0000000 --- a/fixing-issue-when-a-valid-token-is-generated-even-wh.patch +++ /dev/null @@ -1,37 +0,0 @@ -From 6c85da9a53e9dd022c96a199be4e3bdd280543d6 Mon Sep 17 00:00:00 2001 -From: "Gareth J. Greenaway" -Date: Thu, 2 Aug 2018 15:35:24 -0700 -Subject: [PATCH] Fixing issue when a valid token is generated even when - invalid user credentials are passed. This change verifies that the binddn - credentials are valid, then verifies that the username & password (if not - None) are also valid. - ---- - salt/auth/ldap.py | 8 +++++++- - 1 file changed, 7 insertions(+), 1 deletion(-) - -diff --git a/salt/auth/ldap.py b/salt/auth/ldap.py -index cbfb03a2f2..0b9aa69fe4 100644 ---- a/salt/auth/ldap.py -+++ b/salt/auth/ldap.py -@@ -283,9 +283,15 @@ def auth(username, password): - log.error('LDAP authentication requires python-ldap module') - return False - -- # If bind credentials are configured, use them instead of user's -+ # If bind credentials are configured, verify that we can a valid bind - if _config('binddn', mandatory=False) and _config('bindpw', mandatory=False): - bind = _bind_for_search(anonymous=_config('anonymous', mandatory=False)) -+ -+ # If username & password are not None, attempt to verify they are valid -+ if bind and username and password: -+ bind = _bind(username, password, -+ anonymous=_config('auth_by_group_membership_only', mandatory=False) -+ and _config('anonymous', mandatory=False)) - else: - bind = _bind(username, password, - anonymous=_config('auth_by_group_membership_only', mandatory=False) --- -2.19.0 - - diff --git a/get-os_arch-also-without-rpm-package-installed.patch b/get-os_arch-also-without-rpm-package-installed.patch index 22b95b5..5b67b18 100644 --- a/get-os_arch-also-without-rpm-package-installed.patch +++ b/get-os_arch-also-without-rpm-package-installed.patch @@ -1,4 +1,4 @@ -From 2e0abe6d12aa2657a4febed3a80b8c4cf104487a Mon Sep 17 00:00:00 2001 +From 11c9eacc439697e9fa7b30918963e4736333ed36 Mon Sep 17 00:00:00 2001 From: Bo Maryniuk Date: Wed, 14 Nov 2018 17:36:23 +0100 Subject: [PATCH] Get os_arch also without RPM package installed @@ -17,10 +17,9 @@ Add UT for OS arch detection when no CPU arch or machine can be determined Remove unsupported testcase --- - salt/utils/pkg/rpm.py | 18 ++++++--- - tests/unit/utils/test_pkg.py | 72 ++++++++++++++++++++++++++++++++++++ - 2 files changed, 84 insertions(+), 6 deletions(-) - create mode 100644 tests/unit/utils/test_pkg.py + 
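The rewritten test module below exercises salt.utils.pkg.rpm.get_osarch along three paths: rpm present, rpm absent with a usable machine value, and neither available (the 'unknown' case). A rough sketch of that cascade, assuming only what the tests assert; get_osarch is the real function name, but the body here is an approximation, not the actual implementation:

import platform
import shutil
import subprocess

def get_osarch_sketch():
    # Prefer the CPU architecture the RPM macros report when rpm exists.
    if shutil.which('rpm'):
        out = subprocess.check_output(['rpm', '--eval', '%{_host_cpu}'])
        return out.decode('utf-8', 'replace').strip() or 'unknown'
    # Without rpm, fall back to the machine the platform reports,
    # degrading to 'unknown' when that is empty too.
    return platform.uname().machine or 'unknown'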
salt/utils/pkg/rpm.py | 18 ++++-- + tests/unit/utils/test_pkg.py | 105 ++++++++++++++++++++++------------- + 2 files changed, 77 insertions(+), 46 deletions(-) diff --git a/salt/utils/pkg/rpm.py b/salt/utils/pkg/rpm.py index 94e231da4b..bb8c3fb589 100644 @@ -60,32 +59,71 @@ index 94e231da4b..bb8c3fb589 100644 diff --git a/tests/unit/utils/test_pkg.py b/tests/unit/utils/test_pkg.py -new file mode 100644 -index 0000000000..361e0bf92f ---- /dev/null +index c293852058..361e0bf92f 100644 +--- a/tests/unit/utils/test_pkg.py +++ b/tests/unit/utils/test_pkg.py -@@ -0,0 +1,72 @@ -+# -*- coding: utf-8 -*- -+ +@@ -1,47 +1,72 @@ + # -*- coding: utf-8 -*- + +-# Import Python libs +-from __future__ import absolute_import +-# Import Salt Libs +from __future__ import absolute_import, unicode_literals, print_function + +from tests.support.unit import TestCase, skipIf +from tests.support.mock import Mock, MagicMock, patch, NO_MOCK, NO_MOCK_REASON -+import salt.utils.pkg + import salt.utils.pkg +-# Import Salt Testing Libs +-from tests.support.unit import TestCase +from salt.utils.pkg import rpm + +try: + import pytest +except ImportError: + pytest = None -+ -+ + + +-class PkgUtilsTestCase(TestCase): +@skipIf(NO_MOCK, NO_MOCK_REASON) +@skipIf(pytest is None, 'PyTest is missing') +class PkgRPMTestCase(TestCase): -+ ''' + ''' +- TestCase for salt.utils.pkg module + Test case for pkg.rpm utils -+ ''' + ''' +- test_parameters = [ +- ("16.0.0.49153-0+f1", "", "16.0.0.49153-0+f1"), +- ("> 15.0.0", ">", "15.0.0"), +- ("< 15.0.0", "<", "15.0.0"), +- ("<< 15.0.0", "<<", "15.0.0"), +- (">> 15.0.0", ">>", "15.0.0"), +- (">= 15.0.0", ">=", "15.0.0"), +- ("<= 15.0.0", "<=", "15.0.0"), +- ("!= 15.0.0", "!=", "15.0.0"), +- ("<=> 15.0.0", "<=>", "15.0.0"), +- ("<> 15.0.0", "<>", "15.0.0"), +- ("= 15.0.0", "=", "15.0.0"), +- (">15.0.0", ">", "15.0.0"), +- ("<15.0.0", "<", "15.0.0"), +- ("<<15.0.0", "<<", "15.0.0"), +- (">>15.0.0", ">>", "15.0.0"), +- (">=15.0.0", ">=", "15.0.0"), +- ("<=15.0.0", "<=", "15.0.0"), +- ("!=15.0.0", "!=", "15.0.0"), +- ("<=>15.0.0", "<=>", "15.0.0"), +- ("<>15.0.0", "<>", "15.0.0"), +- ("=15.0.0", "=", "15.0.0"), +- ("", "", "") +- ] +- +- def test_split_comparison(self): +- ''' +- Tests salt.utils.pkg.split_comparison +- ''' +- for test_parameter in self.test_parameters: +- oper, verstr = salt.utils.pkg.split_comparison(test_parameter[0]) +- self.assertEqual(test_parameter[1], oper) +- self.assertEqual(test_parameter[2], verstr) + + @patch('salt.utils.path.which', MagicMock(return_value=True)) + def test_get_osarch_by_rpm(self): @@ -138,6 +176,6 @@ index 0000000000..361e0bf92f + ''' + assert rpm.get_osarch() == 'unknown' -- -2.19.1 +2.17.1 diff --git a/get-os_family-for-rpm-distros-from-the-rpm-macros.-u.patch b/get-os_family-for-rpm-distros-from-the-rpm-macros.-u.patch deleted file mode 100644 index 5a3d552..0000000 --- a/get-os_family-for-rpm-distros-from-the-rpm-macros.-u.patch +++ /dev/null @@ -1,65 +0,0 @@ -From 943a258da3ed460f173968b0a92b95f2e63ab669 Mon Sep 17 00:00:00 2001 -From: Bo Maryniuk -Date: Mon, 8 Oct 2018 12:48:24 +0200 -Subject: [PATCH] Get os_family for RPM distros from the RPM macros. 
- (U#49930) - -Strip and stringify the return for the osarch - -Fix imports ---- - salt/grains/core.py | 8 +++++--- - salt/utils/pkg/rpm.py | 3 ++- - 2 files changed, 7 insertions(+), 4 deletions(-) - -diff --git a/salt/grains/core.py b/salt/grains/core.py -index 6aaf38096d..80eebd1c05 100644 ---- a/salt/grains/core.py -+++ b/salt/grains/core.py -@@ -49,6 +49,8 @@ import salt.utils.path - import salt.utils.platform - import salt.utils.stringutils - import salt.utils.versions -+import salt.utils.pkg.rpm -+ - from salt.ext import six - from salt.ext.six.moves import range - -@@ -1776,9 +1778,9 @@ def os_data(): - # architecture. - if grains.get('os_family') == 'Debian': - osarch = __salt__['cmd.run']('dpkg --print-architecture').strip() -- elif grains.get('os_family') == 'RedHat': -- osarch = __salt__['cmd.run']('rpm --eval %{_host_cpu}').strip() -- elif grains.get('os_family') == 'NILinuxRT': -+ elif grains.get('os_family') in ['RedHat', 'Suse']: -+ osarch = salt.utils.pkg.rpm.get_osarch() -+ elif grains.get('os_family') in ('NILinuxRT', 'Poky'): - archinfo = {} - for line in __salt__['cmd.run']('opkg print-architecture').splitlines(): - if line.startswith('arch'): -diff --git a/salt/utils/pkg/rpm.py b/salt/utils/pkg/rpm.py -index 987edab894..94e231da4b 100644 ---- a/salt/utils/pkg/rpm.py -+++ b/salt/utils/pkg/rpm.py -@@ -9,6 +9,7 @@ import collections - import datetime - import logging - import subprocess -+import salt.utils.stringutils - - # Import 3rd-party libs - from salt.ext import six -@@ -47,7 +48,7 @@ def get_osarch(): - close_fds=True, - stdout=subprocess.PIPE, - stderr=subprocess.PIPE).communicate()[0] -- return ret or 'unknown' -+ return salt.utils.stringutils.to_str(ret).strip() or 'unknown' - - - def check_32(arch, osarch=None): --- -2.19.0 - - diff --git a/improved-handling-of-ldap-group-id.patch b/improved-handling-of-ldap-group-id.patch deleted file mode 100644 index 4b8b948..0000000 --- a/improved-handling-of-ldap-group-id.patch +++ /dev/null @@ -1,38 +0,0 @@ -From d0234ed977ca860b3a3a6a587a6972bbaf5ae345 Mon Sep 17 00:00:00 2001 -From: Raine Curtis -Date: Mon, 9 Jul 2018 09:55:30 -0600 -Subject: [PATCH] Improved handling of LDAP group id - -gid is casted to int, which should be the case. Otherwise an error -is returned. 
---- - salt/states/group.py | 11 +++++++++-- - 1 file changed, 9 insertions(+), 2 deletions(-) - -diff --git a/salt/states/group.py b/salt/states/group.py -index 6a720757e8..acf775134c 100644 ---- a/salt/states/group.py -+++ b/salt/states/group.py -@@ -72,9 +72,16 @@ def _changes(name, - delusers = [salt.utils.win_functions.get_sam_name(user).lower() for user in delusers] - - change = {} -+ ret = {} - if gid: -- if lgrp['gid'] != gid: -- change['gid'] = gid -+ try: -+ gid = int(gid) -+ if lgrp['gid'] != gid: -+ change['gid'] = gid -+ except (TypeError, ValueError): -+ ret['result'] = False -+ ret['comment'] = 'Invalid gid' -+ return ret - - if members: - # -- if new member list if different than the current --- -2.19.1 - - diff --git a/include-aliases-in-the-fqdns-grains.patch b/include-aliases-in-the-fqdns-grains.patch new file mode 100644 index 0000000..0d29c81 --- /dev/null +++ b/include-aliases-in-the-fqdns-grains.patch @@ -0,0 +1,147 @@ +From 5dc6f2a59a8a774d13dcfd36b25ea735df18f10f Mon Sep 17 00:00:00 2001 +From: Bo Maryniuk +Date: Tue, 29 Jan 2019 11:11:38 +0100 +Subject: [PATCH] Include aliases in the fqdns grains + +Add UT for "is_fqdn" + +Add "is_fqdn" check to the network utils + +Bugfix: include FQDNs aliases + +Deprecate UnitTest assertion in favour of built-in assert keyword + +Add UT for fqdns aliases + +Leverage cached interfaces, if any. +--- + salt/grains/core.py | 12 +++++------- + salt/utils/network.py | 12 ++++++++++++ + tests/unit/grains/test_core.py | 28 +++++++++++++++++++++++++--- + tests/unit/utils/test_network.py | 19 +++++++++++++++++++ + 4 files changed, 61 insertions(+), 10 deletions(-) + +diff --git a/salt/grains/core.py b/salt/grains/core.py +index b0c1acceeb..05a9d5035d 100644 +--- a/salt/grains/core.py ++++ b/salt/grains/core.py +@@ -2200,14 +2200,13 @@ def fqdns(): + grains = {} + fqdns = set() + +- addresses = salt.utils.network.ip_addrs(include_loopback=False, +- interface_data=_INTERFACES) +- addresses.extend(salt.utils.network.ip_addrs6(include_loopback=False, +- interface_data=_INTERFACES)) ++ addresses = salt.utils.network.ip_addrs(include_loopback=False, interface_data=_get_interfaces()) ++ addresses.extend(salt.utils.network.ip_addrs6(include_loopback=False, interface_data=_get_interfaces())) + err_message = 'Exception during resolving address: %s' + for ip in addresses: + try: +- fqdns.add(socket.getfqdn(socket.gethostbyaddr(ip)[0])) ++ name, aliaslist, addresslist = socket.gethostbyaddr(ip) ++ fqdns.update([socket.getfqdn(name)] + [als for als in aliaslist if salt.utils.network.is_fqdn(als)]) + except socket.herror as err: + if err.errno == 0: + # No FQDN for this IP address, so we don't need to know this all the time. +@@ -2217,8 +2216,7 @@ def fqdns(): + except (socket.error, socket.gaierror, socket.timeout) as err: + log.error(err_message, err) + +- grains['fqdns'] = sorted(list(fqdns)) +- return grains ++ return {"fqdns": sorted(list(fqdns))} + + + def ip_fqdn(): +diff --git a/salt/utils/network.py b/salt/utils/network.py +index 83269cdcf6..c72d2aec41 100644 +--- a/salt/utils/network.py ++++ b/salt/utils/network.py +@@ -2016,3 +2016,15 @@ def parse_host_port(host_port): + raise ValueError('bad hostname: "{}"'.format(host)) + + return host, port ++ ++ ++def is_fqdn(hostname): ++ """ ++ Verify if hostname conforms to be a FQDN. ++ ++ :param hostname: text string with the name of the host ++ :return: bool, True if hostname is correct FQDN, False otherwise ++ """ ++ ++ compliant = re.compile(r"(?!-)[A-Z\d\-\_]{1,63}(? 
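The new include-aliases patch above changes the fqdns grain to keep not only the canonical name from a reverse lookup but also any alias that is itself a fully qualified name. A compact sketch of that behaviour; since the patch's is_fqdn regex is truncated above, the validity check here is a simplified stand-in, not the real salt.utils.network.is_fqdn:

import socket

def looks_like_fqdn(hostname):
    # Simplified stand-in for salt.utils.network.is_fqdn; the real check
    # validates each label against a stricter pattern.
    return bool(hostname) and '.' in hostname.rstrip('.') and not hostname.startswith('-')

def fqdns_for_ips(addresses):
    fqdns = set()
    for ip in addresses:
        try:
            name, aliases, _ = socket.gethostbyaddr(ip)
        except (socket.herror, socket.gaierror, socket.timeout, OSError):
            continue  # unresolvable addresses are skipped, as in the patch
        fqdns.add(socket.getfqdn(name))
        fqdns.update(alias for alias in aliases if looks_like_fqdn(alias))
    return {'fqdns': sorted(fqdns)}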
Date: Thu, 30 Aug 2018 06:07:08 -0600 Subject: [PATCH] Integration of MSI authentication with azurearm cloud driver (#105) --- - salt/cloud/clouds/azurearm.py | 47 +++++++++++++++++++++++++++-------- - 1 file changed, 36 insertions(+), 11 deletions(-) + salt/cloud/clouds/azurearm.py | 7 ++++++- + 1 file changed, 6 insertions(+), 1 deletion(-) diff --git a/salt/cloud/clouds/azurearm.py b/salt/cloud/clouds/azurearm.py -index bd9a25a7e2..8b9a9e8903 100644 +index e8050dca16..229412adcd 100644 --- a/salt/cloud/clouds/azurearm.py +++ b/salt/cloud/clouds/azurearm.py -@@ -25,6 +25,9 @@ The Azure cloud module is used to control access to Microsoft Azure - * ``client_id`` - * ``secret`` +@@ -58,6 +58,9 @@ The Azure ARM cloud module is used to control access to Microsoft Azure Resource + virtual machine type will be "Windows". Only set this parameter on profiles which install Windows operating systems. + + if using MSI-style authentication: + * ``subscription_id`` @@ -22,97 +22,25 @@ index bd9a25a7e2..8b9a9e8903 100644 Example ``/etc/salt/cloud.providers`` or ``/etc/salt/cloud.providers.d/azure.conf`` configuration: -@@ -48,6 +51,10 @@ Example ``/etc/salt/cloud.providers`` or - For example, this creates a service principal with 'owner' role for the whole subscription: - az ad sp create-for-rbac -n "http://mysaltapp" --role owner --scopes /subscriptions/3287abc8-f98a-c678-3bde-326766fd3617 - *Note: review the details of Service Principals. Owner role is more than you normally need, and you can restrict scope to a resource group or individual resources. -+ -+ Or my-azure-config with MSI-style authentication: -+ driver: azure -+ subscription_id: 3287abc8-f98a-c678-3bde-326766fd3617 - ''' - # pylint: disable=E0102 - -@@ -86,6 +93,7 @@ try: - UserPassCredentials, - ServicePrincipalCredentials, - ) -+ from msrestazure.azure_active_directory import MSIAuthentication - from azure.mgmt.compute import ComputeManagementClient - from azure.mgmt.compute.models import ( - CachingTypes, -@@ -166,19 +174,30 @@ def get_configured_provider(): - ''' - Return the first configured instance. - ''' -+ # check if using Service Principle style authentication... - provider = config.is_provider_configured( - __opts__, - __active_provider_name__ or __virtualname__, -- ('subscription_id', 'tenant', 'client_id', 'secret') -+ required_keys=('subscription_id', 'tenant', 'client_id', 'secret'), -+ log_message=False #... allowed to fail so no need to log warnings - ) - if provider is False: -- return config.is_provider_configured( -+ # check if using username/password style authentication... -+ provider = config.is_provider_configured( +@@ -258,7 +261,8 @@ def get_configured_provider(): + provider = __is_provider_configured( __opts__, __active_provider_name__ or __virtualname__, - ('subscription_id', 'username', 'password') + required_keys=('subscription_id', 'username', 'password'), + log_message=False ) -- else: -- return provider -+ if provider is False: -+ # check if using MSI style credentials... -+ provider = config.is_provider_configured( -+ __opts__, -+ __active_provider_name__ or __virtualname__, -+ required_keys=('subscription_id',), -+ log_message=False -+ ) -+ return provider - - def get_dependencies(): -@@ -210,6 +229,7 @@ def get_conn(Client=None): - get_configured_provider(), __opts__, search_global=False + return provider +@@ -301,6 +305,7 @@ def get_conn(client_type): ) + if tenant is not None: + # using Service Principle style authentication... 
client_id = config.get_cloud_config_value( 'client_id', get_configured_provider(), __opts__, search_global=False -@@ -224,15 +244,20 @@ def get_conn(Client=None): - 'username', - get_configured_provider(), __opts__, search_global=False - ) -- password = config.get_cloud_config_value( -- 'password', -- get_configured_provider(), __opts__, search_global=False -- ) -- credentials = UserPassCredentials(username, password) -+ if username is not None: -+ # using username/password style authentication... -+ password = config.get_cloud_config_value( -+ 'password', -+ get_configured_provider(), __opts__, search_global=False -+ ) -+ credentials = UserPassCredentials(username, password) -+ else: -+ # using MSI style authentication ... -+ credentials = MSIAuthentication() - - client = Client( - credentials=credentials, -- subscription_id=subscription_id, -+ subscription_id=str(subscription_id), - ) - client.config.add_user_agent('SaltCloud/{0}'.format(salt.version.__version__)) - return client -- -2.19.0 +2.17.1 diff --git a/loosen-azure-sdk-dependencies-in-azurearm-cloud-driv.patch b/loosen-azure-sdk-dependencies-in-azurearm-cloud-driv.patch index 339cf3f..6251d3a 100644 --- a/loosen-azure-sdk-dependencies-in-azurearm-cloud-driv.patch +++ b/loosen-azure-sdk-dependencies-in-azurearm-cloud-driv.patch @@ -1,4 +1,4 @@ -From 9d9fb3fd787b40d9d27ad7c5eb69fa0cd4f5a304 Mon Sep 17 00:00:00 2001 +From 8fe82178247ff3704915b578398ea55b0c6e4fa0 Mon Sep 17 00:00:00 2001 From: Joachim Gleissner Date: Tue, 18 Sep 2018 15:07:13 +0200 Subject: [PATCH] loosen azure sdk dependencies in azurearm cloud driver @@ -8,55 +8,33 @@ Use azure-storage-sdk as fallback if multiapi version is not available. remove unused import from azurearm driver --- - salt/cloud/clouds/azurearm.py | 14 ++++++++------ - 1 file changed, 8 insertions(+), 6 deletions(-) + salt/cloud/clouds/azurearm.py | 6 ++++++ + 1 file changed, 6 insertions(+) diff --git a/salt/cloud/clouds/azurearm.py b/salt/cloud/clouds/azurearm.py -index 8b9a9e8903..50e5ce1f62 100644 +index 229412adcd..ac59467fb3 100644 --- a/salt/cloud/clouds/azurearm.py +++ b/salt/cloud/clouds/azurearm.py -@@ -67,6 +67,7 @@ import logging - import pprint - import base64 - import collections +@@ -104,6 +104,7 @@ import time + + # Salt libs + from salt.ext import six +import pkgutil import salt.cache import salt.config as config - import salt.utils.cloud -@@ -74,7 +75,6 @@ import salt.utils.data - import salt.utils.files - import salt.utils.stringutils - import salt.utils.yaml --from salt.utils.versions import LooseVersion - from salt.ext import six - import salt.version - from salt.exceptions import ( -@@ -125,9 +125,12 @@ try: - from azure.mgmt.storage import StorageManagementClient - from azure.mgmt.web import WebSiteManagementClient + import salt.loader +@@ -126,6 +127,11 @@ try: + import azure.mgmt.network.models as network_models + from azure.storage.blob.blockblobservice import BlockBlobService from msrestazure.azure_exceptions import CloudError -- from azure.multiapi.storage.v2016_05_31 import CloudStorageAccount -- from azure.cli import core -- HAS_LIBS = LooseVersion(core.__version__) >= LooseVersion("2.0.12") + if pkgutil.find_loader('azure.multiapi'): + # use multiapi version if available + from azure.multiapi.storage.v2016_05_31 import CloudStorageAccount + else: + from azure.storage import CloudStorageAccount -+ HAS_LIBS = True + HAS_LIBS = True except ImportError: pass - # pylint: enable=wrong-import-position,wrong-import-order -@@ -160,8 +163,7 @@ def __virtual__(): - False, - 
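The loosened SDK handling above probes for the multiapi storage package before falling back to plain azure-storage, instead of hard-requiring specific versions. The same guarded-import pattern in isolation; the module and class names come from the hunk, while the HAS_AZURE_STORAGE flag is an illustrative stand-in for the driver's HAS_LIBS bookkeeping:

import pkgutil

try:
    if pkgutil.find_loader('azure.multiapi'):
        # Use the pinned multiapi flavour when it is available.
        from azure.multiapi.storage.v2016_05_31 import CloudStorageAccount
    else:
        from azure.storage import CloudStorageAccount
    HAS_AZURE_STORAGE = True
except ImportError:
    CloudStorageAccount = None
    HAS_AZURE_STORAGE = False  # callers must test this before using the class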
'The following dependencies are required to use the AzureARM driver: ' - 'Microsoft Azure SDK for Python >= 2.0rc5, ' -- 'Microsoft Azure Storage SDK for Python >= 0.32, ' -- 'Microsoft Azure CLI >= 2.0.12' -+ 'Microsoft Azure Storage SDK for Python >= 0.32' - ) - - global cache # pylint: disable=global-statement,invalid-name -- 2.17.1 diff --git a/only-do-reverse-dns-lookup-on-ips-for-salt-ssh.patch b/only-do-reverse-dns-lookup-on-ips-for-salt-ssh.patch deleted file mode 100644 index 9f5dc84..0000000 --- a/only-do-reverse-dns-lookup-on-ips-for-salt-ssh.patch +++ /dev/null @@ -1,29 +0,0 @@ -From f346e83f6d4651a1cdcaad8c995642b55f66ddbc Mon Sep 17 00:00:00 2001 -From: Daniel Wallace -Date: Wed, 25 Jul 2018 09:48:29 -0500 -Subject: [PATCH] only do reverse dns lookup on ips for salt-ssh - -Fixes #48676 ---- - salt/client/ssh/__init__.py | 4 +++- - 1 file changed, 3 insertions(+), 1 deletion(-) - -diff --git a/salt/client/ssh/__init__.py b/salt/client/ssh/__init__.py -index 8a85cc2480..d6ff0c3479 100644 ---- a/salt/client/ssh/__init__.py -+++ b/salt/client/ssh/__init__.py -@@ -349,7 +349,9 @@ class SSH(object): - return - - hostname = self.opts['tgt'].split('@')[-1] -- needs_expansion = '*' not in hostname and salt.utils.network.is_reachable_host(hostname) -+ needs_expansion = '*' not in hostname and \ -+ salt.utils.network.is_reachable_host(hostname) and \ -+ salt.utils.network.is_ip(hostname) - if needs_expansion: - hostname = salt.utils.network.ip_to_host(hostname) - if hostname is None: --- -2.17.1 - - diff --git a/option-to-merge-current-pillar-with-opts-pillar-duri.patch b/option-to-merge-current-pillar-with-opts-pillar-duri.patch deleted file mode 100644 index cef690c..0000000 --- a/option-to-merge-current-pillar-with-opts-pillar-duri.patch +++ /dev/null @@ -1,101 +0,0 @@ -From e8c1b2c5a8af5cc6f4551918f695d1463a6eb584 Mon Sep 17 00:00:00 2001 -From: Matei Albu -Date: Sun, 6 May 2018 21:15:58 +0200 -Subject: [PATCH] Option to merge current pillar with opts['pillar'] - during pillar compile - -Fixes #47501 -(cherry picked from commit 2f1485e) ---- - doc/ref/configuration/minion.rst | 28 ++++++++++++++++++++++++++++ - salt/config/__init__.py | 4 +++- - salt/pillar/__init__.py | 7 +++++++ - 3 files changed, 38 insertions(+), 1 deletion(-) - -diff --git a/doc/ref/configuration/minion.rst b/doc/ref/configuration/minion.rst -index c9010a702b..d9823b78d8 100644 ---- a/doc/ref/configuration/minion.rst -+++ b/doc/ref/configuration/minion.rst -@@ -3219,3 +3219,31 @@ URL of the repository: - Replace ```` with the SHA1 hash of a commit ID. Specifying a commit - ID is useful in that it allows one to revert back to a previous version in the - event that an error is introduced in the latest revision of the repo. -+ -+``ssh_merge_pillar`` -+-------------------- -+ -+.. versionadded:: 2018.3.2 -+ -+Default: ``True`` -+ -+Merges the compiled pillar data with the pillar data already available globally. -+This is useful when using ``salt-ssh`` or ``salt-call --local`` and overriding the pillar -+data in a state file: -+ -+.. code-block:: yaml -+ -+ apply_showpillar: -+ module.run: -+ - name: state.apply -+ - mods: -+ - showpillar -+ - kwargs: -+ pillar: -+ test: "foo bar" -+ -+If set to ``True`` the ``showpillar`` state will have access to the -+global pillar data. -+ -+If set to ``False`` only the overriding pillar data will be available -+to the ``showpillar`` state. 
-diff --git a/salt/config/__init__.py b/salt/config/__init__.py -index 432364b201..feda0abac1 100644 ---- a/salt/config/__init__.py -+++ b/salt/config/__init__.py -@@ -989,6 +989,7 @@ VALID_OPTS = { - 'ssh_identities_only': bool, - 'ssh_log_file': six.string_types, - 'ssh_config_file': six.string_types, -+ 'ssh_merge_pillar': bool, - - # Enable ioflo verbose logging. Warning! Very verbose! - 'ioflo_verbose': int, -@@ -1485,6 +1486,7 @@ DEFAULT_MINION_OPTS = { - }, - 'discovery': False, - 'schedule': {}, -+ 'ssh_merge_pillar': True - } - - DEFAULT_MASTER_OPTS = { -@@ -2089,7 +2091,7 @@ def _validate_ssh_minion_opts(opts): - - for opt_name in list(ssh_minion_opts): - if re.match('^[a-z0-9]+fs_', opt_name, flags=re.IGNORECASE) \ -- or 'pillar' in opt_name \ -+ or ('pillar' in opt_name and not 'ssh_merge_pillar' == opt_name) \ - or opt_name in ('fileserver_backend',): - log.warning( - '\'%s\' is not a valid ssh_minion_opts parameter, ignoring', -diff --git a/salt/pillar/__init__.py b/salt/pillar/__init__.py -index fc1e34f75d..fc3ce0a5c0 100644 ---- a/salt/pillar/__init__.py -+++ b/salt/pillar/__init__.py -@@ -1014,6 +1014,13 @@ class Pillar(object): - mopts['file_roots'] = self.actual_file_roots - mopts['saltversion'] = __version__ - pillar['master'] = mopts -+ if 'pillar' in self.opts and self.opts.get('ssh_merge_pillar', False): -+ pillar = merge( -+ self.opts['pillar'], -+ pillar, -+ self.merge_strategy, -+ self.opts.get('renderer', 'yaml'), -+ self.opts.get('pillar_merge_lists', False)) - if errors: - for error in errors: - log.critical('Pillar render error: %s', error) --- -2.13.7 - - diff --git a/prepend-current-directory-when-path-is-just-filename.patch b/prepend-current-directory-when-path-is-just-filename.patch deleted file mode 100644 index 34ea013..0000000 --- a/prepend-current-directory-when-path-is-just-filename.patch +++ /dev/null @@ -1,38 +0,0 @@ -From 341ee0c44cabf2f34bdd2f4b54e4b83053a3133e Mon Sep 17 00:00:00 2001 -From: Mihai Dinca -Date: Thu, 23 Aug 2018 16:14:36 +0200 -Subject: [PATCH] Prepend current directory when path is just filename - (bsc#1095942) - ---- - salt/utils/parsers.py | 9 +++++++++ - 1 file changed, 9 insertions(+) - -diff --git a/salt/utils/parsers.py b/salt/utils/parsers.py -index 5a415ab576..9a7f27ac11 100644 ---- a/salt/utils/parsers.py -+++ b/salt/utils/parsers.py -@@ -591,10 +591,19 @@ class LogLevelMixIn(six.with_metaclass(MixInMeta, object)): - ) - ) - -+ def _logfile_callback(option, opt, value, parser, *args, **kwargs): -+ if not os.path.dirname(value): -+ # if the path is only a file name (no parent directory), assume current directory -+ value = os.path.join(os.path.curdir, value) -+ setattr(parser.values, self._logfile_config_setting_name_, value) -+ - group.add_option( - '--log-file', - dest=self._logfile_config_setting_name_, - default=None, -+ action='callback', -+ type='string', -+ callback=_logfile_callback, - help='Log file path. 
Default: \'{0}\'.'.format( - self._default_logging_logfile_ - ) --- -2.19.0 - - diff --git a/prevent-zypper-from-parsing-repo-configuration-from-.patch b/prevent-zypper-from-parsing-repo-configuration-from-.patch deleted file mode 100644 index e511dc7..0000000 --- a/prevent-zypper-from-parsing-repo-configuration-from-.patch +++ /dev/null @@ -1,28 +0,0 @@ -From d282de5c59e27c17bd5afb207c4eeaa754993368 Mon Sep 17 00:00:00 2001 -From: =?UTF-8?q?Pablo=20Su=C3=A1rez=20Hern=C3=A1ndez?= - -Date: Tue, 22 May 2018 12:04:48 +0100 -Subject: [PATCH] Prevent zypper from parsing repo configuration from not - .repo files - ---- - salt/modules/zypper.py | 2 +- - 1 file changed, 1 insertion(+), 1 deletion(-) - -diff --git a/salt/modules/zypper.py b/salt/modules/zypper.py -index 06f8335c18..05ba3d86c9 100644 ---- a/salt/modules/zypper.py -+++ b/salt/modules/zypper.py -@@ -862,7 +862,7 @@ def _get_configured_repos(): - ''' - - repos_cfg = configparser.ConfigParser() -- repos_cfg.read([REPOS + '/' + fname for fname in os.listdir(REPOS)]) -+ repos_cfg.read([REPOS + '/' + fname for fname in os.listdir(REPOS) if fname.endswith(".repo")]) - - return repos_cfg - --- -2.13.7 - - diff --git a/remove-arch-from-name-when-pkg.list_pkgs-is-called-w.patch b/remove-arch-from-name-when-pkg.list_pkgs-is-called-w.patch index 83a65f6..a7ebb25 100644 --- a/remove-arch-from-name-when-pkg.list_pkgs-is-called-w.patch +++ b/remove-arch-from-name-when-pkg.list_pkgs-is-called-w.patch @@ -1,4 +1,4 @@ -From 6488d91acb6f470bfa2b66ac8100cb67d6367612 Mon Sep 17 00:00:00 2001 +From 51ccc41dd16564dea5b465d122218ca8047f9f3e Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Pablo=20Su=C3=A1rez=20Hern=C3=A1ndez?= Date: Mon, 19 Nov 2018 11:46:26 +0000 @@ -38,19 +38,19 @@ Remove unnecessary lambda Return None instead empty string for arch and release in pkg.list_pkgs --- salt/modules/aptpkg.py | 38 ++++++++ - salt/modules/pkg_resource.py | 17 +++- + salt/modules/pkg_resource.py | 20 +++- salt/modules/yumpkg.py | 32 ++++++- - salt/modules/zypper.py | 29 +++++- + salt/modules/zypperpkg.py | 29 +++++- tests/unit/modules/test_pkg_resource.py | 116 ++++++++++++++++++++++++ tests/unit/modules/test_yumpkg.py | 85 ++++++++++++++++- - tests/unit/modules/test_zypper.py | 81 ++++++++++++++++- - 7 files changed, 382 insertions(+), 16 deletions(-) + tests/unit/modules/test_zypperpkg.py | 79 +++++++++++++++- + 7 files changed, 383 insertions(+), 16 deletions(-) diff --git a/salt/modules/aptpkg.py b/salt/modules/aptpkg.py -index 42d606926f..1fd4883f2c 100644 +index 4a331444c9..f51b6958e5 100644 --- a/salt/modules/aptpkg.py +++ b/salt/modules/aptpkg.py -@@ -77,6 +77,7 @@ except ImportError: +@@ -73,6 +73,7 @@ except ImportError: # pylint: enable=import-error APT_LISTS_PATH = "/var/lib/apt/lists" @@ -58,7 +58,7 @@ index 42d606926f..1fd4883f2c 100644 # Source format for urllib fallback on PPA handling LP_SRC_FORMAT = 'deb http://ppa.launchpad.net/{0}/{1}/ubuntu {2} main' -@@ -218,6 +219,43 @@ def _warn_software_properties(repo): +@@ -185,6 +186,43 @@ def _warn_software_properties(repo): log.warning('Best guess at ppa format: %s', repo) @@ -103,16 +103,17 @@ index 42d606926f..1fd4883f2c 100644 ''' Return the latest version of the named package available for upgrade or diff --git a/salt/modules/pkg_resource.py b/salt/modules/pkg_resource.py -index 9b0a8287f5..0c872f1805 100644 +index 8b83f1cda5..0c872f1805 100644 --- a/salt/modules/pkg_resource.py +++ b/salt/modules/pkg_resource.py -@@ -311,22 +311,31 @@ def format_pkg_list(packages, versions_as_list, attr): +@@ -311,21 
+311,31 @@ def format_pkg_list(packages, versions_as_list, attr): ''' ret = copy.deepcopy(packages) if attr: +- requested_attr = {'epoch', 'version', 'release', 'arch', 'install_date', 'install_date_time_t'} + ret_attr = {} - requested_attr = set(['epoch', 'version', 'release', 'arch', - 'install_date', 'install_date_time_t']) ++ requested_attr = set(['epoch', 'version', 'release', 'arch', ++ 'install_date', 'install_date_time_t']) if attr != 'all': - requested_attr &= set(attr + ['version']) @@ -143,10 +144,10 @@ index 9b0a8287f5..0c872f1805 100644 for name in ret: ret[name] = [format_version(d['epoch'], d['version'], d['release']) diff --git a/salt/modules/yumpkg.py b/salt/modules/yumpkg.py -index 51832bf883..cf50d1a4c4 100644 +index a56a2e8366..4f26a41670 100644 --- a/salt/modules/yumpkg.py +++ b/salt/modules/yumpkg.py -@@ -65,6 +65,8 @@ log = logging.getLogger(__name__) +@@ -66,6 +66,8 @@ log = logging.getLogger(__name__) __HOLD_PATTERN = r'[\w+]+(?:[.-][^-]+)*' @@ -155,7 +156,7 @@ index 51832bf883..cf50d1a4c4 100644 # Define the module's virtual name __virtualname__ = 'pkg' -@@ -397,7 +399,7 @@ def normalize_name(name): +@@ -429,7 +431,7 @@ def normalize_name(name): salt '*' pkg.normalize_name zsh.x86_64 ''' try: @@ -164,7 +165,7 @@ index 51832bf883..cf50d1a4c4 100644 if arch not in salt.utils.pkg.rpm.ARCHES + ('noarch',): return name except ValueError: -@@ -408,6 +410,30 @@ def normalize_name(name): +@@ -440,6 +442,30 @@ def normalize_name(name): return name @@ -195,7 +196,7 @@ index 51832bf883..cf50d1a4c4 100644 def latest_version(*names, **kwargs): ''' Return the latest version of the named package available for upgrade or -@@ -647,8 +673,8 @@ def list_pkgs(versions_as_list=False, **kwargs): +@@ -676,8 +702,8 @@ def list_pkgs(versions_as_list=False, **kwargs): if pkginfo is not None: # see rpm version string rules available at https://goo.gl/UGKPNd pkgver = pkginfo.version @@ -206,11 +207,11 @@ index 51832bf883..cf50d1a4c4 100644 if ':' in pkgver: epoch, pkgver = pkgver.split(":", 1) if '-' in pkgver: -diff --git a/salt/modules/zypper.py b/salt/modules/zypper.py -index 773354b2f3..ae66e4709d 100644 ---- a/salt/modules/zypper.py -+++ b/salt/modules/zypper.py -@@ -52,6 +52,7 @@ ZYPP_HOME = '/etc/zypp' +diff --git a/salt/modules/zypperpkg.py b/salt/modules/zypperpkg.py +index 0c26e2214c..92e7052020 100644 +--- a/salt/modules/zypperpkg.py ++++ b/salt/modules/zypperpkg.py +@@ -53,6 +53,7 @@ ZYPP_HOME = '/etc/zypp' LOCKS = '{0}/locks'.format(ZYPP_HOME) REPOS = '{0}/repos.d'.format(ZYPP_HOME) DEFAULT_PRIORITY = 99 @@ -218,7 +219,7 @@ index 773354b2f3..ae66e4709d 100644 # Define the module's virtual name __virtualname__ = 'pkg' -@@ -588,6 +589,30 @@ def info_available(*names, **kwargs): +@@ -590,6 +591,30 @@ def info_available(*names, **kwargs): return ret @@ -249,8 +250,8 @@ index 773354b2f3..ae66e4709d 100644 def latest_version(*names, **kwargs): ''' Return the latest version of the named package available for upgrade or -@@ -756,8 +781,8 @@ def list_pkgs(versions_as_list=False, **kwargs): - if pkginfo is not None: +@@ -760,8 +785,8 @@ def list_pkgs(versions_as_list=False, **kwargs): + if pkginfo: # see rpm version string rules available at https://goo.gl/UGKPNd pkgver = pkginfo.version - epoch = '' @@ -261,7 +262,7 @@ index 773354b2f3..ae66e4709d 100644 epoch, pkgver = pkgver.split(":", 1) if '-' in pkgver: diff --git a/tests/unit/modules/test_pkg_resource.py b/tests/unit/modules/test_pkg_resource.py -index dd3ae9a1ac..2cfd6bb16a 100644 +index b6d90cc92c..a9ffe43cdd 100644 --- 
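The pkg_resource.format_pkg_list change above narrows each package's attribute dictionaries to the attributes the caller asked for, always retaining the version. A small sketch of that filtering; the attribute names match the requested_attr set in the hunk, while the function name and exact dict handling are illustrative:

KNOWN_ATTRS = {'epoch', 'version', 'release', 'arch',
               'install_date', 'install_date_time_t'}

def filter_pkg_attrs(packages, attr):
    # 'attr' is either the string 'all' or a list of attribute names.
    requested = set(KNOWN_ATTRS)
    if attr != 'all':
        requested &= set(list(attr) + ['version'])  # version is always kept
    return {
        name: [{key: info.get(key) for key in requested} for info in versions]
        for name, versions in packages.items()
    }

# filter_pkg_attrs({'vim': [{'epoch': None, 'version': '8.0', 'release': '1.1',
#                            'arch': 'x86_64'}]}, ['arch'])
# -> {'vim': [{'version': '8.0', 'arch': 'x86_64'}]}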
a/tests/unit/modules/test_pkg_resource.py +++ b/tests/unit/modules/test_pkg_resource.py @@ -129,6 +129,122 @@ class PkgresTestCase(TestCase, LoaderModuleMockMixin): @@ -388,18 +389,18 @@ index dd3ae9a1ac..2cfd6bb16a 100644 ''' Test to takes a dict of package name/version information diff --git a/tests/unit/modules/test_yumpkg.py b/tests/unit/modules/test_yumpkg.py -index c73f2582b9..324c2c8b66 100644 +index 6113d3a4b1..6019a8179e 100644 --- a/tests/unit/modules/test_yumpkg.py +++ b/tests/unit/modules/test_yumpkg.py -@@ -16,6 +16,7 @@ from tests.support.mock import ( - ) - +@@ -18,6 +18,7 @@ from tests.support.mock import ( # Import Salt libs + from salt.exceptions import CommandExecutionError + import salt.modules.rpm_lowpkg as rpm +from salt.ext import six import salt.modules.yumpkg as yumpkg import salt.modules.pkg_resource as pkg_resource -@@ -69,7 +70,8 @@ class YumTestCase(TestCase, LoaderModuleMockMixin): +@@ -76,7 +77,8 @@ class YumTestCase(TestCase, LoaderModuleMockMixin): 'os_family': 'RedHat', 'osmajorrelease': 7, }, @@ -409,7 +410,7 @@ index c73f2582b9..324c2c8b66 100644 } def test_list_pkgs(self): -@@ -100,7 +102,8 @@ class YumTestCase(TestCase, LoaderModuleMockMixin): +@@ -107,7 +109,8 @@ class YumTestCase(TestCase, LoaderModuleMockMixin): patch.dict(yumpkg.__salt__, {'cmd.run': MagicMock(return_value=os.linesep.join(rpm_out))}), \ patch.dict(yumpkg.__salt__, {'pkg_resource.add_pkg': _add_data}), \ patch.dict(yumpkg.__salt__, {'pkg_resource.format_pkg_list': pkg_resource.format_pkg_list}), \ @@ -419,7 +420,7 @@ index c73f2582b9..324c2c8b66 100644 pkgs = yumpkg.list_pkgs(versions_as_list=True) for pkg_name, pkg_version in { 'python-urlgrabber': '3.10-8.el7', -@@ -147,7 +150,8 @@ class YumTestCase(TestCase, LoaderModuleMockMixin): +@@ -154,7 +157,8 @@ class YumTestCase(TestCase, LoaderModuleMockMixin): patch.dict(yumpkg.__salt__, {'cmd.run': MagicMock(return_value=os.linesep.join(rpm_out))}), \ patch.dict(yumpkg.__salt__, {'pkg_resource.add_pkg': _add_data}), \ patch.dict(yumpkg.__salt__, {'pkg_resource.format_pkg_list': pkg_resource.format_pkg_list}), \ @@ -429,7 +430,7 @@ index c73f2582b9..324c2c8b66 100644 pkgs = yumpkg.list_pkgs(attr=['epoch', 'release', 'arch', 'install_date_time_t']) for pkg_name, pkg_attr in { 'python-urlgrabber': { -@@ -155,54 +159,63 @@ class YumTestCase(TestCase, LoaderModuleMockMixin): +@@ -162,54 +166,63 @@ class YumTestCase(TestCase, LoaderModuleMockMixin): 'release': '8.el7', 'arch': 'noarch', 'install_date_time_t': 1487838471, @@ -493,7 +494,7 @@ index c73f2582b9..324c2c8b66 100644 }, 'shadow-utils': { 'epoch': '2', -@@ -216,22 +229,88 @@ class YumTestCase(TestCase, LoaderModuleMockMixin): +@@ -223,22 +236,88 @@ class YumTestCase(TestCase, LoaderModuleMockMixin): 'release': '33.el7', 'arch': 'x86_64', 'install_date_time_t': 1487838484, @@ -582,10 +583,10 @@ index c73f2582b9..324c2c8b66 100644 def test_latest_version_with_options(self): with patch.object(yumpkg, 'list_pkgs', MagicMock(return_value={})): -diff --git a/tests/unit/modules/test_zypper.py b/tests/unit/modules/test_zypper.py -index 424438c8bf..a60e209b2c 100644 ---- a/tests/unit/modules/test_zypper.py -+++ b/tests/unit/modules/test_zypper.py +diff --git a/tests/unit/modules/test_zypperpkg.py b/tests/unit/modules/test_zypperpkg.py +index 3259e1810d..f586c23fd0 100644 +--- a/tests/unit/modules/test_zypperpkg.py ++++ b/tests/unit/modules/test_zypperpkg.py @@ -61,7 +61,7 @@ class ZypperTestCase(TestCase, LoaderModuleMockMixin): ''' @@ -595,7 +596,7 @@ index 424438c8bf..a60e209b2c 100644 def 
setUp(self): self.new_repo_config = dict( -@@ -603,7 +603,8 @@ Repository 'DUMMY' not found by its alias, number, or URI. +@@ -605,7 +605,8 @@ Repository 'DUMMY' not found by its alias, number, or URI. patch.dict(zypper.__grains__, {'osarch': 'x86_64'}), \ patch.dict(zypper.__salt__, {'pkg_resource.add_pkg': _add_data}), \ patch.dict(zypper.__salt__, {'pkg_resource.format_pkg_list': pkg_resource.format_pkg_list}), \ @@ -605,7 +606,7 @@ index 424438c8bf..a60e209b2c 100644 pkgs = zypper.list_pkgs(attr=['epoch', 'release', 'arch', 'install_date_time_t']) self.assertFalse(pkgs.get('gpg-pubkey', False)) for pkg_name, pkg_attr in { -@@ -612,58 +613,130 @@ Repository 'DUMMY' not found by its alias, number, or URI. +@@ -614,58 +615,130 @@ Repository 'DUMMY' not found by its alias, number, or URI. 'release': '129.686', 'arch': 'noarch', 'install_date_time_t': 1498636511, @@ -654,8 +655,7 @@ index 424438c8bf..a60e209b2c 100644 'install_date_time_t': 1503572639, + 'epoch': None, }], -- 'perseus-dummy.i586': [{ -+ 'perseus-dummy': [{ + 'perseus-dummy.i586': [{ 'version': '1.1', 'release': '1.1', 'arch': 'i586', @@ -688,8 +688,8 @@ index 424438c8bf..a60e209b2c 100644 + 'virt-what_|-1.10_|-2.el7_|-x86_64_|-_|-1387838486', + ] + -+ with patch.dict(zypper.__salt__, {'cmd.run': MagicMock(return_value=os.linesep.join(rpm_out))}), \ -+ patch.dict(zypper.__grains__, {'osarch': 'x86_64'}), \ ++ with patch.dict(zypper.__grains__, {'osarch': 'x86_64'}), \ ++ patch.dict(zypper.__salt__, {'cmd.run': MagicMock(return_value=os.linesep.join(rpm_out))}), \ + patch.dict(zypper.__salt__, {'pkg_resource.add_pkg': _add_data}), \ + patch.dict(zypper.__salt__, {'pkg_resource.format_pkg_list': pkg_resource.format_pkg_list}), \ + patch.dict(zypper.__salt__, {'pkg_resource.stringify': MagicMock()}), \ @@ -739,6 +739,6 @@ index 424438c8bf..a60e209b2c 100644 ''' Test advisory patches listing. 
-- -2.17.1 +2.20.1 diff --git a/remove-old-hack-when-reporting-multiversion-packages.patch b/remove-old-hack-when-reporting-multiversion-packages.patch deleted file mode 100644 index 7e3e5fc..0000000 --- a/remove-old-hack-when-reporting-multiversion-packages.patch +++ /dev/null @@ -1,51 +0,0 @@ -From 11186ce52ae42967c49a6e238659a566e488a6b4 Mon Sep 17 00:00:00 2001 -From: =?UTF-8?q?Pablo=20Su=C3=A1rez=20Hern=C3=A1ndez?= - -Date: Mon, 23 Jul 2018 16:32:26 +0100 -Subject: [PATCH] Remove old hack when reporting multiversion packages - -Fix unit tests for zypper pkg.upgrade ---- - salt/modules/zypper.py | 5 ----- - tests/unit/modules/test_zypper.py | 8 +++++++- - 2 files changed, 7 insertions(+), 6 deletions(-) - -diff --git a/salt/modules/zypper.py b/salt/modules/zypper.py -index 4689f84926..695bce4f4e 100644 ---- a/salt/modules/zypper.py -+++ b/salt/modules/zypper.py -@@ -1480,11 +1480,6 @@ def upgrade(refresh=True, - __zypper__(systemd_scope=_systemd_scope()).noraise.call(*cmd_update) - _clean_cache() - new = list_pkgs() -- -- # Handle packages which report multiple new versions -- # (affects only kernel packages at this point) -- for pkg in new: -- new[pkg] = new[pkg].split(',')[-1] - ret = salt.utils.data.compare_dicts(old, new) - - if __zypper__.exit_code not in __zypper__.SUCCESS_EXIT_CODES: -diff --git a/tests/unit/modules/test_zypper.py b/tests/unit/modules/test_zypper.py -index bb15aca11a..424438c8bf 100644 ---- a/tests/unit/modules/test_zypper.py -+++ b/tests/unit/modules/test_zypper.py -@@ -429,7 +429,13 @@ class ZypperTestCase(TestCase, LoaderModuleMockMixin): - zypper_mock.assert_any_call('update', '--auto-agree-with-licenses') - - with patch('salt.modules.zypper.list_pkgs', -- MagicMock(side_effect=[{"vim": "1.1"}, {"vim": "1.1,1.2"}])): -+ MagicMock(side_effect=[{"kernel-default": "1.1"}, {"kernel-default": "1.1,1.2"}])): -+ ret = zypper.upgrade() -+ self.assertDictEqual(ret, {"kernel-default": {"old": "1.1", "new": "1.1,1.2"}}) -+ zypper_mock.assert_any_call('update', '--auto-agree-with-licenses') -+ -+ with patch('salt.modules.zypper.list_pkgs', -+ MagicMock(side_effect=[{"vim": "1.1"}, {"vim": "1.2"}])): - ret = zypper.upgrade() - self.assertDictEqual(ret, {"vim": {"old": "1.1", "new": "1.2"}}) - zypper_mock.assert_any_call('update', '--auto-agree-with-licenses') --- -2.17.1 - - diff --git a/retire-md5-checksum-for-pkg-mgmt-plugins.patch b/retire-md5-checksum-for-pkg-mgmt-plugins.patch deleted file mode 100644 index 33f009e..0000000 --- a/retire-md5-checksum-for-pkg-mgmt-plugins.patch +++ /dev/null @@ -1,43 +0,0 @@ -From 0908344fae3edda3372ee03820ea30ebcfe8980e Mon Sep 17 00:00:00 2001 -From: Bo Maryniuk -Date: Thu, 13 Sep 2018 12:00:55 +0200 -Subject: [PATCH] Retire MD5 checksum for pkg mgmt plugins - -Use SHA256 algorithm for zyppnotify plugin - -Remove an empty line ---- - scripts/suse/yum/plugins/yumnotify.py | 2 +- - scripts/suse/zypper/plugins/commit/zyppnotify | 2 +- - 2 files changed, 2 insertions(+), 2 deletions(-) - -diff --git a/scripts/suse/yum/plugins/yumnotify.py b/scripts/suse/yum/plugins/yumnotify.py -index 268e1e9531..dd2485c886 100644 ---- a/scripts/suse/yum/plugins/yumnotify.py -+++ b/scripts/suse/yum/plugins/yumnotify.py -@@ -32,7 +32,7 @@ def _get_checksum(): - Returns: - hexdigest - """ -- digest = hashlib.md5() -+ digest = hashlib.sha256() - with open(RPM_PATH, "rb") as rpm_db_fh: - while True: - buff = rpm_db_fh.read(0x1000) -diff --git a/scripts/suse/zypper/plugins/commit/zyppnotify b/scripts/suse/zypper/plugins/commit/zyppnotify -index 
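The retired plugin patch above (now dropped from the series) moved the package-database checksum from MD5 to SHA-256 while keeping the same streaming read loop. That loop as a standalone sketch, assuming a generic file path; the plugins apply it to the rpm database file:

import hashlib

def file_checksum(path, chunk_size=0x1000):
    # Stream the file in 4 KiB chunks so large databases are never read whole.
    digest = hashlib.sha256()
    with open(path, 'rb') as fh:
        while True:
            buff = fh.read(chunk_size)
            if not buff:
                break
            digest.update(buff)
    return digest.hexdigest()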
268298b108..b64badb119 100755 ---- a/scripts/suse/zypper/plugins/commit/zyppnotify -+++ b/scripts/suse/zypper/plugins/commit/zyppnotify -@@ -35,7 +35,7 @@ class DriftDetector(Plugin): - Returns: - hexdigest - ''' -- digest = hashlib.md5() -+ digest = hashlib.sha256() - with open(self.rpm_path, "rb") as rpm_db_fh: - while True: - buff = rpm_db_fh.read(0x1000) --- -2.20.1 - - diff --git a/salt.changes b/salt.changes index d61a0cf..00f5cae 100644 --- a/salt.changes +++ b/salt.changes @@ -1,3 +1,102 @@ +------------------------------------------------------------------- +Thu Feb 28 16:18:38 UTC 2019 - Jochen Breuer + +- No longer limiting Python3 version to <3.7 + +------------------------------------------------------------------- +Thu Feb 28 08:24:16 UTC 2019 - Jochen Breuer + +- Async batch implementation + +- Added: + * async-batch-implementation.patch + +------------------------------------------------------------------- +Wed Feb 27 14:28:55 UTC 2019 - jbreuer@suse.de + +- Update to Salt 2019.2.0 release + For further information see: + https://docs.saltstack.com/en/latest/topics/releases/2019.2.0.html + +- Added: + * add-virt.all_capabilities.patch + * add-virt.volume_infos-and-virt.volume_delete.patch + * don-t-call-zypper-with-more-than-one-no-refresh.patch + * include-aliases-in-the-fqdns-grains.patch + * temporary-fix-extend-the-whitelist-of-allowed-comman.patch + +- Removed: + * accounting-for-when-files-in-an-archive-contain-non-.patch + * add-engine-relaying-libvirt-events.patch + * add-other-attribute-to-gecos-fields-to-avoid-inconsi.patch + * add-support-for-python-3.7.patch + * align-suse-salt-master.service-limitnofiles-limit-wi.patch + * avoid-incomprehensive-message-if-crashes.patch + * change-stringio-import-in-python2-to-import-the-clas.patch + * decode-file-contents-for-python2-bsc-1102013.patch + * do-not-override-jid-on-returners-only-sending-back-t.patch + * don-t-error-on-retcode-0-in-libcrypto.openssl_init_c.patch + * feat-add-grain-for-all-fqdns.patch + * fix-async-call-to-process-manager.patch + * fix-decrease-loglevel-when-unable-to-resolve-addr.patch + * fix-deprecation-warning-bsc-1095507.patch + * fix-diffing-binary-files-in-file.get_diff-bsc-109839.patch + * fix-for-ec2-rate-limit-failures.patch + * fix-for-errno-0-resolver-error-0-no-error-bsc-108758.patch + * fix-for-sorting-of-multi-version-packages-bsc-109717.patch + * fix-index-error-when-running-on-python-3.patch + * fix-latin1-encoding-problems-on-file-module-bsc-1116.patch + * fix-mine.get-not-returning-data-workaround-for-48020.patch + * fix-unboundlocalerror-in-file.get_diff.patch + * fixed-usage-of-ipaddress.patch + * fixing-issue-when-a-valid-token-is-generated-even-wh.patch + * get-os_family-for-rpm-distros-from-the-rpm-macros.-u.patch + * improved-handling-of-ldap-group-id.patch + * only-do-reverse-dns-lookup-on-ips-for-salt-ssh.patch + * option-to-merge-current-pillar-with-opts-pillar-duri.patch + * prepend-current-directory-when-path-is-just-filename.patch + * prevent-zypper-from-parsing-repo-configuration-from-.patch + * remove-old-hack-when-reporting-multiversion-packages.patch + * retire-md5-checksum-for-pkg-mgmt-plugins.patch + * show-recommendations-for-salt-ssh-cross-version-pyth.patch + * strip-trailing-commas-on-linux-user-gecos-fields.patch + * support-use-of-gce-instance-credentials-109.patch + * update-error-list-for-zypper.patch + * x509-fixes-for-remote-signing-106.patch + +- Modified: + * add-all_versions-parameter-to-include-all-installed-.patch + * 
add-cpe_name-for-osversion-grain-parsing-u-49946.patch + * add-environment-variable-to-know-if-yum-is-invoked-f.patch + * add-hold-unhold-functions.patch + * add-saltssh-multi-version-support-across-python-inte.patch + * azurefs-gracefully-handle-attributeerror.patch + * bugfix-any-unicode-string-of-length-16-will-raise-ty.patch + * debian-info_installed-compatibility-50453.patch + * do-not-load-pip-state-if-there-is-no-3rd-party-depen.patch + * fall-back-to-pymysql.patch + * fix-for-suse-expanded-support-detection.patch + * fix-git_pillar-merging-across-multiple-__env__-repos.patch + * fix-ipv6-scope-bsc-1108557.patch + * fix-issue-2068-test.patch + * fix-zypper.list_pkgs-to-be-aligned-with-pkg-state.patch + * fixes-cve-2018-15750-cve-2018-15751.patch + * get-os_arch-also-without-rpm-package-installed.patch + * integration-of-msi-authentication-with-azurearm-clou.patch + * loosen-azure-sdk-dependencies-in-azurearm-cloud-driv.patch + * remove-arch-from-name-when-pkg.list_pkgs-is-called-w.patch + * use-adler32-algorithm-to-compute-string-checksums.patch + * x509-fixes-111.patch + * zypper-add-root-configuration-parameter.patch + +------------------------------------------------------------------- +Wed Jan 23 15:25:29 UTC 2019 - bo@suse.de + +- Add root parameter to Zypper module + +- Added: + * zypper-add-root-configuration-parameter.patch + ------------------------------------------------------------------- Wed Jan 16 16:28:09 UTC 2019 - psuarezhernandez@suse.com diff --git a/salt.spec b/salt.spec index a937c00..743737d 100644 --- a/salt.spec +++ b/salt.spec @@ -52,13 +52,13 @@ %bcond_with builddocs Name: salt -Version: 2018.3.2 +Version: 2019.2.0 Release: 0 Summary: A parallel remote execution system License: Apache-2.0 Group: System/Management Url: http://saltstack.org/ -Source: https://github.com/saltstack/salt/archive/v%{version}.tar.gz +Source: v%{version}.tar.gz Source1: README.SUSE Source2: salt-tmpfiles.d Source3: html.tar.bz2 @@ -69,156 +69,94 @@ Patch1: run-salt-master-as-dedicated-salt-user.patch Patch2: run-salt-api-as-user-salt-bsc-1064520.patch Patch3: activate-all-beacons-sources-config-pillar-grains.patch Patch4: avoid-excessive-syslogging-by-watchdog-cronjob-58.patch -Patch5: feat-add-grain-for-all-fqdns.patch -Patch6: fix-bsc-1065792.patch -# PATCH-FIX_UPSTREAM https://github.com/saltstack/salt/pull/46575 -Patch7: fix-decrease-loglevel-when-unable-to-resolve-addr.patch +Patch5: fix-bsc-1065792.patch # PATCH-FIX_UPSTREAM https://github.com/saltstack/salt/pull/46684 -Patch8: add-saltssh-multi-version-support-across-python-inte.patch -# PATCH-FIX_UPSTREAM https://github.com/saltstack/salt/pull/46635 -Patch9: fix-for-errno-0-resolver-error-0-no-error-bsc-108758.patch +Patch6: add-saltssh-multi-version-support-across-python-inte.patch # PATCH-FIX_UPSTREAM https://github.com/saltstack/salt/pull/46890 -Patch10: fall-back-to-pymysql.patch -# PATCH-FIX_UPSTREAM https://github.com/saltstack/salt/pull/47149 -Patch11: strip-trailing-commas-on-linux-user-gecos-fields.patch -# PATCH-FIX_UPSTREAM https://github.com/saltstack/salt/pull/47232 -Patch12: fixed-usage-of-ipaddress.patch -# PATCH-FIX_UPSTREAM https://github.com/saltstack/salt/pull/47504 -Patch13: option-to-merge-current-pillar-with-opts-pillar-duri.patch -# PATCH-FIX_UPSTREAM https://github.com/saltstack/salt/pull/47471 -Patch14: do-not-override-jid-on-returners-only-sending-back-t.patch +Patch7: fall-back-to-pymysql.patch # PATCH-FIX_OPENSUSE bsc#1091371 -Patch15: enable-passing-a-unix_socket-for-mysql-returners-bsc.patch -# 
PATCH-FIX_UPSTREAM https://github.com/saltstack/salt/pull/47211 -Patch16: fix-for-ec2-rate-limit-failures.patch +Patch8: enable-passing-a-unix_socket-for-mysql-returners-bsc.patch # PATCH-FIX_UPSTREAM https://github.com/saltstack/salt/pull/47638 -Patch17: add-all_versions-parameter-to-include-all-installed-.patch -# PATCH-FIX_UPSTREAM https://github.com/saltstack/salt/pull/47765 -Patch18: prevent-zypper-from-parsing-repo-configuration-from-.patch -# PATCH-FIX_UPSTREAM https://github.com/saltstack/salt/pull/47149 -Patch19: add-other-attribute-to-gecos-fields-to-avoid-inconsi.patch -# PATCH-FIX_UPSTREAM https://github.com/saltstack/salt/pull/47908 -Patch20: align-suse-salt-master.service-limitnofiles-limit-wi.patch -# PATCH-FIX_OPENSUSE bsc#1095507 -Patch21: fix-deprecation-warning-bsc-1095507.patch +Patch9: add-all_versions-parameter-to-include-all-installed-.patch # PATCH-FIX_OPENSUSE bsc#1057635 -Patch22: add-environment-variable-to-know-if-yum-is-invoked-f.patch +Patch10: add-environment-variable-to-know-if-yum-is-invoked-f.patch # PATCH-FIX_OPENSUSE -Patch23: add-custom-suse-capabilities-as-grains.patch -# PATCH-FIX_OPENSUSE bsc#1098394 https://github.com/saltstack/salt/pull/47061 -Patch24: fix-diffing-binary-files-in-file.get_diff-bsc-109839.patch -# PATCH-FIX_OPENSUSE bsc#1072599 -Patch25: show-recommendations-for-salt-ssh-cross-version-pyth.patch -# PATCH-FIX_UPSTREAM https://github.com/saltstack/salt/pull/47405 -Patch26: fix-unboundlocalerror-in-file.get_diff.patch +Patch11: add-custom-suse-capabilities-as-grains.patch # PATCH-FIX_UPSTREAM https://github.com/saltstack/salt/pull/48294 -Patch27: fix-zypper.list_pkgs-to-be-aligned-with-pkg-state.patch -# PATCH-FIX_UPSTREAM https://github.com/saltstack/salt/pull/47572 -# PATCH-FIX_UPSTREAM https://github.com/saltstack/salt/pull/48015 -Patch28: accounting-for-when-files-in-an-archive-contain-non-.patch -# PATCH-FIX_UPSTREAM https://github.com/saltstack/salt/pull/48712 -Patch29: remove-old-hack-when-reporting-multiversion-packages.patch -# PATCH-FIX_UPSTREAM https://github.com/saltstack/salt/pull/46461 -# PATCH-FIX_UPSTREAM https://github.com/saltstack/salt/pull/46928 -# PATCH-FIX_UPSTREAM https://github.com/saltstack/salt/pull/46957 -# PATCH-FIX_UPSTREAM https://github.com/saltstack/salt/pull/47243 -Patch30: add-engine-relaying-libvirt-events.patch -# PATCH-FIX_UPSTREAM https://github.com/saltstack/salt/pull/48781 -Patch31: avoid-incomprehensive-message-if-crashes.patch -# PATCH-FIX_UPSTREAM https://github.com/saltstack/salt/pull/48765 -Patch32: fix-mine.get-not-returning-data-workaround-for-48020.patch -# PATCH-FIX_OPENSUSE bsc#1097174 and bsc#1097413 -Patch33: fix-for-sorting-of-multi-version-packages-bsc-109717.patch -# PATCH-FIX_UPSTREAM https://github.com/saltstack/salt/pull/48863 -Patch34: decode-file-contents-for-python2-bsc-1102013.patch -# PATCH-FIX_UPSTREAM https://github.com/saltstack/salt/pull/49052 -Patch35: add-support-for-python-3.7.patch -# PATCH-FIX_UPSTREAM https://github.com/saltstack/salt/pull/48771 -Patch36: only-do-reverse-dns-lookup-on-ips-for-salt-ssh.patch -# PATCH-FIX_UPSTREAM https://github.com/saltstack/salt/pull/49277 -Patch37: prepend-current-directory-when-path-is-just-filename.patch +Patch12: fix-zypper.list_pkgs-to-be-aligned-with-pkg-state.patch # PATCH-FIX_UPSTREAM https://github.com/saltstack/salt/pull/49063 -Patch38: integration-of-msi-authentication-with-azurearm-clou.patch +Patch13: integration-of-msi-authentication-with-azurearm-clou.patch # PATCH-FIX_UPSTREAM 
https://github.com/saltstack/salt/pull/49538 -Patch39: fix-for-suse-expanded-support-detection.patch -# PATCH-FIX_UPSTREAM https://github.com/saltstack/salt/pull/49508 -Patch40: x509-fixes-for-remote-signing-106.patch -# PATCH-FIX_UPSTREAM https://github.com/saltstack/salt/pull/49555 -Patch41: change-stringio-import-in-python2-to-import-the-clas.patch +Patch14: fix-for-suse-expanded-support-detection.patch # PATCH-FIX_UPSTREAM https://github.com/saltstack/salt/pull/48812 -Patch42: use-adler32-algorithm-to-compute-string-checksums.patch +Patch15: use-adler32-algorithm-to-compute-string-checksums.patch # PATCH-FIX_UPSTREAM https://github.com/saltstack/salt/pull/49497 -Patch43: x509-fixes-111.patch -# PATCH-FIX_UPSTREAM https://github.com/saltstack/salt/pull/49605 -Patch44: support-use-of-gce-instance-credentials-109.patch -# PATCH-FIX_UPSTREAM https://github.com/saltstack/salt/pull/49722 -Patch45: fix-index-error-when-running-on-python-3.patch +Patch16: x509-fixes-111.patch # PATCH-FIX_UPSTREAM https://github.com/saltstack/salt/pull/49696 -Patch46: loosen-azure-sdk-dependencies-in-azurearm-cloud-driv.patch +Patch17: loosen-azure-sdk-dependencies-in-azurearm-cloud-driv.patch # PATCH-FIX_UPSTREAM https://github.com/saltstack/salt/pull/49737 -Patch47: do-not-load-pip-state-if-there-is-no-3rd-party-depen.patch -# PATCH-FIX_UPSTREAM https://github.com/saltstack/salt/pull/49806 -Patch48: update-error-list-for-zypper.patch +Patch18: do-not-load-pip-state-if-there-is-no-3rd-party-depen.patch # PATCH-FIX_UPSTREAM https://github.com/saltstack/salt/pull/49815 -Patch49: fix-ipv6-scope-bsc-1108557.patch +Patch19: fix-ipv6-scope-bsc-1108557.patch # PATCH-FIX_UPSTREAM https://github.com/saltstack/salt/pull/49480 -Patch50: early-feature-support-config.patch -# PATCH-FIX_UPSTREAM https://github.com/saltstack/salt/pull/49085 -Patch51: fix-async-call-to-process-manager.patch +Patch20: early-feature-support-config.patch # PATCH-FIX_UPSTREAM https://github.com/saltstack/salt/pull/49908 -Patch52: bugfix-any-unicode-string-of-length-16-will-raise-ty.patch +Patch21: bugfix-any-unicode-string-of-length-16-will-raise-ty.patch # PATCH-FIX_UPSTREAM https://github.com/saltstack/salt/pull/49936 -Patch53: make-profiles-a-package.patch -# PATCH-FIX_UPSTREAM https://github.com/saltstack/salt/pull/49930 -Patch54: get-os_family-for-rpm-distros-from-the-rpm-macros.-u.patch +Patch22: make-profiles-a-package.patch # PATCH-FIX_UPSTREAM https://github.com/saltstack/salt/pull/49946 -Patch55: add-cpe_name-for-osversion-grain-parsing-u-49946.patch +Patch23: add-cpe_name-for-osversion-grain-parsing-u-49946.patch # PATCH-FIX_OPENSUSE: Fix unit test for grains core -Patch56: fix-unit-test-for-grains-core.patch +Patch24: fix-unit-test-for-grains-core.patch # PATCH-FIX_UPSTREAM https://github.com/saltstack/salt/pull/50049 # PATCH-FIX_UPSTREAM https://github.com/saltstack/salt/pull/50072 -Patch57: preserving-signature-in-module.run-state-u-50049.patch +Patch25: preserving-signature-in-module.run-state-u-50049.patch # PATCH-FIX_UPSTREAM https://github.com/saltstack/salt/pull/50095 -Patch58: support-config-non-root-permission-issues-fixes-u-50.patch +Patch26: support-config-non-root-permission-issues-fixes-u-50.patch # PATCH-FIX_UPSTREAM https://github.com/saltstack/salt/pull/50018 -Patch59: add-multi-file-support-and-globbing-to-the-filetree-.patch +Patch27: add-multi-file-support-and-globbing-to-the-filetree-.patch # PATCH-FIX_UPSTREAM https://github.com/saltstack/salt/pull/49761 # PATCH-FIX_UPSTREAM 
https://github.com/saltstack/salt/pull/50201 -Patch60: fixes-cve-2018-15750-cve-2018-15751.patch -# PATCH-FIX_UPSTREAM https://github.com/saltstack/salt/pull/48491 -Patch61: improved-handling-of-ldap-group-id.patch -# PATCH-FIX_UPSTREAM https://github.com/saltstack/salt/pull/48901 -Patch62: fixing-issue-when-a-valid-token-is-generated-even-wh.patch +Patch28: fixes-cve-2018-15750-cve-2018-15751.patch # PATCH-FIX_UPSTREAM https://github.com/saltstack/salt/pull/50417 -Patch63: fix-git_pillar-merging-across-multiple-__env__-repos.patch +Patch29: fix-git_pillar-merging-across-multiple-__env__-repos.patch # PATCH-FIX_UPSTREAM https://github.com/saltstack/salt/pull/50523 -Patch64: get-os_arch-also-without-rpm-package-installed.patch +Patch30: get-os_arch-also-without-rpm-package-installed.patch # PATCH-FIX_UPSTREAM https://github.com/saltstack/salt/pull/50392 -Patch65: make-aptpkg.list_repos-compatible-on-enabled-disable.patch +Patch31: make-aptpkg.list_repos-compatible-on-enabled-disable.patch # PATCH-FIX_UPSTREAM https://github.com/saltstack/salt/pull/50453 -Patch66: debian-info_installed-compatibility-50453.patch -# PATCH-FIX_UPSTREAM https://github.com/saltstack/salt/pull/48580 -Patch67: don-t-error-on-retcode-0-in-libcrypto.openssl_init_c.patch -# PATCH-FIX_UPSTREAM https://github.com/saltstack/salt/pull/48503 -# PATCH-FIX_UPSTREAM https://github.com/saltstack/salt/pull/48934 -Patch68: fix-latin1-encoding-problems-on-file-module-bsc-1116.patch +Patch32: debian-info_installed-compatibility-50453.patch # PATCH-FIX_UPSTREAM https://github.com/saltstack/salt/pull/50742 -Patch69: decide-if-the-source-should-be-actually-skipped.patch +Patch33: decide-if-the-source-should-be-actually-skipped.patch # PATCH-FIX_UPSTREAM https://github.com/saltstack/salt/pull/50773 -Patch70: add-hold-unhold-functions.patch -# PATCH-FIX_UPSTREAM https://github.com/saltstack/salt/pull/49639 -Patch71: retire-md5-checksum-for-pkg-mgmt-plugins.patch +Patch34: add-hold-unhold-functions.patch # PATCH-FIX_UPSTREAM https://github.com/saltstack/salt/pull/50401 # NOTE: This is a techpreview as well as in Fluorine! Release only in Neon. 
-Patch72: add-supportconfig-module-for-remote-calls-and-saltss.patch
+Patch35: add-supportconfig-module-for-remote-calls-and-saltss.patch
 # PATCH-FIX_UPSTREAM https://github.com/saltstack/salt/pull/50567
-Patch73: azurefs-gracefully-handle-attributeerror.patch
+Patch36: azurefs-gracefully-handle-attributeerror.patch
 # PATCH_FIX_OPENSUSE: https://github.com/openSUSE/salt/pull/116
-Patch74: return-the-expected-powerpc-os-arch-bsc-1117995.patch
+Patch37: return-the-expected-powerpc-os-arch-bsc-1117995.patch
 # PATCH-FIX_UPSTREAM https://github.com/saltstack/salt/pull/51108
-Patch75: remove-arch-from-name-when-pkg.list_pkgs-is-called-w.patch
+Patch38: remove-arch-from-name-when-pkg.list_pkgs-is-called-w.patch
 # PATCH-FIX_UPSTREAM https://github.com/saltstack/salt/pull/51119
-Patch76: fix-issue-2068-test.patch
+Patch39: fix-issue-2068-test.patch
+# PATCH-FIX_UPSTREAM https://github.com/saltstack/salt/pull/50125
+Patch40: zypper-add-root-configuration-parameter.patch
+# PATCH_FIX_OPENSUSE: Temporary fix allowing "id_" and "force" params while upstream figures it out
+Patch41: temporary-fix-extend-the-whitelist-of-allowed-comman.patch
+# PATCH-FIX_UPSTREAM https://github.com/saltstack/salt/pull/51382
+Patch42: don-t-call-zypper-with-more-than-one-no-refresh.patch
+# PATCH-FIX_UPSTREAM https://github.com/saltstack/salt/pull/50109
+# PATCH_FIX_OPENSUSE https://github.com/openSUSE/salt/pull/121
+Patch43: add-virt.all_capabilities.patch
+# PATCH-FIX_UPSTREAM https://github.com/saltstack/salt/pull/51691
+Patch44: add-virt.volume_infos-and-virt.volume_delete.patch
+# PATCH-FIX_UPSTREAM https://github.com/saltstack/salt/pull/51384
+Patch45: include-aliases-in-the-fqdns-grains.patch
+# PATCH-FIX_UPSTREAM https://github.com/saltstack/salt/pull/50546
+# PATCH-FIX_UPSTREAM https://github.com/saltstack/salt/pull/51863
+Patch46: async-batch-implementation.patch
 # BuildRoot: %{_tmppath}/%{name}-%{version}-build
 BuildRoot: %{_tmppath}/%{name}-%{version}-build
@@ -674,7 +612,7 @@ Zsh command line completion support for %{name}.
 %prep
 # %setup -q -n salt-%{version}
-%setup -q -n salt-%{version}
+%setup -q -n salt-2019.2.0-suse
 cp %{S:1} .
 cp %{S:5} ./.travis.yml
 %patch1 -p1
@@ -723,36 +661,6 @@ cp %{S:5} ./.travis.yml
 %patch44 -p1
 %patch45 -p1
 %patch46 -p1
-%patch47 -p1
-%patch48 -p1
-%patch49 -p1
-%patch50 -p1
-%patch51 -p1
-%patch52 -p1
-%patch53 -p1
-%patch54 -p1
-%patch55 -p1
-%patch56 -p1
-%patch57 -p1
-%patch58 -p1
-%patch59 -p1
-%patch60 -p1
-%patch61 -p1
-%patch62 -p1
-%patch63 -p1
-%patch64 -p1
-%patch65 -p1
-%patch66 -p1
-%patch67 -p1
-%patch68 -p1
-%patch69 -p1
-%patch70 -p1
-%patch71 -p1
-%patch72 -p1
-%patch73 -p1
-%patch74 -p1
-%patch75 -p1
-%patch76 -p1

 %build
 %if 0%{?build_py2}
diff --git a/show-recommendations-for-salt-ssh-cross-version-pyth.patch b/show-recommendations-for-salt-ssh-cross-version-pyth.patch
deleted file mode 100644
index 9d42956..0000000
--- a/show-recommendations-for-salt-ssh-cross-version-pyth.patch
+++ /dev/null
@@ -1,63 +0,0 @@
-From 15e97fd2916176fe850850fe90983ac95a1f8e7b Mon Sep 17 00:00:00 2001
-From: Erik Johnson
-Date: Mon, 11 Jun 2018 14:46:58 -0500
-Subject: [PATCH] Show recommendations for salt-ssh cross-version python
- errors
-
-This shows more accurate information on how to resolve version issues
-(e.g. master only has Salt deps installed for Python 3 but remote host
-has no Python 3 installed).
- -Use parenthesis for line continuation ---- - salt/client/ssh/__init__.py | 26 +++++++++++++++++++++++++- - 1 file changed, 25 insertions(+), 1 deletion(-) - -diff --git a/salt/client/ssh/__init__.py b/salt/client/ssh/__init__.py -index f1300b5698..8a85cc2480 100644 ---- a/salt/client/ssh/__init__.py -+++ b/salt/client/ssh/__init__.py -@@ -1387,6 +1387,30 @@ ARGS = {arguments}\n'''.format(config=self.minion_config, - perm_error_fmt = 'Permissions problem, target user may need '\ - 'to be root or use sudo:\n {0}' - -+ def _version_mismatch_error(): -+ messages = { -+ 2: { -+ 6: 'Install Python 2.7 / Python 3 Salt dependencies on the Salt SSH master \n' -+ 'to interact with Python 2.7 / Python 3 targets', -+ 7: 'Install Python 2.6 / Python 3 Salt dependencies on the Salt SSH master \n' -+ 'to interact with Python 2.6 / Python 3 targets', -+ }, -+ 3: { -+ 'default': '- Install Python 2.6/2.7 Salt dependencies on the Salt SSH \n' -+ ' master to interact with Python 2.6/2.7 targets\n' -+ '- Install Python 3 on the target machine(s)', -+ }, -+ 'default': 'Matching major/minor Python release (>=2.6) needed both on the Salt SSH \n' -+ 'master and target machine', -+ } -+ major, minor = sys.version_info[:2] -+ help_msg = ( -+ messages.get(major, {}).get(minor) -+ or messages.get(major, {}).get('default') -+ or messages['default'] -+ ) -+ return 'Python version error. Recommendation(s) follow:\n' + help_msg -+ - errors = [ - ( - (), -@@ -1396,7 +1420,7 @@ ARGS = {arguments}\n'''.format(config=self.minion_config, - ( - (salt.defaults.exitcodes.EX_THIN_PYTHON_INVALID,), - 'Python interpreter is too old', -- 'salt requires python 2.6 or newer on target hosts, must have same major version as origin host' -+ _version_mismatch_error() - ), - ( - (salt.defaults.exitcodes.EX_THIN_CHECKSUM,), --- -2.13.7 - - diff --git a/strip-trailing-commas-on-linux-user-gecos-fields.patch b/strip-trailing-commas-on-linux-user-gecos-fields.patch deleted file mode 100644 index 150c8b4..0000000 --- a/strip-trailing-commas-on-linux-user-gecos-fields.patch +++ /dev/null @@ -1,55 +0,0 @@ -From f515f99ee42ffaba30cee2e1941a7e9af9db7453 Mon Sep 17 00:00:00 2001 -From: =?UTF-8?q?Pablo=20Su=C3=A1rez=20Hern=C3=A1ndez?= - -Date: Wed, 18 Apr 2018 12:05:35 +0100 -Subject: [PATCH] Strip trailing commas on Linux user GECOS fields - -Add unit tests for GECOS fields ---- - salt/modules/useradd.py | 2 +- - tests/unit/modules/test_useradd.py | 18 ++++++++++++++++++ - 2 files changed, 19 insertions(+), 1 deletion(-) - -diff --git a/salt/modules/useradd.py b/salt/modules/useradd.py -index 545fe2a6f1..a61ba0e960 100644 ---- a/salt/modules/useradd.py -+++ b/salt/modules/useradd.py -@@ -81,7 +81,7 @@ def _build_gecos(gecos_dict): - return '{0},{1},{2},{3}'.format(gecos_dict.get('fullname', ''), - gecos_dict.get('roomnumber', ''), - gecos_dict.get('workphone', ''), -- gecos_dict.get('homephone', '')) -+ gecos_dict.get('homephone', '')).rstrip(',') - - - def _update_gecos(name, key, value, root=None): -diff --git a/tests/unit/modules/test_useradd.py b/tests/unit/modules/test_useradd.py -index eb983685bb..fa30a0df71 100644 ---- a/tests/unit/modules/test_useradd.py -+++ b/tests/unit/modules/test_useradd.py -@@ -393,3 +393,21 @@ class UserAddTestCase(TestCase, LoaderModuleMockMixin): - mock = MagicMock(side_effect=[{'name': ''}, False, {'name': ''}]) - with patch.object(useradd, 'info', mock): - self.assertFalse(useradd.rename('salt', 'salt')) -+ -+ def test_build_gecos_field(self): -+ ''' -+ Test if gecos fields are built correctly (removing 
trailing commas) -+ ''' -+ test_gecos = {'fullname': 'Testing', -+ 'roomnumber': 1234, -+ 'workphone': 22222, -+ 'homephone': 99999} -+ expected_gecos_fields = 'Testing,1234,22222,99999' -+ self.assertEqual(useradd._build_gecos(test_gecos), expected_gecos_fields) -+ test_gecos.pop('roomnumber') -+ test_gecos.pop('workphone') -+ expected_gecos_fields = 'Testing,,,99999' -+ self.assertEqual(useradd._build_gecos(test_gecos), expected_gecos_fields) -+ test_gecos.pop('homephone') -+ expected_gecos_fields = 'Testing' -+ self.assertEqual(useradd._build_gecos(test_gecos), expected_gecos_fields) --- -2.13.7 - - diff --git a/support-use-of-gce-instance-credentials-109.patch b/support-use-of-gce-instance-credentials-109.patch deleted file mode 100644 index 04e80eb..0000000 --- a/support-use-of-gce-instance-credentials-109.patch +++ /dev/null @@ -1,33 +0,0 @@ -From 4571116a54ff51683cb695ce795f04f8b318b440 Mon Sep 17 00:00:00 2001 -From: jgleissner -Date: Wed, 19 Sep 2018 14:37:12 +0200 -Subject: [PATCH] Support use of GCE instance credentials (#109) - -* Integration of MSI authentication with azurearm cloud driver (#105) - -* allow empty service_account_private_key in GCE driver - -Passing an emoty service_account_private_key to libcloud will enable -authentication using instance credentials, which is used by CaaSP in GCE. ---- - salt/cloud/clouds/gce.py | 3 ++- - 1 file changed, 2 insertions(+), 1 deletion(-) - -diff --git a/salt/cloud/clouds/gce.py b/salt/cloud/clouds/gce.py -index 75109491be..1018e36ed5 100644 ---- a/salt/cloud/clouds/gce.py -+++ b/salt/cloud/clouds/gce.py -@@ -134,7 +134,8 @@ def __virtual__(): - - parameters = details['gce'] - pathname = os.path.expanduser(parameters['service_account_private_key']) -- if salt.utils.cloud.check_key_path_and_mode( -+ # empty pathname will tell libcloud to use instance credentials -+ if pathname and salt.utils.cloud.check_key_path_and_mode( - provider, pathname - ) is False: - return False --- -2.17.1 - - diff --git a/temporary-fix-extend-the-whitelist-of-allowed-comman.patch b/temporary-fix-extend-the-whitelist-of-allowed-comman.patch new file mode 100644 index 0000000..ab06a9c --- /dev/null +++ b/temporary-fix-extend-the-whitelist-of-allowed-comman.patch @@ -0,0 +1,26 @@ +From c9c50ab75b4a8a73f57e8c2eeaa24401409e8c3c Mon Sep 17 00:00:00 2001 +From: Bo Maryniuk +Date: Thu, 24 Jan 2019 18:12:35 +0100 +Subject: [PATCH] temporary fix: extend the whitelist of allowed commands + +--- + salt/auth/__init__.py | 2 ++ + 1 file changed, 2 insertions(+) + +diff --git a/salt/auth/__init__.py b/salt/auth/__init__.py +index ca7168d00e..aa4c5c3670 100644 +--- a/salt/auth/__init__.py ++++ b/salt/auth/__init__.py +@@ -46,6 +46,8 @@ AUTH_INTERNAL_KEYWORDS = frozenset([ + 'gather_job_timeout', + 'kwarg', + 'match', ++ "id_", ++ "force", + 'metadata', + 'print_event', + 'raw', +-- +2.20.1 + + diff --git a/update-error-list-for-zypper.patch b/update-error-list-for-zypper.patch deleted file mode 100644 index 3987c08..0000000 --- a/update-error-list-for-zypper.patch +++ /dev/null @@ -1,62 +0,0 @@ -From 71e7ecfbb07cf14680a2a39de48a6e60cd20cb07 Mon Sep 17 00:00:00 2001 -From: Bo Maryniuk -Date: Wed, 26 Sep 2018 17:54:53 +0200 -Subject: [PATCH] Update error list for zypper - -Add error logging ---- - salt/modules/zypper.py | 30 ++++++++++++++++++++++++++++-- - 1 file changed, 28 insertions(+), 2 deletions(-) - -diff --git a/salt/modules/zypper.py b/salt/modules/zypper.py -index e4423cf1fc..6845e44ab6 100644 ---- a/salt/modules/zypper.py -+++ b/salt/modules/zypper.py -@@ 
-75,7 +75,25 @@ class _Zypper(object): - Allows serial zypper calls (first came, first won). - ''' - -- SUCCESS_EXIT_CODES = [0, 100, 101, 102, 103] -+ SUCCESS_EXIT_CODES = { -+ 0: 'Successful run of zypper with no special info.', -+ 100: 'Patches are available for installation.', -+ 101: 'Security patches are available for installation.', -+ 102: 'Installation successful, reboot required.', -+ 103: 'Installation succesful, restart of the package manager itself required.', -+ } -+ -+ WARNING_EXIT_CODES = { -+ 6: 'No repositories are defined.', -+ 7: 'The ZYPP library is locked.', -+ 106: 'Some repository had to be disabled temporarily because it failed to refresh. ' -+ 'You should check your repository configuration (e.g. zypper ref -f).', -+ 107: 'Installation basically succeeded, but some of the packages %post install scripts returned an error. ' -+ 'These packages were successfully unpacked to disk and are registered in the rpm database, ' -+ 'but due to the failed install script they may not work as expected. The failed scripts output might ' -+ 'reveal what actually went wrong. Any scripts output is also logged to /var/log/zypp/history.' -+ } -+ - LOCK_EXIT_CODE = 7 - XML_DIRECTIVES = ['-x', '--xmlout'] - ZYPPER_LOCK = '/var/run/zypp.pid' -@@ -188,7 +206,15 @@ class _Zypper(object): - - :return: - ''' -- return self.exit_code not in self.SUCCESS_EXIT_CODES -+ if self.exit_code: -+ msg = self.SUCCESS_EXIT_CODES.get(self.exit_code) -+ if msg: -+ log.info(msg) -+ msg = self.WARNING_EXIT_CODES.get(self.exit_code) -+ if msg: -+ log.warning(msg) -+ -+ return self.exit_code not in self.SUCCESS_EXIT_CODES and self.exit_code not in self.WARNING_EXIT_CODES - - def _is_lock(self): - ''' --- -2.19.0 - - diff --git a/use-adler32-algorithm-to-compute-string-checksums.patch b/use-adler32-algorithm-to-compute-string-checksums.patch index 8ff8ef6..a916c36 100644 --- a/use-adler32-algorithm-to-compute-string-checksums.patch +++ b/use-adler32-algorithm-to-compute-string-checksums.patch @@ -1,4 +1,4 @@ -From 1cb2d2bc6c1cf1a39e735120c184d6ade9e64c34 Mon Sep 17 00:00:00 2001 +From 9d09fcb60b8babd415af76812c93d38b6cbce661 Mon Sep 17 00:00:00 2001 From: Bo Maryniuk Date: Sat, 28 Jul 2018 22:59:04 +0200 Subject: [PATCH] Use Adler32 algorithm to compute string checksums @@ -15,24 +15,18 @@ Choose CRC method, default to faster but less reliable "adler32", if crc is in u Add warning for Sodium. 
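For readers following along, the checksum switch this commit message describes can be reproduced with the standard library alone. A minimal Python sketch; only zlib.adler32 and zlib.crc32 come from the patch, while the helper name and sample minion id below are invented for illustration:

    import zlib

    def server_id_checksum(minion_id, method='adler32'):
        # Both functions take bytes and return a 32-bit value; masking keeps
        # the result identical on Python 2 and Python 3.
        data = minion_id.encode('utf-8')
        hasher = zlib.crc32 if method == 'crc32' else zlib.adler32
        return hasher(data) & 0xffffffff

    print(server_id_checksum('minion.example.com'))           # faster, less reliable adler32
    print(server_id_checksum('minion.example.com', 'crc32'))  # slower, more reliable crc32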
--- - salt/config/__init__.py | 13 +++++++++- - salt/grains/core.py | 54 +++++++++++++++++++++++++++-------------- - 2 files changed, 48 insertions(+), 19 deletions(-) + salt/config/__init__.py | 7 +++++- + salt/grains/core.py | 53 +++++++++++++++++++++++++++-------------- + 2 files changed, 41 insertions(+), 19 deletions(-) diff --git a/salt/config/__init__.py b/salt/config/__init__.py -index feda0abac1..59df7e1cba 100644 +index 6b74b90ce0..5d0c18b5d1 100644 --- a/salt/config/__init__.py +++ b/salt/config/__init__.py -@@ -1186,6 +1186,16 @@ VALID_OPTS = { +@@ -1212,6 +1212,10 @@ VALID_OPTS = { - # Enable calling ssh minions from the salt master - 'enable_ssh_minions': bool, -+ -+ # Thorium saltenv -+ 'thoriumenv': (type(None), six.string_types), -+ -+ # Thorium top file location -+ 'thorium_top': six.string_types, + # Thorium top file location + 'thorium_top': six.string_types, + + # Use Adler32 hashing algorithm for server_id (default False until Sodium, "adler32" after) + # Possible values are: False, adler32, crc32 @@ -40,7 +34,7 @@ index feda0abac1..59df7e1cba 100644 } # default configurations -@@ -1486,7 +1496,8 @@ DEFAULT_MINION_OPTS = { +@@ -1520,7 +1524,8 @@ DEFAULT_MINION_OPTS = { }, 'discovery': False, 'schedule': {}, @@ -51,7 +45,7 @@ index feda0abac1..59df7e1cba 100644 DEFAULT_MASTER_OPTS = { diff --git a/salt/grains/core.py b/salt/grains/core.py -index a5c3a6a8cf..6aaf38096d 100644 +index 85a929a485..378d3cb786 100644 --- a/salt/grains/core.py +++ b/salt/grains/core.py @@ -20,6 +20,7 @@ import platform @@ -61,17 +55,16 @@ index a5c3a6a8cf..6aaf38096d 100644 +import zlib from errno import EACCES, EPERM import datetime - -@@ -46,6 +47,8 @@ import salt.utils.files - import salt.utils.network - import salt.utils.path + import warnings +@@ -61,6 +62,7 @@ import salt.utils.path + import salt.utils.pkg.rpm import salt.utils.platform -+import salt.utils.stringutils + import salt.utils.stringutils +import salt.utils.versions from salt.ext import six from salt.ext.six.moves import range -@@ -2420,40 +2423,55 @@ def _hw_data(osdata): +@@ -2730,40 +2732,55 @@ def _hw_data(osdata): return grains @@ -146,6 +139,6 @@ index a5c3a6a8cf..6aaf38096d 100644 def get_master(): -- -2.19.0 +2.20.1 diff --git a/v2018.3.2.tar.gz b/v2018.3.2.tar.gz deleted file mode 100644 index 7fc7708..0000000 --- a/v2018.3.2.tar.gz +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:515df2eed05b1a31101dc8d7cfb52f554ced6db52417a3e9c2096f055807235b -size 13024996 diff --git a/v2019.2.0.tar.gz b/v2019.2.0.tar.gz new file mode 100644 index 0000000..bf375cb --- /dev/null +++ b/v2019.2.0.tar.gz @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:263b7f6fd915eb8876795714cc4d2b6bb8ccb97423858b479eefb1c5429424d5 +size 15065369 diff --git a/x509-fixes-111.patch b/x509-fixes-111.patch index c34067f..bb8af15 100644 --- a/x509-fixes-111.patch +++ b/x509-fixes-111.patch @@ -1,4 +1,4 @@ -From 053d97afcc7486f7300e339bc56cb3c850cc523b Mon Sep 17 00:00:00 2001 +From c5adc0c126e593d12c9b18bcf60f96336c75e4a8 Mon Sep 17 00:00:00 2001 From: Florian Bergmann Date: Fri, 14 Sep 2018 10:30:39 +0200 Subject: [PATCH] X509 fixes (#111) @@ -33,17 +33,16 @@ PEP8: line too long * Fix unit tests --- - salt/modules/publish.py | 8 +- - salt/modules/x509.py | 132 ++++++++++++-------------------- - salt/states/x509.py | 22 ++++-- - tests/unit/modules/test_x509.py | 9 ++- - 4 files changed, 74 insertions(+), 97 deletions(-) + salt/modules/publish.py | 8 +-- + salt/modules/x509.py | 132 
++++++++++++++++------------------------ + salt/states/x509.py | 22 ++++--- + 3 files changed, 69 insertions(+), 93 deletions(-) diff --git a/salt/modules/publish.py b/salt/modules/publish.py -index 2de99583f4..ac31b4b65f 100644 +index 62e3e98f2f..fda848d1ec 100644 --- a/salt/modules/publish.py +++ b/salt/modules/publish.py -@@ -83,10 +83,8 @@ def _publish( +@@ -82,10 +82,8 @@ def _publish( in minion configuration but `via_master` was specified.') else: # Find the master in the list of master_uris generated by the minion base class @@ -56,9 +55,9 @@ index 2de99583f4..ac31b4b65f 100644 if not matching_master_uris: raise SaltInvocationError('Could not find match for {0} in \ -@@ -176,6 +174,8 @@ def _publish( - else: - return ret +@@ -178,6 +176,8 @@ def _publish( + finally: + channel.close() + return {} + @@ -66,10 +65,10 @@ index 2de99583f4..ac31b4b65f 100644 def publish(tgt, fun, diff --git a/salt/modules/x509.py b/salt/modules/x509.py -index 9901bc5bd9..45afcccd99 100644 +index 8689bfad35..4126f34960 100644 --- a/salt/modules/x509.py +++ b/salt/modules/x509.py -@@ -36,14 +36,13 @@ from salt.state import STATE_INTERNAL_KEYWORDS as _STATE_INTERNAL_KEYWORDS +@@ -38,14 +38,13 @@ from salt.state import STATE_INTERNAL_KEYWORDS as _STATE_INTERNAL_KEYWORDS # Import 3rd Party Libs try: import M2Crypto @@ -87,7 +86,7 @@ index 9901bc5bd9..45afcccd99 100644 __virtualname__ = 'x509' -@@ -81,10 +80,7 @@ def __virtual__(): +@@ -83,10 +82,7 @@ def __virtual__(): ''' only load this module if m2crypto is available ''' @@ -99,7 +98,7 @@ index 9901bc5bd9..45afcccd99 100644 class _Ctx(ctypes.Structure): -@@ -127,10 +123,8 @@ def _new_extension(name, value, critical=0, issuer=None, _pyfree=1): +@@ -129,10 +125,8 @@ def _new_extension(name, value, critical=0, issuer=None, _pyfree=1): doesn't support getting the publickeyidentifier from the issuer to create the authoritykeyidentifier extension. 
''' @@ -112,7 +111,7 @@ index 9901bc5bd9..45afcccd99 100644 # ensure name and value are bytes name = salt.utils.stringutils.to_str(name) -@@ -145,7 +139,7 @@ def _new_extension(name, value, critical=0, issuer=None, _pyfree=1): +@@ -147,7 +141,7 @@ def _new_extension(name, value, critical=0, issuer=None, _pyfree=1): x509_ext_ptr = M2Crypto.m2.x509v3_ext_conf(None, ctx, name, value) lhash = None except AttributeError: @@ -121,7 +120,7 @@ index 9901bc5bd9..45afcccd99 100644 ctx = M2Crypto.m2.x509v3_set_conf_lhash( lhash) # pylint: disable=no-member # ctx not zeroed -@@ -196,10 +190,8 @@ def _get_csr_extensions(csr): +@@ -198,10 +192,8 @@ def _get_csr_extensions(csr): csrtempfile.flush() csryaml = _parse_openssl_req(csrtempfile.name) csrtempfile.close() @@ -134,7 +133,7 @@ index 9901bc5bd9..45afcccd99 100644 if not csrexts: return ret -@@ -294,7 +286,7 @@ def _get_signing_policy(name): +@@ -296,7 +288,7 @@ def _get_signing_policy(name): signing_policy = policies.get(name) if signing_policy: return signing_policy @@ -143,9 +142,9 @@ index 9901bc5bd9..45afcccd99 100644 def _pretty_hex(hex_str): -@@ -321,9 +313,11 @@ def _text_or_file(input_): +@@ -335,9 +327,11 @@ def _text_or_file(input_): ''' - if os.path.isfile(input_): + if _isfile(input_): with salt.utils.files.fopen(input_) as fp_: - return salt.utils.stringutils.to_str(fp_.read()) + out = salt.utils.stringutils.to_str(fp_.read()) @@ -157,7 +156,7 @@ index 9901bc5bd9..45afcccd99 100644 def _parse_subject(subject): -@@ -341,7 +335,7 @@ def _parse_subject(subject): +@@ -355,7 +349,7 @@ def _parse_subject(subject): ret[nid_name] = val nids.append(nid_num) except TypeError as err: @@ -166,7 +165,7 @@ index 9901bc5bd9..45afcccd99 100644 return ret -@@ -520,8 +514,8 @@ def get_pem_entries(glob_path): +@@ -533,8 +527,8 @@ def get_pem_entries(glob_path): if os.path.isfile(path): try: ret[path] = get_pem_entry(text=path) @@ -177,7 +176,7 @@ index 9901bc5bd9..45afcccd99 100644 return ret -@@ -599,8 +593,8 @@ def read_certificates(glob_path): +@@ -612,8 +606,8 @@ def read_certificates(glob_path): if os.path.isfile(path): try: ret[path] = read_certificate(certificate=path) @@ -188,7 +187,7 @@ index 9901bc5bd9..45afcccd99 100644 return ret -@@ -629,12 +623,10 @@ def read_csr(csr): +@@ -642,12 +636,10 @@ def read_csr(csr): # Get size returns in bytes. The world thinks of key sizes in bits. 'Subject': _parse_subject(csr.get_subject()), 'Subject Hash': _dec2hex(csr.get_subject().as_hash()), @@ -203,7 +202,7 @@ index 9901bc5bd9..45afcccd99 100644 return ret -@@ -937,7 +929,7 @@ def create_crl( # pylint: disable=too-many-arguments,too-many-locals +@@ -943,7 +935,7 @@ def create_crl( # pylint: disable=too-many-arguments,too-many-locals # pyOpenSSL Note due to current limitations in pyOpenSSL it is impossible # to specify a digest For signing the CRL. 
This will hopefully be fixed # soon: https://github.com/pyca/pyopenssl/pull/161 @@ -212,7 +211,7 @@ index 9901bc5bd9..45afcccd99 100644 raise salt.exceptions.SaltInvocationError( 'Could not load OpenSSL module, OpenSSL unavailable' ) -@@ -962,8 +954,7 @@ def create_crl( # pylint: disable=too-many-arguments,too-many-locals +@@ -969,8 +961,7 @@ def create_crl( # pylint: disable=too-many-arguments,too-many-locals continue if 'revocation_date' not in rev_item: @@ -222,7 +221,7 @@ index 9901bc5bd9..45afcccd99 100644 rev_date = datetime.datetime.strptime( rev_item['revocation_date'], '%Y-%m-%d %H:%M:%S') -@@ -1002,8 +993,9 @@ def create_crl( # pylint: disable=too-many-arguments,too-many-locals +@@ -1011,8 +1002,9 @@ def create_crl( # pylint: disable=too-many-arguments,too-many-locals try: crltext = crl.export(**export_kwargs) except (TypeError, ValueError): @@ -234,7 +233,7 @@ index 9901bc5bd9..45afcccd99 100644 export_kwargs.pop('digest', None) crltext = crl.export(**export_kwargs) -@@ -1042,8 +1034,7 @@ def sign_remote_certificate(argdic, **kwargs): +@@ -1050,8 +1042,7 @@ def sign_remote_certificate(argdic, **kwargs): if 'signing_policy' in argdic: signing_policy = _get_signing_policy(argdic['signing_policy']) if not signing_policy: @@ -244,7 +243,7 @@ index 9901bc5bd9..45afcccd99 100644 if isinstance(signing_policy, list): dict_ = {} -@@ -1080,6 +1071,7 @@ def get_signing_policy(signing_policy_name): +@@ -1091,6 +1082,7 @@ def get_signing_policy(signing_policy_name): signing_policy = _get_signing_policy(signing_policy_name) if not signing_policy: return 'Signing policy {0} does not exist.'.format(signing_policy_name) @@ -252,7 +251,7 @@ index 9901bc5bd9..45afcccd99 100644 if isinstance(signing_policy, list): dict_ = {} for item in signing_policy: -@@ -1092,10 +1084,9 @@ def get_signing_policy(signing_policy_name): +@@ -1103,10 +1095,9 @@ def get_signing_policy(signing_policy_name): pass try: @@ -265,8 +264,8 @@ index 9901bc5bd9..45afcccd99 100644 return signing_policy -@@ -1346,8 +1337,7 @@ def create_certificate( - signing_private_key='/etc/pki/myca.key' csr='/etc/pki/myca.csr'} +@@ -1356,8 +1347,7 @@ def create_certificate( + salt '*' x509.create_certificate path=/etc/pki/myca.crt signing_private_key='/etc/pki/myca.key' csr='/etc/pki/myca.csr'} ''' - if not path and not text and \ @@ -275,7 +274,7 @@ index 9901bc5bd9..45afcccd99 100644 raise salt.exceptions.SaltInvocationError( 'Either path or text must be specified.') if path and text: -@@ -1376,8 +1366,7 @@ def create_certificate( +@@ -1386,8 +1376,7 @@ def create_certificate( # Including listen_in and preqreuired because they are not included # in STATE_INTERNAL_KEYWORDS # for salt 2014.7.2 @@ -285,7 +284,7 @@ index 9901bc5bd9..45afcccd99 100644 kwargs.pop(ignore, None) certs = __salt__['publish.publish']( -@@ -1484,8 +1473,7 @@ def create_certificate( +@@ -1500,8 +1489,7 @@ def create_certificate( continue # Use explicitly set values first, fall back to CSR values. 
@@ -295,7 +294,7 @@ index 9901bc5bd9..45afcccd99 100644 critical = False if extval.startswith('critical '): -@@ -1608,8 +1596,8 @@ def create_csr(path=None, text=False, **kwargs): +@@ -1623,8 +1611,8 @@ def create_csr(path=None, text=False, **kwargs): if 'private_key' not in kwargs and 'public_key' in kwargs: kwargs['private_key'] = kwargs['public_key'] @@ -306,7 +305,7 @@ index 9901bc5bd9..45afcccd99 100644 if 'private_key' not in kwargs: raise salt.exceptions.SaltInvocationError('private_key is required') -@@ -1621,11 +1609,9 @@ def create_csr(path=None, text=False, **kwargs): +@@ -1636,11 +1624,9 @@ def create_csr(path=None, text=False, **kwargs): kwargs['private_key_passphrase'] = None if 'public_key_passphrase' not in kwargs: kwargs['public_key_passphrase'] = None @@ -320,7 +319,7 @@ index 9901bc5bd9..45afcccd99 100644 kwargs['public_key_passphrase'] = kwargs['private_key_passphrase'] csr.set_pubkey(get_public_key(kwargs['public_key'], -@@ -1669,18 +1655,10 @@ def create_csr(path=None, text=False, **kwargs): +@@ -1684,18 +1670,10 @@ def create_csr(path=None, text=False, **kwargs): extstack.push(ext) csr.add_extensions(extstack) @@ -340,7 +339,7 @@ index 9901bc5bd9..45afcccd99 100644 def verify_private_key(private_key, public_key, passphrase=None): -@@ -1705,8 +1683,7 @@ def verify_private_key(private_key, public_key, passphrase=None): +@@ -1720,8 +1698,7 @@ def verify_private_key(private_key, public_key, passphrase=None): salt '*' x509.verify_private_key private_key=/etc/pki/myca.key \\ public_key=/etc/pki/myca.crt ''' @@ -350,7 +349,7 @@ index 9901bc5bd9..45afcccd99 100644 def verify_signature(certificate, signing_pub_key=None, -@@ -1760,9 +1737,8 @@ def verify_crl(crl, cert): +@@ -1775,9 +1752,8 @@ def verify_crl(crl, cert): salt '*' x509.verify_crl crl=/etc/pki/myca.crl cert=/etc/pki/myca.crt ''' if not salt.utils.path.which('openssl'): @@ -362,7 +361,7 @@ index 9901bc5bd9..45afcccd99 100644 crltext = _text_or_file(crl) crltext = get_pem_entry(crltext, pem_type='X509 CRL') crltempfile = tempfile.NamedTemporaryFile() -@@ -1783,10 +1759,7 @@ def verify_crl(crl, cert): +@@ -1798,10 +1774,7 @@ def verify_crl(crl, cert): crltempfile.close() certtempfile.close() @@ -374,7 +373,7 @@ index 9901bc5bd9..45afcccd99 100644 def expired(certificate): -@@ -1823,8 +1796,9 @@ def expired(certificate): +@@ -1838,8 +1811,9 @@ def expired(certificate): ret['expired'] = True else: ret['expired'] = False @@ -386,7 +385,7 @@ index 9901bc5bd9..45afcccd99 100644 return ret -@@ -1847,6 +1821,7 @@ def will_expire(certificate, days): +@@ -1862,6 +1836,7 @@ def will_expire(certificate, days): salt '*' x509.will_expire "/etc/pki/mycert.crt" days=30 ''' @@ -394,7 +393,7 @@ index 9901bc5bd9..45afcccd99 100644 ret = {} if os.path.isfile(certificate): -@@ -1856,18 +1831,13 @@ def will_expire(certificate, days): +@@ -1871,18 +1846,13 @@ def will_expire(certificate, days): cert = _get_certificate_obj(certificate) @@ -419,7 +418,7 @@ index 9901bc5bd9..45afcccd99 100644 return ret diff --git a/salt/states/x509.py b/salt/states/x509.py -index 7bb941f393..3ba4f79c79 100644 +index 209cbc6738..8c79c6d034 100644 --- a/salt/states/x509.py +++ b/salt/states/x509.py @@ -163,6 +163,7 @@ import copy @@ -458,7 +457,7 @@ index 7bb941f393..3ba4f79c79 100644 overwrite: Overwrite an existing private key if the provided passphrase cannot decrypt it. 
-@@ -453,8 +455,10 @@ def certificate_managed(name, +@@ -459,8 +461,10 @@ def certificate_managed(name, private_key_args['name'], pem_type='RSA PRIVATE KEY') else: new_private_key = True @@ -471,7 +470,7 @@ index 7bb941f393..3ba4f79c79 100644 kwargs['public_key'] = private_key -@@ -664,8 +668,10 @@ def crl_managed(name, +@@ -671,8 +675,10 @@ def crl_managed(name, else: current = '{0} does not exist.'.format(name) @@ -484,35 +483,15 @@ index 7bb941f393..3ba4f79c79 100644 new = __salt__['x509.read_crl'](crl=new_crl) new_comp = new.copy() -@@ -707,6 +713,6 @@ def pem_managed(name, - Any arguments supported by :state:`file.managed ` are supported. +@@ -714,6 +720,6 @@ def pem_managed(name, + Any arguments supported by :py:func:`file.managed ` are supported. ''' file_args, kwargs = _get_file_args(name, **kwargs) - file_args['contents'] = __salt__['x509.get_pem_entry'](text=text) + file_args['contents'] = salt.utils.stringutils.to_str(__salt__['x509.get_pem_entry'](text=text)) return __states__['file.managed'](**file_args) -diff --git a/tests/unit/modules/test_x509.py b/tests/unit/modules/test_x509.py -index c300a56d64..7e00c97140 100644 ---- a/tests/unit/modules/test_x509.py -+++ b/tests/unit/modules/test_x509.py -@@ -67,10 +67,11 @@ class X509TestCase(TestCase, LoaderModuleMockMixin): - - subj = FakeSubject() - x509._parse_subject(subj) -- x509.log.trace.assert_called_once() -- assert x509.log.trace.call_args[0][0] == "Missing attribute '%s'. Error: %s" -- assert x509.log.trace.call_args[0][1] == list(subj.nid.keys())[0] -- assert isinstance(x509.log.trace.call_args[0][2], TypeError) -+ x509.log.debug.assert_called_once() -+ -+ assert x509.log.debug.call_args[0][0] == "Missing attribute '%s'. Error: %s" -+ assert x509.log.debug.call_args[0][1] == list(subj.nid.keys())[0] -+ assert isinstance(x509.log.debug.call_args[0][2], TypeError) - - @skipIf(not HAS_M2CRYPTO, 'Skipping, M2Crypt is unavailble') - def test_get_pem_entry(self): -- -2.19.0 +2.17.1 diff --git a/x509-fixes-for-remote-signing-106.patch b/x509-fixes-for-remote-signing-106.patch deleted file mode 100644 index d9507c2..0000000 --- a/x509-fixes-for-remote-signing-106.patch +++ /dev/null @@ -1,80 +0,0 @@ -From 6276eb2cd3f2b396c13118a111998230477cc65a Mon Sep 17 00:00:00 2001 -From: Florian Bergmann -Date: Tue, 11 Sep 2018 14:02:55 +0200 -Subject: [PATCH] X509 fixes for remote signing (#106) - -* Use to_str salt.utils when writing to a file. - -* Assign the certificate as a string. - -* Convert to string before sending via 'publish'. - -Otherwise the publish call with receive a "b''" string, which can not be used -in the functions. - -* Do not silently ignore errors. - -At least log the occurring errors to debug and trace. 
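The bytes-versus-text rule those bullet points describe amounts to normalising data to the native str type before handing it to a file object. A rough Python 3 sketch, where to_str() is a simplified stand-in for salt.utils.stringutils.to_str and the CRL payload is a dummy value:

    import tempfile

    def to_str(data, encoding='utf-8'):
        # Normalise bytes to the native str type before writing.
        return data.decode(encoding) if isinstance(data, bytes) else data

    crltext = b'-----BEGIN X509 CRL-----\n...\n-----END X509 CRL-----\n'

    # NamedTemporaryFile defaults to binary mode; opening it in text mode and
    # converting first avoids mixing bytes and str, the failure mode the
    # patch chose to log instead of silently ignoring.
    with tempfile.NamedTemporaryFile('w+') as crltempfile:
        crltempfile.write(to_str(crltext))
        crltempfile.flush()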
---- - salt/modules/x509.py | 10 +++++----- - salt/states/x509.py | 2 +- - 2 files changed, 6 insertions(+), 6 deletions(-) - -diff --git a/salt/modules/x509.py b/salt/modules/x509.py -index 15de06e200..9901bc5bd9 100644 ---- a/salt/modules/x509.py -+++ b/salt/modules/x509.py -@@ -658,7 +658,7 @@ def read_crl(crl): - text = get_pem_entry(text, pem_type='X509 CRL') - - crltempfile = tempfile.NamedTemporaryFile() -- crltempfile.write(text) -+ crltempfile.write(salt.utils.stringutils.to_str(text)) - crltempfile.flush() - crlparsed = _parse_openssl_crl(crltempfile.name) - crltempfile.close() -@@ -1368,9 +1368,9 @@ def create_certificate( - pem_type='CERTIFICATE REQUEST').replace('\n', '') - if 'public_key' in kwargs: - # Strip newlines to make passing through as cli functions easier -- kwargs['public_key'] = get_public_key( -+ kwargs['public_key'] = salt.utils.stringutils.to_str(get_public_key( - kwargs['public_key'], -- passphrase=kwargs['public_key_passphrase']).replace('\n', '') -+ passphrase=kwargs['public_key_passphrase'])).replace('\n', '') - - # Remove system entries in kwargs - # Including listen_in and preqreuired because they are not included -@@ -1766,13 +1766,13 @@ def verify_crl(crl, cert): - crltext = _text_or_file(crl) - crltext = get_pem_entry(crltext, pem_type='X509 CRL') - crltempfile = tempfile.NamedTemporaryFile() -- crltempfile.write(crltext) -+ crltempfile.write(salt.utils.stringutils.to_str(crltext)) - crltempfile.flush() - - certtext = _text_or_file(cert) - certtext = get_pem_entry(certtext, pem_type='CERTIFICATE') - certtempfile = tempfile.NamedTemporaryFile() -- certtempfile.write(certtext) -+ certtempfile.write(salt.utils.stringutils.to_str(certtext)) - certtempfile.flush() - - cmd = ('openssl crl -noout -in {0} -CAfile {1}'.format( -diff --git a/salt/states/x509.py b/salt/states/x509.py -index 832f74168c..7bb941f393 100644 ---- a/salt/states/x509.py -+++ b/salt/states/x509.py -@@ -545,7 +545,7 @@ def certificate_managed(name, - if not private_ret['result']: - return private_ret - -- file_args['contents'] += certificate -+ file_args['contents'] += salt.utils.stringutils.to_str(certificate) - - if not append_certs: - append_certs = [] --- -2.19.0 - - diff --git a/zypper-add-root-configuration-parameter.patch b/zypper-add-root-configuration-parameter.patch new file mode 100644 index 0000000..69e513d --- /dev/null +++ b/zypper-add-root-configuration-parameter.patch @@ -0,0 +1,2147 @@ +From e20116f09c1f68238008c13a0517a8d36a7be56a Mon Sep 17 00:00:00 2001 +From: Alberto Planas +Date: Wed, 17 Oct 2018 11:58:04 +0200 +Subject: [PATCH] zypper: add root configuration parameter + +Fix typo in comment + +lowpkg: add parameter to change root directory + +The CLI rpm command allows the --root parameter to change the +expected location where the rpm database can be found. + +This patch add a new optional parameter in the public interface +to allow the set of the new root location. + +Update the tests to use the extra parameter. + +Add root parameter into the zypper module + +The zypper CLI provides a way to change the path where zypper expect +to find the required configuration files and repositories. + +This feature is useful to bootstrap chroot environments, inspect +repositories and packages from locally mounted devices, or help +during the installation of a new OS from the SUSE family. + +This patch add the root optional parameter for each command in the +public interface, and fix the tests. + +pkg: Transfer optional parameters to lower levels. 
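The plumbing described above for the rpm wrappers follows one pattern throughout the patch: the command list only grows a '--root <path>' pair when a root was actually requested, so the default of querying the host's rpm database is unchanged. A minimal sketch (build_rpm_cmd is an invented name, not a function from the module):

    def build_rpm_cmd(*args, **kwargs):
        # Only extend the command with '--root <path>' when a root was given.
        cmd = ['rpm']
        if kwargs.get('root'):
            cmd.extend(['--root', kwargs['root']])
        cmd.extend(args)
        return cmd

    print(build_rpm_cmd('-qa'))               # ['rpm', '-qa']
    print(build_rpm_cmd('-qa', root='/mnt'))  # ['rpm', '--root', '/mnt', '-qa']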
+ +pkgrepo: Transfer optional parameters to lower levels. + +zypper: fix the reset after the call + +_Zypper class take note when a .call() is done, to clean up the data +when we access to some attribute. + +This produces a bug when two calls are one after another an we set +some attributes via the __call__ method, as whatever is set will be +cleared after the first attribute is accessed. + +For example: + +zypper.attrib.call(..) +zypper(root=root).otherattrib.call(..) + +The first call will set __called as True, and the reset of the inner +state of zypper will be cleared when otherattrib is accessed, +cleanning the status for __root. + +This patch makes sure to clean the status also during the __call__ +method, avoiding the cleanning when the attribute is accessed. + +zypper: add no_recommends parameter + +Add no_recommends parameter to install and upgrade actions. +--- + salt/modules/rpm_lowpkg.py | 101 +++++-- + salt/modules/zypperpkg.py | 390 ++++++++++++++++++-------- + salt/states/pkg.py | 28 +- + salt/states/pkgrepo.py | 14 +- + tests/unit/modules/test_rpm_lowpkg.py | 92 +++++- + tests/unit/modules/test_zypperpkg.py | 45 +-- + tests/unit/states/test_pkg.py | 7 +- + 7 files changed, 488 insertions(+), 189 deletions(-) + +diff --git a/salt/modules/rpm_lowpkg.py b/salt/modules/rpm_lowpkg.py +index 893ae4f817..e577c4391a 100644 +--- a/salt/modules/rpm_lowpkg.py ++++ b/salt/modules/rpm_lowpkg.py +@@ -76,7 +76,7 @@ def bin_pkg_info(path, saltenv='base'): + minion so that it can be examined. + + saltenv : base +- Salt fileserver envrionment from which to retrieve the package. Ignored ++ Salt fileserver environment from which to retrieve the package. Ignored + if ``path`` is a local file path on the minion. + + CLI Example: +@@ -128,12 +128,15 @@ def bin_pkg_info(path, saltenv='base'): + return ret + + +-def list_pkgs(*packages): ++def list_pkgs(*packages, **kwargs): + ''' + List the packages currently installed in a dict:: + + {'': ''} + ++ root ++ use root as top level directory (default: "/") ++ + CLI Example: + + .. code-block:: bash +@@ -141,8 +144,11 @@ def list_pkgs(*packages): + salt '*' lowpkg.list_pkgs + ''' + pkgs = {} +- cmd = ['rpm', '-q' if packages else '-qa', +- '--queryformat', r'%{NAME} %{VERSION}\n'] ++ cmd = ['rpm'] ++ if kwargs.get('root'): ++ cmd.extend(['--root', kwargs['root']]) ++ cmd.extend(['-q' if packages else '-qa', ++ '--queryformat', r'%{NAME} %{VERSION}\n']) + if packages: + cmd.extend(packages) + out = __salt__['cmd.run'](cmd, output_loglevel='trace', python_shell=False) +@@ -158,6 +164,9 @@ def verify(*packages, **kwargs): + ''' + Runs an rpm -Va on a system, and returns the results in a dict + ++ root ++ use root as top level directory (default: "/") ++ + Files with an attribute of config, doc, ghost, license or readme in the + package header can be ignored using the ``ignore_types`` keyword argument + +@@ -199,6 +208,8 @@ def verify(*packages, **kwargs): + verify_options = [x.strip() for x in six.text_type(verify_options).split(',')] + + cmd = ['rpm'] ++ if kwargs.get('root'): ++ cmd.extend(['--root', kwargs['root']]) + cmd.extend(['--' + x for x in verify_options]) + if packages: + cmd.append('-V') +@@ -258,6 +269,9 @@ def modified(*packages, **flags): + + .. versionadded:: 2015.5.0 + ++ root ++ use root as top level directory (default: "/") ++ + CLI examples: + + .. 
code-block:: bash +@@ -266,10 +280,12 @@ def modified(*packages, **flags): + salt '*' lowpkg.modified httpd postfix + salt '*' lowpkg.modified + ''' +- ret = __salt__['cmd.run_all']( +- ['rpm', '-Va'] + list(packages), +- output_loglevel='trace', +- python_shell=False) ++ cmd = ['rpm'] ++ if flags.get('root'): ++ cmd.extend(['--root', flags.pop('root')]) ++ cmd.append('-Va') ++ cmd.extend(packages) ++ ret = __salt__['cmd.run_all'](cmd, output_loglevel='trace', python_shell=False) + + data = {} + +@@ -324,12 +340,15 @@ def modified(*packages, **flags): + return filtered_data + + +-def file_list(*packages): ++def file_list(*packages, **kwargs): + ''' + List the files that belong to a package. Not specifying any packages will + return a list of _every_ file on the system's rpm database (not generally + recommended). + ++ root ++ use root as top level directory (default: "/") ++ + CLI Examples: + + .. code-block:: bash +@@ -338,12 +357,15 @@ def file_list(*packages): + salt '*' lowpkg.file_list httpd postfix + salt '*' lowpkg.file_list + ''' +- if not packages: +- cmd = ['rpm', '-qla'] +- else: +- cmd = ['rpm', '-ql'] ++ cmd = ['rpm'] ++ if kwargs.get('root'): ++ cmd.extend(['--root', kwargs['root']]) ++ ++ cmd.append('-ql' if packages else '-qla') ++ if packages: + # Can't concatenate a tuple, must do a list.extend() + cmd.extend(packages) ++ + ret = __salt__['cmd.run']( + cmd, + output_loglevel='trace', +@@ -351,12 +373,15 @@ def file_list(*packages): + return {'errors': [], 'files': ret} + + +-def file_dict(*packages): ++def file_dict(*packages, **kwargs): + ''' + List the files that belong to a package, sorted by group. Not specifying + any packages will return a list of _every_ file on the system's rpm + database (not generally recommended). + ++ root ++ use root as top level directory (default: "/") ++ + CLI Examples: + + .. code-block:: bash +@@ -368,8 +393,11 @@ def file_dict(*packages): + errors = [] + ret = {} + pkgs = {} +- cmd = ['rpm', '-q' if packages else '-qa', +- '--queryformat', r'%{NAME} %{VERSION}\n'] ++ cmd = ['rpm'] ++ if kwargs.get('root'): ++ cmd.extend(['--root', kwargs['root']]) ++ cmd.extend(['-q' if packages else '-qa', ++ '--queryformat', r'%{NAME} %{VERSION}\n']) + if packages: + cmd.extend(packages) + out = __salt__['cmd.run'](cmd, output_loglevel='trace', python_shell=False) +@@ -380,8 +408,10 @@ def file_dict(*packages): + comps = line.split() + pkgs[comps[0]] = {'version': comps[1]} + for pkg in pkgs: +- files = [] +- cmd = ['rpm', '-ql', pkg] ++ cmd = ['rpm'] ++ if kwargs.get('root'): ++ cmd.extend(['--root', kwargs['root']]) ++ cmd.extend(['-ql', pkg]) + out = __salt__['cmd.run']( + ['rpm', '-ql', pkg], + output_loglevel='trace', +@@ -390,7 +420,7 @@ def file_dict(*packages): + return {'errors': errors, 'packages': ret} + + +-def owner(*paths): ++def owner(*paths, **kwargs): + ''' + Return the name of the package that owns the file. Multiple file paths can + be passed. If a single path is passed, a string will be returned, +@@ -400,6 +430,9 @@ def owner(*paths): + If the file is not owned by a package, or is not present on the minion, + then an empty string will be returned for that path. + ++ root ++ use root as top level directory (default: "/") ++ + CLI Examples: + + .. 
code-block:: bash +@@ -411,7 +444,10 @@ def owner(*paths): + return '' + ret = {} + for path in paths: +- cmd = ['rpm', '-qf', '--queryformat', '%{name}', path] ++ cmd = ['rpm'] ++ if kwargs.get('root'): ++ cmd.extend(['--root', kwargs['root']]) ++ cmd.extend(['-qf', '--queryformat', '%{name}', path]) + ret[path] = __salt__['cmd.run_stdout'](cmd, + output_loglevel='trace', + python_shell=False) +@@ -471,6 +507,9 @@ def info(*packages, **kwargs): + :param all_versions: + Return information for all installed versions of the packages + ++ :param root: ++ use root as top level directory (default: "/") ++ + :return: + + CLI example: +@@ -493,7 +532,14 @@ def info(*packages, **kwargs): + else: + size_tag = '%{SIZE}' + +- cmd = packages and "rpm -q {0}".format(' '.join(packages)) or "rpm -qa" ++ cmd = ['rpm'] ++ if kwargs.get('root'): ++ cmd.extend(['--root', kwargs['root']]) ++ if packages: ++ cmd.append('-q') ++ cmd.extend(packages) ++ else: ++ cmd.append('-qa') + + # Construct query format + attr_map = { +@@ -544,6 +590,7 @@ def info(*packages, **kwargs): + query.append(attr_map['description']) + query.append("-----\\n") + ++ cmd = ' '.join(cmd) + call = __salt__['cmd.run_all'](cmd + (" --queryformat '{0}'".format(''.join(query))), + output_loglevel='trace', env={'TZ': 'UTC'}, clean_env=True) + if call['retcode'] != 0: +@@ -744,10 +791,13 @@ def version_cmp(ver1, ver2, ignore_epoch=False): + return salt.utils.versions.version_cmp(ver1, ver2, ignore_epoch=False) + + +-def checksum(*paths): ++def checksum(*paths, **kwargs): + ''' + Return if the signature of a RPM file is valid. + ++ root ++ use root as top level directory (default: "/") ++ + CLI Example: + + .. code-block:: bash +@@ -760,9 +810,14 @@ def checksum(*paths): + if not paths: + raise CommandExecutionError("No package files has been specified.") + ++ cmd = ['rpm'] ++ if kwargs.get('root'): ++ cmd.extend(['--root', kwargs['root']]) ++ cmd.extend(['-K', '--quiet']) + for package_file in paths: ++ cmd_ = cmd + [package_file] + ret[package_file] = (bool(__salt__['file.file_exists'](package_file)) and +- not __salt__['cmd.retcode'](["rpm", "-K", "--quiet", package_file], ++ not __salt__['cmd.retcode'](cmd_, + ignore_retcode=True, + output_loglevel='trace', + python_shell=False)) +diff --git a/salt/modules/zypperpkg.py b/salt/modules/zypperpkg.py +index 92e7052020..c442337c58 100644 +--- a/salt/modules/zypperpkg.py ++++ b/salt/modules/zypperpkg.py +@@ -99,6 +99,7 @@ class _Zypper(object): + + LOCK_EXIT_CODE = 7 + XML_DIRECTIVES = ['-x', '--xmlout'] ++ # ZYPPER_LOCK is not affected by --root + ZYPPER_LOCK = '/var/run/zypp.pid' + TAG_RELEASED = 'zypper/released' + TAG_BLOCKED = 'zypper/blocked' +@@ -107,7 +108,6 @@ class _Zypper(object): + ''' + Constructor + ''' +- self.__called = False + self._reset() + + def _reset(self): +@@ -129,6 +129,10 @@ class _Zypper(object): + self.__refresh = False + self.__ignore_repo_failure = False + self.__systemd_scope = False ++ self.__root = None ++ ++ # Call status ++ self.__called = False + + def __call__(self, *args, **kwargs): + ''' +@@ -136,11 +140,17 @@ class _Zypper(object): + :param kwargs: + :return: + ''' ++ # Reset after the call ++ if self.__called: ++ self._reset() ++ + # Ignore exit code for 106 (repo is not available) + if 'no_repo_failure' in kwargs: + self.__ignore_repo_failure = kwargs['no_repo_failure'] + if 'systemd_scope' in kwargs: + self.__systemd_scope = kwargs['systemd_scope'] ++ if 'root' in kwargs: ++ self.__root = kwargs['root'] + return self + + def __getattr__(self, item): +@@ 
-153,7 +163,6 @@ class _Zypper(object): + # Reset after the call + if self.__called: + self._reset() +- self.__called = False + + if item == 'xml': + self.__xml = True +@@ -284,6 +293,8 @@ class _Zypper(object): + self.__cmd.append('--xmlout') + if not self.__refresh: + self.__cmd.append('--no-refresh') ++ if self.__root: ++ self.__cmd.extend(['--root', self.__root]) + + self.__cmd.extend(args) + kwargs['output_loglevel'] = 'trace' +@@ -442,7 +453,7 @@ def _clean_cache(): + __context__.pop(cache_name, None) + + +-def list_upgrades(refresh=True, **kwargs): ++def list_upgrades(refresh=True, root=None, **kwargs): + ''' + List all available package upgrades on this system + +@@ -451,6 +462,9 @@ def list_upgrades(refresh=True, **kwargs): + If set to False it depends on zypper if a refresh is + executed. + ++ root ++ operate on a different root directory. ++ + CLI Example: + + .. code-block:: bash +@@ -458,7 +472,7 @@ def list_upgrades(refresh=True, **kwargs): + salt '*' pkg.list_upgrades + ''' + if refresh: +- refresh_db() ++ refresh_db(root) + + ret = dict() + cmd = ['list-updates'] +@@ -467,7 +481,7 @@ def list_upgrades(refresh=True, **kwargs): + if not isinstance(repo_name, six.string_types): + repo_name = six.text_type(repo_name) + cmd.extend(['--repo', repo_name]) +- for update_node in __zypper__.nolock.xml.call(*cmd).getElementsByTagName('update'): ++ for update_node in __zypper__(root=root).nolock.xml.call(*cmd).getElementsByTagName('update'): + if update_node.getAttribute('kind') == 'package': + ret[update_node.getAttribute('name')] = update_node.getAttribute('edition') + +@@ -504,6 +518,9 @@ def info_installed(*names, **kwargs): + :param all_versions: + Include information for all versions of the packages installed on the minion. + ++ :param root: ++ Operate on a different root directory. ++ + CLI example: + + .. code-block:: bash +@@ -544,6 +561,9 @@ def info_available(*names, **kwargs): + If set to False it depends on zypper if a refresh is + executed or not. + ++ root ++ operate on a different root directory. ++ + CLI example: + + .. code-block:: bash +@@ -558,9 +578,11 @@ def info_available(*names, **kwargs): + else: + names = sorted(list(set(names))) + ++ root = kwargs.get('root', None) ++ + # Refresh db before extracting the latest package + if kwargs.get('refresh', True): +- refresh_db() ++ refresh_db(root) + + pkg_info = [] + batch = names[:] +@@ -569,7 +591,8 @@ def info_available(*names, **kwargs): + # Run in batches + while batch: + pkg_info.extend(re.split(r"Information for package*", +- __zypper__.nolock.call('info', '-t', 'package', *batch[:batch_size]))) ++ __zypper__(root=root).nolock.call('info', '-t', 'package', ++ *batch[:batch_size]))) + batch = batch[batch_size:] + + for pkg_data in pkg_info: +@@ -629,6 +652,9 @@ def latest_version(*names, **kwargs): + If set to False it depends on zypper if a refresh is + executed or not. + ++ root ++ operate on a different root directory. ++ + CLI example: + + .. code-block:: bash +@@ -671,6 +697,9 @@ def upgrade_available(name, **kwargs): + If set to False it depends on zypper if a refresh is + executed or not. + ++ root ++ operate on a different root directory. ++ + CLI Example: + + .. code-block:: bash +@@ -687,6 +716,9 @@ def version(*names, **kwargs): + installed. If more than one package name is specified, a dict of + name/version pairs is returned. + ++ root ++ operate on a different root directory. ++ + CLI Example: + + .. 
code-block:: bash +@@ -719,7 +751,7 @@ def version_cmp(ver1, ver2, ignore_epoch=False): + return __salt__['lowpkg.version_cmp'](ver1, ver2, ignore_epoch=ignore_epoch) + + +-def list_pkgs(versions_as_list=False, **kwargs): ++def list_pkgs(versions_as_list=False, root=None, **kwargs): + ''' + List the packages currently installed as a dict. By default, the dict + contains versions as a comma separated string:: +@@ -731,6 +763,9 @@ def list_pkgs(versions_as_list=False, **kwargs): + + {'': ['', '']} + ++ root: ++ operate on a different root directory. ++ + attr: + If a list of package attributes is specified, returned value will + contain them in addition to version, eg.:: +@@ -770,10 +805,14 @@ def list_pkgs(versions_as_list=False, **kwargs): + + contextkey = 'pkg.list_pkgs' + ++ # TODO(aplanas): this cached value depends on the parameters + if contextkey not in __context__: + ret = {} +- cmd = ['rpm', '-qa', '--queryformat', +- salt.utils.pkg.rpm.QUERYFORMAT.replace('%{REPOID}', '(none)') + '\n'] ++ cmd = ['rpm'] ++ if root: ++ cmd.extend(['--root', root]) ++ cmd.extend(['-qa', '--queryformat', ++ salt.utils.pkg.rpm.QUERYFORMAT.replace('%{REPOID}', '(none)') + '\n']) + output = __salt__['cmd.run'](cmd, + python_shell=False, + output_loglevel='trace') +@@ -859,6 +898,9 @@ def list_repo_pkgs(*args, **kwargs): + When ``True``, the return data for each package will be organized by + repository. + ++ root ++ operate on a different root directory. ++ + CLI Examples: + + .. code-block:: bash +@@ -891,7 +933,8 @@ def list_repo_pkgs(*args, **kwargs): + return True + return False + +- for node in __zypper__.xml.call('se', '-s', *targets).getElementsByTagName('solvable'): ++ root = kwargs.get('root') or None ++ for node in __zypper__(root=root).xml.call('se', '-s', *targets).getElementsByTagName('solvable'): + pkginfo = dict(node.attributes.items()) + try: + if pkginfo['kind'] != 'package': +@@ -933,23 +976,27 @@ def list_repo_pkgs(*args, **kwargs): + return byrepo_ret + + +-def _get_configured_repos(): ++def _get_configured_repos(root=None): + ''' + Get all the info about repositories from the configurations. + ''' + ++ repos = os.path.join(root, os.path.relpath(REPOS, os.path.sep)) if root else REPOS + repos_cfg = configparser.ConfigParser() +- repos_cfg.read([REPOS + '/' + fname for fname in os.listdir(REPOS) if fname.endswith(".repo")]) ++ if os.path.exists(repos): ++ repos_cfg.read([repos + '/' + fname for fname in os.listdir(repos) if fname.endswith(".repo")]) ++ else: ++ log.error('Repositories not found in {}'.format(repos)) + + return repos_cfg + + +-def _get_repo_info(alias, repos_cfg=None): ++def _get_repo_info(alias, repos_cfg=None, root=None): + ''' + Get one repo meta-data. + ''' + try: +- meta = dict((repos_cfg or _get_configured_repos()).items(alias)) ++ meta = dict((repos_cfg or _get_configured_repos(root=root)).items(alias)) + meta['alias'] = alias + for key, val in six.iteritems(meta): + if val in ['0', '1']: +@@ -961,51 +1008,60 @@ def _get_repo_info(alias, repos_cfg=None): + return {} + + +-def get_repo(repo, **kwargs): # pylint: disable=unused-argument ++def get_repo(repo, root=None, **kwargs): # pylint: disable=unused-argument + ''' + Display a repo. + ++ root ++ operate on a different root directory. ++ + CLI Example: + + .. code-block:: bash + + salt '*' pkg.get_repo alias + ''' +- return _get_repo_info(repo) ++ return _get_repo_info(repo, root=root) + + +-def list_repos(): ++def list_repos(root=None): + ''' + Lists all repos. + ++ root ++ operate on a different root directory. 
++ + CLI Example: + + .. code-block:: bash + + salt '*' pkg.list_repos + ''' +- repos_cfg = _get_configured_repos() ++ repos_cfg = _get_configured_repos(root=root) + all_repos = {} + for alias in repos_cfg.sections(): +- all_repos[alias] = _get_repo_info(alias, repos_cfg=repos_cfg) ++ all_repos[alias] = _get_repo_info(alias, repos_cfg=repos_cfg, root=root) + + return all_repos + + +-def del_repo(repo): ++def del_repo(repo, root=None): + ''' + Delete a repo. + ++ root ++ operate on a different root directory. ++ + CLI Examples: + + .. code-block:: bash + + salt '*' pkg.del_repo alias + ''' +- repos_cfg = _get_configured_repos() ++ repos_cfg = _get_configured_repos(root=root) + for alias in repos_cfg.sections(): + if alias == repo: +- doc = __zypper__.xml.call('rr', '--loose-auth', '--loose-query', alias) ++ doc = __zypper__(root=root).xml.call('rr', '--loose-auth', '--loose-query', alias) + msg = doc.getElementsByTagName('message') + if doc.getElementsByTagName('progress') and msg: + return { +@@ -1044,6 +1100,9 @@ def mod_repo(repo, **kwargs): + If set to True, automatically trust and import public GPG key for + the repository. + ++ root ++ operate on a different root directory. ++ + Key/Value pairs may also be removed from a repo's configuration by setting + a key to a blank value. Bear in mind that a name cannot be deleted, and a + URL can only be deleted if a ``mirrorlist`` is specified (or vice versa). +@@ -1056,7 +1115,8 @@ def mod_repo(repo, **kwargs): + salt '*' pkg.mod_repo alias url= mirrorlist=http://host.com/ + ''' + +- repos_cfg = _get_configured_repos() ++ root = kwargs.get('root') or None ++ repos_cfg = _get_configured_repos(root=root) + added = False + + # An attempt to add new one? +@@ -1076,7 +1136,7 @@ def mod_repo(repo, **kwargs): + + # Is there already such repo under different alias? + for alias in repos_cfg.sections(): +- repo_meta = _get_repo_info(alias, repos_cfg=repos_cfg) ++ repo_meta = _get_repo_info(alias, repos_cfg=repos_cfg, root=root) + + # Complete user URL, in case it is not + new_url = _urlparse(url) +@@ -1098,17 +1158,17 @@ def mod_repo(repo, **kwargs): + ) + + # Add new repo +- __zypper__.xml.call('ar', url, repo) ++ __zypper__(root=root).xml.call('ar', url, repo) + + # Verify the repository has been added +- repos_cfg = _get_configured_repos() ++ repos_cfg = _get_configured_repos(root=root) + if repo not in repos_cfg.sections(): + raise CommandExecutionError( + 'Failed add new repository \'{0}\' for unspecified reason. 
' + 'Please check zypper logs.'.format(repo)) + added = True + +- repo_info = _get_repo_info(repo) ++ repo_info = _get_repo_info(repo, root=root) + if ( + not added and 'baseurl' in kwargs and + not (kwargs['baseurl'] == repo_info['baseurl']) +@@ -1117,8 +1177,8 @@ def mod_repo(repo, **kwargs): + # we need to remove the repository and add it again with the new baseurl + repo_info.update(kwargs) + repo_info.setdefault('cache', False) +- del_repo(repo) +- return mod_repo(repo, **repo_info) ++ del_repo(repo, root=root) ++ return mod_repo(repo, root=root, **repo_info) + + # Modify added or existing repo according to the options + cmd_opt = [] +@@ -1151,7 +1211,7 @@ def mod_repo(repo, **kwargs): + + if cmd_opt: + cmd_opt = global_cmd_opt + ['mr'] + cmd_opt + [repo] +- __zypper__.refreshable.xml.call(*cmd_opt) ++ __zypper__(root=root).refreshable.xml.call(*cmd_opt) + + comment = None + if call_refresh: +@@ -1159,23 +1219,26 @@ def mod_repo(repo, **kwargs): + # --gpg-auto-import-keys is not doing anything + # so we need to specifically refresh here with --gpg-auto-import-keys + refresh_opts = global_cmd_opt + ['refresh'] + [repo] +- __zypper__.xml.call(*refresh_opts) ++ __zypper__(root=root).xml.call(*refresh_opts) + elif not added and not cmd_opt: + comment = 'Specified arguments did not result in modification of repo' + +- repo = get_repo(repo) ++ repo = get_repo(repo, root=root) + if comment: + repo['comment'] = comment + + return repo + + +-def refresh_db(): ++def refresh_db(root=None): + ''' + Force a repository refresh by calling ``zypper refresh --force``, return a dict:: + + {'': Bool} + ++ root ++ operate on a different root directory. ++ + CLI Example: + + .. code-block:: bash +@@ -1185,7 +1248,7 @@ def refresh_db(): + # Remove rtag file to keep multiple refreshes from happening in pkg states + salt.utils.pkg.clear_rtag(__opts__) + ret = {} +- out = __zypper__.refreshable.call('refresh', '--force') ++ out = __zypper__(root=root).refreshable.call('refresh', '--force') + + for line in out.splitlines(): + if not line: +@@ -1213,6 +1276,8 @@ def install(name=None, + skip_verify=False, + version=None, + ignore_repo_failure=False, ++ no_recommends=False, ++ root=None, + **kwargs): + ''' + .. versionchanged:: 2015.8.12,2016.3.3,2016.11.0 +@@ -1301,6 +1366,12 @@ def install(name=None, + Zypper returns error code 106 if one of the repositories are not available for various reasons. + In case to set strict check, this parameter needs to be set to True. Default: False. + ++ no_recommends ++ Do not install recommended packages, only required ones. ++ ++ root ++ operate on a different root directory. 
++ + diff_attr: + If a list of package attributes is specified, returned value will + contain them, eg.:: +@@ -1340,7 +1411,7 @@ def install(name=None, + 'arch': ''}}} + ''' + if refresh: +- refresh_db() ++ refresh_db(root) + + try: + pkg_params, pkg_type = __salt__['pkg_resource.parse_targets'](name, pkgs, sources, **kwargs) +@@ -1350,7 +1421,7 @@ def install(name=None, + if pkg_params is None or len(pkg_params) == 0: + return {} + +- version_num = Wildcard(__zypper__)(name, version) ++ version_num = Wildcard(__zypper__(root=root))(name, version) + + if version_num: + if pkgs is None and sources is None: +@@ -1375,7 +1446,7 @@ def install(name=None, + targets.append(target) + elif pkg_type == 'advisory': + targets = [] +- cur_patches = list_patches() ++ cur_patches = list_patches(root=root) + for advisory_id in pkg_params: + if advisory_id not in cur_patches: + raise CommandExecutionError('Advisory id "{0}" not found'.format(advisory_id)) +@@ -1385,7 +1456,7 @@ def install(name=None, + targets = pkg_params + + diff_attr = kwargs.get("diff_attr") +- old = list_pkgs(attr=diff_attr) if not downloadonly else list_downloaded() ++ old = list_pkgs(attr=diff_attr, root=root) if not downloadonly else list_downloaded(root) + downgrades = [] + if fromrepo: + fromrepoopt = ['--force', '--force-resolution', '--from', fromrepo] +@@ -1404,6 +1475,8 @@ def install(name=None, + cmd_install.append('--download-only') + if fromrepo: + cmd_install.extend(fromrepoopt) ++ if no_recommends: ++ cmd_install.append('--no-recommends') + + errors = [] + if pkg_type == 'advisory': +@@ -1415,7 +1488,7 @@ def install(name=None, + while targets: + cmd = cmd_install + targets[:500] + targets = targets[500:] +- for line in __zypper__(no_repo_failure=ignore_repo_failure, systemd_scope=systemd_scope).call(*cmd).splitlines(): ++ for line in __zypper__(no_repo_failure=ignore_repo_failure, systemd_scope=systemd_scope, root=root).call(*cmd).splitlines(): + match = re.match(r"^The selected package '([^']+)'.+has lower version", line) + if match: + downgrades.append(match.group(1)) +@@ -1423,10 +1496,10 @@ def install(name=None, + while downgrades: + cmd = cmd_install + ['--force'] + downgrades[:500] + downgrades = downgrades[500:] +- __zypper__(no_repo_failure=ignore_repo_failure).call(*cmd) ++ __zypper__(no_repo_failure=ignore_repo_failure, root=root).call(*cmd) + + _clean_cache() +- new = list_pkgs(attr=diff_attr) if not downloadonly else list_downloaded() ++ new = list_pkgs(attr=diff_attr, root=root) if not downloadonly else list_downloaded(root) + ret = salt.utils.data.compare_dicts(old, new) + + if errors: +@@ -1446,6 +1519,8 @@ def upgrade(refresh=True, + fromrepo=None, + novendorchange=False, + skip_verify=False, ++ no_recommends=False, ++ root=None, + **kwargs): # pylint: disable=unused-argument + ''' + .. versionchanged:: 2015.8.12,2016.3.3,2016.11.0 +@@ -1485,6 +1560,12 @@ def upgrade(refresh=True, + skip_verify + Skip the GPG verification check (e.g., ``--no-gpg-checks``) + ++ no_recommends ++ Do not install recommended packages, only required ones. ++ ++ root ++ Operate on a different root directory. ++ + Returns a dictionary containing the changes: + + .. 
code-block:: python +@@ -1507,7 +1588,7 @@ def upgrade(refresh=True, + cmd_update.insert(0, '--no-gpg-checks') + + if refresh: +- refresh_db() ++ refresh_db(root) + + if dryrun: + cmd_update.append('--dry-run') +@@ -1526,16 +1607,20 @@ def upgrade(refresh=True, + else: + log.warning('Disabling vendor changes is not supported on this Zypper version') + ++ if no_recommends: ++ cmd_update.append('--no-recommends') ++ log.info('Disabling recommendations') ++ + if dryrun: + # Creates a solver test case for debugging. + log.info('Executing debugsolver and performing a dry-run dist-upgrade') +- __zypper__(systemd_scope=_systemd_scope()).noraise.call(*cmd_update + ['--debug-solver']) ++ __zypper__(systemd_scope=_systemd_scope(), root=root).noraise.call(*cmd_update + ['--debug-solver']) + +- old = list_pkgs() ++ old = list_pkgs(root=root) + +- __zypper__(systemd_scope=_systemd_scope()).noraise.call(*cmd_update) ++ __zypper__(systemd_scope=_systemd_scope(), root=root).noraise.call(*cmd_update) + _clean_cache() +- new = list_pkgs() ++ new = list_pkgs(root=root) + ret = salt.utils.data.compare_dicts(old, new) + + if __zypper__.exit_code not in __zypper__.SUCCESS_EXIT_CODES: +@@ -1556,7 +1641,7 @@ def upgrade(refresh=True, + return ret + + +-def _uninstall(name=None, pkgs=None): ++def _uninstall(name=None, pkgs=None, root=None): + ''' + Remove and purge do identical things but with different Zypper commands, + this function performs the common logic. +@@ -1566,7 +1651,7 @@ def _uninstall(name=None, pkgs=None): + except MinionError as exc: + raise CommandExecutionError(exc) + +- old = list_pkgs() ++ old = list_pkgs(root=root) + targets = [] + for target in pkg_params: + # Check if package version set to be removed is actually installed: +@@ -1582,11 +1667,11 @@ def _uninstall(name=None, pkgs=None): + + errors = [] + while targets: +- __zypper__(systemd_scope=systemd_scope).call('remove', *targets[:500]) ++ __zypper__(systemd_scope=systemd_scope, root=root).call('remove', *targets[:500]) + targets = targets[500:] + + _clean_cache() +- ret = salt.utils.data.compare_dicts(old, list_pkgs()) ++ ret = salt.utils.data.compare_dicts(old, list_pkgs(root=root)) + + if errors: + raise CommandExecutionError( +@@ -1623,7 +1708,7 @@ def normalize_name(name): + return name + + +-def remove(name=None, pkgs=None, **kwargs): # pylint: disable=unused-argument ++def remove(name=None, pkgs=None, root=None, **kwargs): # pylint: disable=unused-argument + ''' + .. versionchanged:: 2015.8.12,2016.3.3,2016.11.0 + On minions running systemd>=205, `systemd-run(1)`_ is now used to +@@ -1651,6 +1736,9 @@ def remove(name=None, pkgs=None, **kwargs): # pylint: disable=unused-argument + A list of packages to delete. Must be passed as a python list. The + ``name`` parameter will be ignored if this option is passed. + ++ root ++ Operate on a different root directory. ++ + .. versionadded:: 0.16.0 + + +@@ -1664,10 +1752,10 @@ def remove(name=None, pkgs=None, **kwargs): # pylint: disable=unused-argument + salt '*' pkg.remove ,, + salt '*' pkg.remove pkgs='["foo", "bar"]' + ''' +- return _uninstall(name=name, pkgs=pkgs) ++ return _uninstall(name=name, pkgs=pkgs, root=root) + + +-def purge(name=None, pkgs=None, **kwargs): # pylint: disable=unused-argument ++def purge(name=None, pkgs=None, root=None, **kwargs): # pylint: disable=unused-argument + ''' + .. 
versionchanged:: 2015.8.12,2016.3.3,2016.11.0 + On minions running systemd>=205, `systemd-run(1)`_ is now used to +@@ -1696,6 +1784,9 @@ def purge(name=None, pkgs=None, **kwargs): # pylint: disable=unused-argument + A list of packages to delete. Must be passed as a python list. The + ``name`` parameter will be ignored if this option is passed. + ++ root ++ Operate on a different root directory. ++ + .. versionadded:: 0.16.0 + + +@@ -1709,13 +1800,16 @@ def purge(name=None, pkgs=None, **kwargs): # pylint: disable=unused-argument + salt '*' pkg.purge ,, + salt '*' pkg.purge pkgs='["foo", "bar"]' + ''' +- return _uninstall(name=name, pkgs=pkgs) ++ return _uninstall(name=name, pkgs=pkgs, root=root) + + +-def list_locks(): ++def list_locks(root=None): + ''' + List current package locks. + ++ root ++ operate on a different root directory. ++ + Return a dict containing the locked package with attributes:: + + {'': {'case_sensitive': '', +@@ -1729,8 +1823,9 @@ def list_locks(): + salt '*' pkg.list_locks + ''' + locks = {} +- if os.path.exists(LOCKS): +- with salt.utils.files.fopen(LOCKS) as fhr: ++ _locks = os.path.join(root, os.path.relpath(LOCKS, os.path.sep)) if root else LOCKS ++ try: ++ with salt.utils.files.fopen(_locks) as fhr: + items = salt.utils.stringutils.to_unicode(fhr.read()).split('\n\n') + for meta in [item.split('\n') for item in items]: + lock = {} +@@ -1739,15 +1834,22 @@ def list_locks(): + lock.update(dict([tuple([i.strip() for i in element.split(':', 1)]), ])) + if lock.get('solvable_name'): + locks[lock.pop('solvable_name')] = lock ++ except IOError: ++ pass ++ except Exception: ++ log.warning('Detected a problem when accessing {}'.format(_locks)) + + return locks + + +-def clean_locks(): ++def clean_locks(root=None): + ''' + Remove unused locks that do not currently (with regard to repositories + used) lock any package. + ++ root ++ Operate on a different root directory. ++ + CLI Example: + + .. code-block:: bash +@@ -1756,10 +1858,11 @@ def clean_locks(): + ''' + LCK = "removed" + out = {LCK: 0} +- if not os.path.exists("/etc/zypp/locks"): ++ locks = os.path.join(root, os.path.relpath(LOCKS, os.path.sep)) if root else LOCKS ++ if not os.path.exists(locks): + return out + +- for node in __zypper__.xml.call('cl').getElementsByTagName("message"): ++ for node in __zypper__(root=root).xml.call('cl').getElementsByTagName("message"): + text = node.childNodes[0].nodeValue.lower() + if text.startswith(LCK): + out[LCK] = text.split(" ")[1] +@@ -1772,6 +1875,9 @@ def unhold(name=None, pkgs=None, **kwargs): + ''' + Remove specified package lock. + ++ root ++ operate on a different root directory. ++ + CLI Example: + + .. 
code-block:: bash +@@ -1781,12 +1887,13 @@ def unhold(name=None, pkgs=None, **kwargs): + salt '*' pkg.remove_lock pkgs='["foo", "bar"]' + ''' + ret = {} ++ root = kwargs.get('root') + if (not name and not pkgs) or (name and pkgs): + raise CommandExecutionError('Name or packages must be specified.') + elif name: + pkgs = [name] + +- locks = list_locks() ++ locks = list_locks(root) + try: + pkgs = list(__salt__['pkg_resource.parse_targets'](pkgs)[0].keys()) + except MinionError as exc: +@@ -1803,12 +1910,12 @@ def unhold(name=None, pkgs=None, **kwargs): + ret[pkg]['comment'] = 'Package {0} unable to be unheld.'.format(pkg) + + if removed: +- __zypper__.call('rl', *removed) ++ __zypper__(root=root).call('rl', *removed) + + return ret + + +-def remove_lock(packages, **kwargs): # pylint: disable=unused-argument ++def remove_lock(packages, root=None, **kwargs): # pylint: disable=unused-argument + ''' + Remove specified package lock. + +@@ -1821,7 +1928,7 @@ def remove_lock(packages, **kwargs): # pylint: disable=unused-argument + salt '*' pkg.remove_lock pkgs='["foo", "bar"]' + ''' + salt.utils.versions.warn_until('Sodium', 'This function is deprecated. Please use unhold() instead.') +- locks = list_locks() ++ locks = list_locks(root) + try: + packages = list(__salt__['pkg_resource.parse_targets'](packages)[0].keys()) + except MinionError as exc: +@@ -1836,7 +1943,7 @@ def remove_lock(packages, **kwargs): # pylint: disable=unused-argument + missing.append(pkg) + + if removed: +- __zypper__.call('rl', *removed) ++ __zypper__(root=root).call('rl', *removed) + + return {'removed': len(removed), 'not_found': missing} + +@@ -1859,12 +1966,13 @@ def hold(name=None, pkgs=None, **kwargs): + :return: + ''' + ret = {} ++ root = kwargs.get('root') + if (not name and not pkgs) or (name and pkgs): + raise CommandExecutionError('Name or packages must be specified.') + elif name: + pkgs = [name] + +- locks = list_locks() ++ locks = list_locks(root=root) + added = [] + try: + pkgs = list(__salt__['pkg_resource.parse_targets'](pkgs)[0].keys()) +@@ -1880,15 +1988,18 @@ def hold(name=None, pkgs=None, **kwargs): + ret[pkg]['comment'] = 'Package {0} is already set to be held.'.format(pkg) + + if added: +- __zypper__.call('al', *added) ++ __zypper__(root=root).call('al', *added) + + return ret + + +-def add_lock(packages, **kwargs): # pylint: disable=unused-argument ++def add_lock(packages, root=None, **kwargs): # pylint: disable=unused-argument + ''' + Add a package lock. Specify packages to lock by exact name. + ++ root ++ operate on a different root directory. ++ + CLI Example: + + .. code-block:: bash +@@ -1898,7 +2009,7 @@ def add_lock(packages, **kwargs): # pylint: disable=unused-argument + salt '*' pkg.add_lock pkgs='["foo", "bar"]' + ''' + salt.utils.versions.warn_until('Sodium', 'This function is deprecated. 
Please use hold() instead.') +- locks = list_locks() ++ locks = list_locks(root) + added = [] + try: + packages = list(__salt__['pkg_resource.parse_targets'](packages)[0].keys()) +@@ -1910,7 +2021,7 @@ def add_lock(packages, **kwargs): # pylint: disable=unused-argument + added.append(pkg) + + if added: +- __zypper__.call('al', *added) ++ __zypper__(root=root).call('al', *added) + + return {'added': len(added), 'packages': added} + +@@ -1920,7 +2031,9 @@ def verify(*names, **kwargs): + Runs an rpm -Va on a system, and returns the results in a dict + + Files with an attribute of config, doc, ghost, license or readme in the +- package header can be ignored using the ``ignore_types`` keyword argument ++ package header can be ignored using the ``ignore_types`` keyword argument. ++ ++ The root parameter can also be passed via the keyword argument. + + CLI Example: + +@@ -1934,12 +2047,14 @@ def verify(*names, **kwargs): + return __salt__['lowpkg.verify'](*names, **kwargs) + + +-def file_list(*packages): ++def file_list(*packages, **kwargs): + ''' + List the files that belong to a package. Not specifying any packages will + return a list of *every* file on the system's rpm database (not generally + recommended). + ++ The root parameter can also be passed via the keyword argument. ++ + CLI Examples: + + .. code-block:: bash +@@ -1948,15 +2063,17 @@ def file_list(*packages): + salt '*' pkg.file_list httpd postfix + salt '*' pkg.file_list + ''' +- return __salt__['lowpkg.file_list'](*packages) ++ return __salt__['lowpkg.file_list'](*packages, **kwargs) + + +-def file_dict(*packages): ++def file_dict(*packages, **kwargs): + ''' + List the files that belong to a package, grouped by package. Not + specifying any packages will return a list of *every* file on the system's + rpm database (not generally recommended). + ++ The root parameter can also be passed via the keyword argument. ++ + CLI Examples: + + .. code-block:: bash +@@ -1965,7 +2082,7 @@ def file_dict(*packages): + salt '*' pkg.file_list httpd postfix + salt '*' pkg.file_list + ''' +- return __salt__['lowpkg.file_dict'](*packages) ++ return __salt__['lowpkg.file_dict'](*packages, **kwargs) + + + def modified(*packages, **flags): +@@ -2004,6 +2121,9 @@ def modified(*packages, **flags): + capabilities + Include only files where capabilities differ or not. Note: supported only on newer RPM versions. + ++ root ++ operate on a different root directory. ++ + CLI Examples: + + .. code-block:: bash +@@ -2017,7 +2137,7 @@ def modified(*packages, **flags): + return __salt__['lowpkg.modified'](*packages, **flags) + + +-def owner(*paths): ++def owner(*paths, **kwargs): + ''' + Return the name of the package that owns the file. Multiple file paths can + be passed. If a single path is passed, a string will be returned, +@@ -2027,6 +2147,8 @@ def owner(*paths): + If the file is not owned by a package, or is not present on the minion, + then an empty string will be returned for that path. + ++ The root parameter can also be passed via the keyword argument. ++ + CLI Examples: + + .. code-block:: bash +@@ -2034,15 +2156,15 @@ def owner(*paths): + salt '*' pkg.owner /usr/bin/apachectl + salt '*' pkg.owner /usr/bin/apachectl /etc/httpd/conf/httpd.conf + ''' +- return __salt__['lowpkg.owner'](*paths) ++ return __salt__['lowpkg.owner'](*paths, **kwargs) + + +-def _get_patterns(installed_only=None): ++def _get_patterns(installed_only=None, root=None): + ''' + List all known patterns in repos. 
+ ''' + patterns = {} +- for element in __zypper__.nolock.xml.call('se', '-t', 'pattern').getElementsByTagName('solvable'): ++ for element in __zypper__(root=root).nolock.xml.call('se', '-t', 'pattern').getElementsByTagName('solvable'): + installed = element.getAttribute('status') == 'installed' + if (installed_only and installed) or not installed_only: + patterns[element.getAttribute('name')] = { +@@ -2053,7 +2175,7 @@ def _get_patterns(installed_only=None): + return patterns + + +-def list_patterns(refresh=False): ++def list_patterns(refresh=False, root=None): + ''' + List all known patterns from available repos. + +@@ -2062,6 +2184,9 @@ def list_patterns(refresh=False): + If set to False (default) it depends on zypper if a refresh is + executed. + ++ root ++ operate on a different root directory. ++ + CLI Examples: + + .. code-block:: bash +@@ -2069,27 +2194,30 @@ def list_patterns(refresh=False): + salt '*' pkg.list_patterns + ''' + if refresh: +- refresh_db() ++ refresh_db(root) + +- return _get_patterns() ++ return _get_patterns(root=root) + + +-def list_installed_patterns(): ++def list_installed_patterns(root=None): + ''' + List installed patterns on the system. + ++ root ++ operate on a different root directory. ++ + CLI Examples: + + .. code-block:: bash + + salt '*' pkg.list_installed_patterns + ''' +- return _get_patterns(installed_only=True) ++ return _get_patterns(installed_only=True, root=root) + + + def search(criteria, refresh=False, **kwargs): + ''' +- List known packags, available to the system. ++ List known packages, available to the system. + + refresh + force a refresh if set to True. +@@ -2137,6 +2265,9 @@ def search(criteria, refresh=False, **kwargs): + details (bool) + Show version and repository + ++ root ++ operate on a different root directory. ++ + CLI Examples: + + .. code-block:: bash +@@ -2157,8 +2288,11 @@ def search(criteria, refresh=False, **kwargs): + 'not_installed_only': '-u', + 'details': '--details' + } ++ ++ root = kwargs.get('root', None) ++ + if refresh: +- refresh_db() ++ refresh_db(root) + + cmd = ['search'] + if kwargs.get('match') == 'exact': +@@ -2173,7 +2307,7 @@ def search(criteria, refresh=False, **kwargs): + cmd.append(ALLOWED_SEARCH_OPTIONS.get(opt)) + + cmd.append(criteria) +- solvables = __zypper__.nolock.noraise.xml.call(*cmd).getElementsByTagName('solvable') ++ solvables = __zypper__(root=root).nolock.noraise.xml.call(*cmd).getElementsByTagName('solvable') + if not solvables: + raise CommandExecutionError( + 'No packages found matching \'{0}\''.format(criteria) +@@ -2202,7 +2336,7 @@ def _get_first_aggregate_text(node_list): + return '\n'.join(out) + + +-def list_products(all=False, refresh=False): ++def list_products(all=False, refresh=False, root=None): + ''' + List all available or installed SUSE products. + +@@ -2214,6 +2348,9 @@ def list_products(all=False, refresh=False): + If set to False (default) it depends on zypper if a refresh is + executed. + ++ root ++ operate on a different root directory. ++ + Includes handling for OEM products, which read the OEM productline file + and overwrite the release value. 
+ +@@ -2225,10 +2362,12 @@ def list_products(all=False, refresh=False): + salt '*' pkg.list_products all=True + ''' + if refresh: +- refresh_db() ++ refresh_db(root) + + ret = list() +- OEM_PATH = "/var/lib/suseRegister/OEM" ++ OEM_PATH = '/var/lib/suseRegister/OEM' ++ if root: ++ OEM_PATH = os.path.join(root, os.path.relpath(OEM_PATH, os.path.sep)) + cmd = list() + if not all: + cmd.append('--disable-repos') +@@ -2236,7 +2375,7 @@ def list_products(all=False, refresh=False): + if not all: + cmd.append('-i') + +- product_list = __zypper__.nolock.xml.call(*cmd).getElementsByTagName('product-list') ++ product_list = __zypper__(root=root).nolock.xml.call(*cmd).getElementsByTagName('product-list') + if not product_list: + return ret # No products found + +@@ -2278,6 +2417,9 @@ def download(*packages, **kwargs): + If set to False (default) it depends on zypper if a refresh is + executed. + ++ root ++ operate on a different root directory. ++ + CLI example: + + .. code-block:: bash +@@ -2288,12 +2430,14 @@ def download(*packages, **kwargs): + if not packages: + raise SaltInvocationError('No packages specified') + ++ root = kwargs.get('root', None) ++ + refresh = kwargs.get('refresh', False) + if refresh: +- refresh_db() ++ refresh_db(root) + + pkg_ret = {} +- for dld_result in __zypper__.xml.call('download', *packages).getElementsByTagName("download-result"): ++ for dld_result in __zypper__(root=root).xml.call('download', *packages).getElementsByTagName("download-result"): + repo = dld_result.getElementsByTagName("repository")[0] + path = dld_result.getElementsByTagName("localfile")[0].getAttribute("path") + pkg_info = { +@@ -2304,7 +2448,7 @@ def download(*packages, **kwargs): + key = _get_first_aggregate_text( + dld_result.getElementsByTagName('name') + ) +- if __salt__['lowpkg.checksum'](pkg_info['path']): ++ if __salt__['lowpkg.checksum'](pkg_info['path'], root=root): + pkg_ret[key] = pkg_info + + if pkg_ret: +@@ -2318,12 +2462,15 @@ def download(*packages, **kwargs): + ) + + +-def list_downloaded(): ++def list_downloaded(root=None): + ''' + .. versionadded:: 2017.7.0 + + List prefetched packages downloaded by Zypper in the local disk. + ++ root ++ operate on a different root directory. ++ + CLI example: + + .. code-block:: bash +@@ -2331,6 +2478,8 @@ def list_downloaded(): + salt '*' pkg.list_downloaded + ''' + CACHE_DIR = '/var/cache/zypp/packages/' ++ if root: ++ CACHE_DIR = os.path.join(root, os.path.relpath(CACHE_DIR, os.path.sep)) + + ret = {} + for root, dirnames, filenames in salt.utils.path.os_walk(CACHE_DIR): +@@ -2347,12 +2496,14 @@ def list_downloaded(): + return ret + + +-def diff(*paths): ++def diff(*paths, **kwargs): + ''' + Return a formatted diff between current files and original in a package. + NOTE: this function includes all files (configuration and not), but does + not work on binary content. + ++ The root parameter can also be passed via the keyword argument. ++ + :param path: Full path to the installed file + :return: Difference string or raises and exception if examined file is binary. 
+ +@@ -2366,7 +2517,7 @@ def diff(*paths): + + pkg_to_paths = {} + for pth in paths: +- pth_pkg = __salt__['lowpkg.owner'](pth) ++ pth_pkg = __salt__['lowpkg.owner'](pth, **kwargs) + if not pth_pkg: + ret[pth] = os.path.exists(pth) and 'Not managed' or 'N/A' + else: +@@ -2375,7 +2526,7 @@ def diff(*paths): + pkg_to_paths[pth_pkg].append(pth) + + if pkg_to_paths: +- local_pkgs = __salt__['pkg.download'](*pkg_to_paths.keys()) ++ local_pkgs = __salt__['pkg.download'](*pkg_to_paths.keys(), **kwargs) + for pkg, files in six.iteritems(pkg_to_paths): + for path in files: + ret[path] = __salt__['lowpkg.diff']( +@@ -2386,12 +2537,12 @@ def diff(*paths): + return ret + + +-def _get_patches(installed_only=False): ++def _get_patches(installed_only=False, root=None): + ''' + List all known patches in repos. + ''' + patches = {} +- for element in __zypper__.nolock.xml.call('se', '-t', 'patch').getElementsByTagName('solvable'): ++ for element in __zypper__(root=root).nolock.xml.call('se', '-t', 'patch').getElementsByTagName('solvable'): + installed = element.getAttribute('status') == 'installed' + if (installed_only and installed) or not installed_only: + patches[element.getAttribute('name')] = { +@@ -2402,7 +2553,7 @@ def _get_patches(installed_only=False): + return patches + + +-def list_patches(refresh=False): ++def list_patches(refresh=False, root=None): + ''' + .. versionadded:: 2017.7.0 + +@@ -2413,6 +2564,9 @@ def list_patches(refresh=False): + If set to False (default) it depends on zypper if a refresh is + executed. + ++ root ++ operate on a different root directory. ++ + CLI Examples: + + .. code-block:: bash +@@ -2420,33 +2574,39 @@ def list_patches(refresh=False): + salt '*' pkg.list_patches + ''' + if refresh: +- refresh_db() ++ refresh_db(root) + +- return _get_patches() ++ return _get_patches(root=root) + + +-def list_installed_patches(): ++def list_installed_patches(root=None): + ''' + .. versionadded:: 2017.7.0 + + List installed advisory patches on the system. + ++ root ++ operate on a different root directory. ++ + CLI Examples: + + .. code-block:: bash + + salt '*' pkg.list_installed_patches + ''' +- return _get_patches(installed_only=True) ++ return _get_patches(installed_only=True, root=root) + + +-def list_provides(**kwargs): ++def list_provides(root=None, **kwargs): + ''' + .. versionadded:: 2018.3.0 + + List package provides of installed packages as a dict. + {'': ['', '', ...]} + ++ root ++ operate on a different root directory. ++ + CLI Examples: + + .. code-block:: bash +@@ -2455,7 +2615,10 @@ def list_provides(**kwargs): + ''' + ret = __context__.get('pkg.list_provides') + if not ret: +- cmd = ['rpm', '-qa', '--queryformat', '%{PROVIDES}_|-%{NAME}\n'] ++ cmd = ['rpm'] ++ if root: ++ cmd.extend(['--root', root]) ++ cmd.extend(['-qa', '--queryformat', '%{PROVIDES}_|-%{NAME}\n']) + ret = dict() + for line in __salt__['cmd.run'](cmd, output_loglevel='trace', python_shell=False).splitlines(): + provide, realname = line.split('_|-') +@@ -2471,7 +2634,7 @@ def list_provides(**kwargs): + return ret + + +-def resolve_capabilities(pkgs, refresh, **kwargs): ++def resolve_capabilities(pkgs, refresh=False, root=None, **kwargs): + ''' + .. versionadded:: 2018.3.0 + +@@ -2485,6 +2648,9 @@ def resolve_capabilities(pkgs, refresh, **kwargs): + If set to False (default) it depends on zypper if a refresh is + executed. + ++ root ++ operate on a different root directory. ++ + resolve_capabilities + If this option is set to True the input will be checked if + a package with this name exists. 
If not, this function will +@@ -2500,7 +2666,7 @@ def resolve_capabilities(pkgs, refresh, **kwargs): + salt '*' pkg.resolve_capabilities resolve_capabilities=True w3m_ssl + ''' + if refresh: +- refresh_db() ++ refresh_db(root) + + ret = list() + for pkg in pkgs: +@@ -2513,12 +2679,12 @@ def resolve_capabilities(pkgs, refresh, **kwargs): + + if kwargs.get('resolve_capabilities', False): + try: +- search(name, match='exact') ++ search(name, root=root, match='exact') + except CommandExecutionError: + # no package this such a name found + # search for a package which provides this name + try: +- result = search(name, provides=True, match='exact') ++ result = search(name, root=root, provides=True, match='exact') + if len(result) == 1: + name = next(iter(result.keys())) + elif len(result) > 1: +diff --git a/salt/states/pkg.py b/salt/states/pkg.py +index 0aca1e0af8..22a97fe98c 100644 +--- a/salt/states/pkg.py ++++ b/salt/states/pkg.py +@@ -241,7 +241,7 @@ def _fulfills_version_spec(versions, oper, desired_version, + return False + + +-def _find_unpurge_targets(desired): ++def _find_unpurge_targets(desired, **kwargs): + ''' + Find packages which are marked to be purged but can't yet be removed + because they are dependencies for other installed packages. These are the +@@ -250,7 +250,7 @@ def _find_unpurge_targets(desired): + ''' + return [ + x for x in desired +- if x in __salt__['pkg.list_pkgs'](purge_desired=True) ++ if x in __salt__['pkg.list_pkgs'](purge_desired=True, **kwargs) + ] + + +@@ -265,7 +265,7 @@ def _find_download_targets(name=None, + Inspect the arguments to pkg.downloaded and discover what packages need to + be downloaded. Return a dict of packages to download. + ''' +- cur_pkgs = __salt__['pkg.list_downloaded']() ++ cur_pkgs = __salt__['pkg.list_downloaded'](**kwargs) + if pkgs: + to_download = _repack_pkgs(pkgs, normalize=normalize) + +@@ -383,7 +383,7 @@ def _find_advisory_targets(name=None, + Inspect the arguments to pkg.patch_installed and discover what advisory + patches need to be installed. Return a dict of advisory patches to install. 
+ ''' +- cur_patches = __salt__['pkg.list_installed_patches']() ++ cur_patches = __salt__['pkg.list_installed_patches'](**kwargs) + if advisory_ids: + to_download = advisory_ids + else: +@@ -587,7 +587,7 @@ def _find_install_targets(name=None, + 'minion log.'.format('pkgs' if pkgs + else 'sources')} + +- to_unpurge = _find_unpurge_targets(desired) ++ to_unpurge = _find_unpurge_targets(desired, **kwargs) + else: + if salt.utils.platform.is_windows(): + pkginfo = _get_package_info(name, saltenv=kwargs['saltenv']) +@@ -607,7 +607,7 @@ def _find_install_targets(name=None, + else: + desired = {name: version} + +- to_unpurge = _find_unpurge_targets(desired) ++ to_unpurge = _find_unpurge_targets(desired, **kwargs) + + # FreeBSD pkg supports `openjdk` and `java/openjdk7` package names + origin = bool(re.search('/', name)) +@@ -766,7 +766,8 @@ def _find_install_targets(name=None, + verify_result = __salt__['pkg.verify']( + package_name, + ignore_types=ignore_types, +- verify_options=verify_options ++ verify_options=verify_options, ++ **kwargs + ) + except (CommandExecutionError, SaltInvocationError) as exc: + failed_verify = exc.strerror +@@ -795,7 +796,9 @@ def _find_install_targets(name=None, + verify_result = __salt__['pkg.verify']( + package_name, + ignore_types=ignore_types, +- verify_options=verify_options) ++ verify_options=verify_options, ++ **kwargs ++ ) + except (CommandExecutionError, SaltInvocationError) as exc: + failed_verify = exc.strerror + continue +@@ -1910,7 +1913,8 @@ def installed( + # have caught invalid arguments earlier. + verify_result = __salt__['pkg.verify'](reinstall_pkg, + ignore_types=ignore_types, +- verify_options=verify_options) ++ verify_options=verify_options, ++ **kwargs) + if verify_result: + failed.append(reinstall_pkg) + altered_files[reinstall_pkg] = verify_result +@@ -2098,7 +2102,7 @@ def downloaded(name, + 'package(s): {0}'.format(exc) + return ret + +- new_pkgs = __salt__['pkg.list_downloaded']() ++ new_pkgs = __salt__['pkg.list_downloaded'](**kwargs) + ok, failed = _verify_install(targets, new_pkgs, ignore_epoch=ignore_epoch) + + if failed: +@@ -2974,7 +2978,7 @@ def uptodate(name, refresh=False, pkgs=None, **kwargs): + pkgs, refresh = _resolve_capabilities(pkgs, refresh=refresh, **kwargs) + try: + packages = __salt__['pkg.list_upgrades'](refresh=refresh, **kwargs) +- expected = {pkgname: {'new': pkgver, 'old': __salt__['pkg.version'](pkgname)} ++ expected = {pkgname: {'new': pkgver, 'old': __salt__['pkg.version'](pkgname, **kwargs)} + for pkgname, pkgver in six.iteritems(packages)} + if isinstance(pkgs, list): + packages = [pkg for pkg in packages if pkg in pkgs] +@@ -3156,7 +3160,7 @@ def group_installed(name, skip=None, include=None, **kwargs): + .format(name, exc)) + return ret + +- failed = [x for x in targets if x not in __salt__['pkg.list_pkgs']()] ++ failed = [x for x in targets if x not in __salt__['pkg.list_pkgs'](**kwargs)] + if failed: + ret['comment'] = ( + 'Failed to install the following packages: {0}' +diff --git a/salt/states/pkgrepo.py b/salt/states/pkgrepo.py +index 4d5e9eea92..6d8e94aa18 100644 +--- a/salt/states/pkgrepo.py ++++ b/salt/states/pkgrepo.py +@@ -393,10 +393,7 @@ def managed(name, ppa=None, **kwargs): + kwargs.pop(kwarg, None) + + try: +- pre = __salt__['pkg.get_repo']( +- repo, +- ppa_auth=kwargs.get('ppa_auth', None) +- ) ++ pre = __salt__['pkg.get_repo'](repo=repo, **kwargs) + except CommandExecutionError as exc: + ret['result'] = False + ret['comment'] = \ +@@ -512,10 +509,7 @@ def managed(name, ppa=None, **kwargs): + 
return ret + + try: +- post = __salt__['pkg.get_repo']( +- repo, +- ppa_auth=kwargs.get('ppa_auth', None) +- ) ++ post = __salt__['pkg.get_repo'](repo=repo, **kwargs) + if pre: + for kwarg in sanitizedkwargs: + if post.get(kwarg) != pre.get(kwarg): +@@ -608,9 +602,7 @@ def absent(name, **kwargs): + return ret + + try: +- repo = __salt__['pkg.get_repo']( +- name, ppa_auth=kwargs.get('ppa_auth', None) +- ) ++ repo = __salt__['pkg.get_repo'](name, **kwargs) + except CommandExecutionError as exc: + ret['result'] = False + ret['comment'] = \ +diff --git a/tests/unit/modules/test_rpm_lowpkg.py b/tests/unit/modules/test_rpm_lowpkg.py +index 0a2359ccb2..dc9f52c572 100644 +--- a/tests/unit/modules/test_rpm_lowpkg.py ++++ b/tests/unit/modules/test_rpm_lowpkg.py +@@ -20,6 +20,11 @@ from tests.support.mock import ( + import salt.modules.rpm_lowpkg as rpm + + ++def _called_with_root(mock): ++ cmd = ' '.join(mock.call_args[0][0]) ++ return cmd.startswith('rpm --root /') ++ ++ + @skipIf(NO_MOCK, NO_MOCK_REASON) + class RpmTestCase(TestCase, LoaderModuleMockMixin): + ''' +@@ -28,7 +33,7 @@ class RpmTestCase(TestCase, LoaderModuleMockMixin): + def setup_loader_modules(self): + return {rpm: {'rpm': MagicMock(return_value=MagicMock)}} + +- # 'list_pkgs' function tests: 1 ++ # 'list_pkgs' function tests: 2 + + def test_list_pkgs(self): + ''' +@@ -37,13 +42,24 @@ class RpmTestCase(TestCase, LoaderModuleMockMixin): + mock = MagicMock(return_value='') + with patch.dict(rpm.__salt__, {'cmd.run': mock}): + self.assertDictEqual(rpm.list_pkgs(), {}) ++ self.assertFalse(_called_with_root(mock)) ++ ++ def test_list_pkgs_root(self): ++ ''' ++ Test if it lists the packages currently installed in a dict, ++ called with the root parameter ++ ''' ++ mock = MagicMock(return_value='') ++ with patch.dict(rpm.__salt__, {'cmd.run': mock}): ++ rpm.list_pkgs(root='/') ++ self.assertTrue(_called_with_root(mock)) + +- # 'verify' function tests: 1 ++ # 'verify' function tests: 2 + + def test_verify(self): + ''' +- Test if it runs an rpm -Va on a system, +- and returns the results in a dict ++ Test if it runs an rpm -Va on a system, and returns the ++ results in a dict + ''' + mock = MagicMock(return_value={'stdout': '', + 'stderr': '', +@@ -51,8 +67,22 @@ class RpmTestCase(TestCase, LoaderModuleMockMixin): + 'pid': 12345}) + with patch.dict(rpm.__salt__, {'cmd.run_all': mock}): + self.assertDictEqual(rpm.verify('httpd'), {}) ++ self.assertFalse(_called_with_root(mock)) ++ ++ def test_verify_root(self): ++ ''' ++ Test if it runs an rpm -Va on a system, and returns the ++ results in a dict, called with the root parameter ++ ''' ++ mock = MagicMock(return_value={'stdout': '', ++ 'stderr': '', ++ 'retcode': 0, ++ 'pid': 12345}) ++ with patch.dict(rpm.__salt__, {'cmd.run_all': mock}): ++ rpm.verify('httpd', root='/') ++ self.assertTrue(_called_with_root(mock)) + +- # 'file_list' function tests: 1 ++ # 'file_list' function tests: 2 + + def test_file_list(self): + ''' +@@ -62,8 +92,20 @@ class RpmTestCase(TestCase, LoaderModuleMockMixin): + with patch.dict(rpm.__salt__, {'cmd.run': mock}): + self.assertDictEqual(rpm.file_list('httpd'), + {'errors': [], 'files': []}) ++ self.assertFalse(_called_with_root(mock)) + +- # 'file_dict' function tests: 1 ++ def test_file_list_root(self): ++ ''' ++ Test if it lists the files that belong to a package, using the ++ root parameter.
++ ''' ++ ++ mock = MagicMock(return_value='') ++ with patch.dict(rpm.__salt__, {'cmd.run': mock}): ++ rpm.file_list('httpd', root='/') ++ self.assertTrue(_called_with_root(mock)) ++ ++ # 'file_dict' function tests: 2 + + def test_file_dict(self): + ''' +@@ -73,6 +115,16 @@ class RpmTestCase(TestCase, LoaderModuleMockMixin): + with patch.dict(rpm.__salt__, {'cmd.run': mock}): + self.assertDictEqual(rpm.file_dict('httpd'), + {'errors': [], 'packages': {}}) ++ self.assertFalse(_called_with_root(mock)) ++ ++ def test_file_dict_root(self): ++ ''' ++ Test if it lists the files that belong to a package, using the root parameter ++ ''' ++ mock = MagicMock(return_value='') ++ with patch.dict(rpm.__salt__, {'cmd.run': mock}): ++ rpm.file_dict('httpd', root='/') ++ self.assertTrue(_called_with_root(mock)) + + # 'owner' function tests: 1 + +@@ -86,6 +138,7 @@ class RpmTestCase(TestCase, LoaderModuleMockMixin): + mock = MagicMock(return_value=ret) + with patch.dict(rpm.__salt__, {'cmd.run_stdout': mock}): + self.assertEqual(rpm.owner('/usr/bin/salt-jenkins-build'), '') ++ self.assertFalse(_called_with_root(mock)) + + ret = {'/usr/bin/vim': 'vim-enhanced-7.4.160-1.e17.x86_64', + '/usr/bin/python': 'python-2.7.5-16.e17.x86_64'} +@@ -94,8 +147,22 @@ class RpmTestCase(TestCase, LoaderModuleMockMixin): + with patch.dict(rpm.__salt__, {'cmd.run_stdout': mock}): + self.assertDictEqual(rpm.owner('/usr/bin/python', '/usr/bin/vim'), + ret) ++ self.assertFalse(_called_with_root(mock)) + +- # 'checksum' function tests: 1 ++ def test_owner_root(self): ++ ''' ++ Test if it returns the name of the package that owns the file, ++ using the root parameter. ++ ''' ++ self.assertEqual(rpm.owner(), '') ++ ++ ret = 'file /usr/bin/salt-jenkins-build is not owned by any package' ++ mock = MagicMock(return_value=ret) ++ with patch.dict(rpm.__salt__, {'cmd.run_stdout': mock}): ++ rpm.owner('/usr/bin/salt-jenkins-build', root='/') ++ self.assertTrue(_called_with_root(mock)) ++ ++ # 'checksum' function tests: 2 + + def test_checksum(self): + ''' +@@ -110,6 +177,17 @@ class RpmTestCase(TestCase, LoaderModuleMockMixin): + mock = MagicMock(side_effect=[True, 0, True, 1, False, 0]) + with patch.dict(rpm.__salt__, {'file.file_exists': mock, 'cmd.retcode': mock}): + self.assertDictEqual(rpm.checksum("file1.rpm", "file2.rpm", "file3.rpm"), ret) ++ self.assertFalse(_called_with_root(mock)) ++ ++ def test_checksum_root(self): ++ ''' ++ Test if checksum validates as expected, using the ++ root parameter ++ ''' ++ mock = MagicMock(side_effect=[True, 0]) ++ with patch.dict(rpm.__salt__, {'file.file_exists': mock, 'cmd.retcode': mock}): ++ rpm.checksum("file1.rpm", root='/') ++ self.assertTrue(_called_with_root(mock)) + + def test_version_cmp_rpm(self): + ''' +diff --git a/tests/unit/modules/test_zypperpkg.py b/tests/unit/modules/test_zypperpkg.py +index f586c23fd0..e7474ff777 100644 +--- a/tests/unit/modules/test_zypperpkg.py ++++ b/tests/unit/modules/test_zypperpkg.py +@@ -40,6 +40,9 @@ class ZyppCallMock(object): + return self + + def __call__(self, *args, **kwargs): ++ # If the call is for a configuration modifier, we return self ++ if any(i in kwargs for i in ('no_repo_failure', 'systemd_scope', 'root')): ++ return self + return MagicMock(return_value=self.__return_value)() + + +@@ -925,7 +928,7 @@ Repository 'DUMMY' not found by its alias, number, or URI.
+ 'pico': '0.1.1', + } + +- def __call__(self): ++ def __call__(self, root=None): + pkgs = self._pkgs.copy() + for target in self._packages: + if self._pkgs.get(target): +@@ -991,10 +994,10 @@ Repository 'DUMMY' not found by its alias, number, or URI. + with zypper_patcher: + zypper.mod_repo(name, **{'url': url}) + self.assertEqual( +- zypper.__zypper__.xml.call.call_args_list, ++ zypper.__zypper__(root=None).xml.call.call_args_list, + [call('ar', url, name)] + ) +- self.assertTrue(zypper.__zypper__.refreshable.xml.call.call_count == 0) ++ self.assertTrue(zypper.__zypper__(root=None).refreshable.xml.call.call_count == 0) + + def test_repo_noadd_nomod_noref(self): + ''' +@@ -1016,8 +1019,8 @@ Repository 'DUMMY' not found by its alias, number, or URI. + self.assertEqual( + out['comment'], + 'Specified arguments did not result in modification of repo') +- self.assertTrue(zypper.__zypper__.xml.call.call_count == 0) +- self.assertTrue(zypper.__zypper__.refreshable.xml.call.call_count == 0) ++ self.assertTrue(zypper.__zypper__(root=None).xml.call.call_count == 0) ++ self.assertTrue(zypper.__zypper__(root=None).refreshable.xml.call.call_count == 0) + + def test_repo_noadd_modbaseurl_ref(self): + ''' +@@ -1045,9 +1048,11 @@ Repository 'DUMMY' not found by its alias, number, or URI. + 'priority': 1, + 'cache': False, + 'keeppackages': False, +- 'type': 'rpm-md'} +- self.assertTrue(zypper.mod_repo.call_count == 2) +- self.assertTrue(zypper.mod_repo.mock_calls[1] == call(name, **expected_params)) ++ 'type': 'rpm-md', ++ 'root': None, ++ } ++ self.assertEqual(zypper.mod_repo.call_count, 2) ++ self.assertEqual(zypper.mod_repo.mock_calls[1], call(name, **expected_params)) + + def test_repo_add_mod_noref(self): + ''' +@@ -1063,10 +1068,10 @@ Repository 'DUMMY' not found by its alias, number, or URI. + with zypper_patcher: + zypper.mod_repo(name, **{'url': url, 'refresh': True}) + self.assertEqual( +- zypper.__zypper__.xml.call.call_args_list, ++ zypper.__zypper__(root=None).xml.call.call_args_list, + [call('ar', url, name)] + ) +- zypper.__zypper__.refreshable.xml.call.assert_called_once_with( ++ zypper.__zypper__(root=None).refreshable.xml.call.assert_called_once_with( + 'mr', '--refresh', name + ) + +@@ -1085,8 +1090,8 @@ Repository 'DUMMY' not found by its alias, number, or URI. + 'salt.modules.zypperpkg', **self.zypper_patcher_config) + with zypper_patcher: + zypper.mod_repo(name, **{'url': url, 'refresh': True}) +- self.assertTrue(zypper.__zypper__.xml.call.call_count == 0) +- zypper.__zypper__.refreshable.xml.call.assert_called_once_with( ++ self.assertTrue(zypper.__zypper__(root=None).xml.call.call_count == 0) ++ zypper.__zypper__(root=None).refreshable.xml.call.assert_called_once_with( + 'mr', '--refresh', name + ) + +@@ -1105,13 +1110,13 @@ Repository 'DUMMY' not found by its alias, number, or URI. + with zypper_patcher: + zypper.mod_repo(name, **{'url': url, 'gpgautoimport': True}) + self.assertEqual( +- zypper.__zypper__.xml.call.call_args_list, ++ zypper.__zypper__(root=None).xml.call.call_args_list, + [ + call('ar', url, name), + call('--gpg-auto-import-keys', 'refresh', name) + ] + ) +- self.assertTrue(zypper.__zypper__.refreshable.xml.call.call_count == 0) ++ self.assertTrue(zypper.__zypper__(root=None).refreshable.xml.call.call_count == 0) + + def test_repo_noadd_nomod_ref(self): + ''' +@@ -1132,10 +1137,10 @@ Repository 'DUMMY' not found by its alias, number, or URI. 
+ with zypper_patcher: + zypper.mod_repo(name, **{'url': url, 'gpgautoimport': True}) + self.assertEqual( +- zypper.__zypper__.xml.call.call_args_list, ++ zypper.__zypper__(root=None).xml.call.call_args_list, + [call('--gpg-auto-import-keys', 'refresh', name)] + ) +- self.assertTrue(zypper.__zypper__.refreshable.xml.call.call_count == 0) ++ self.assertTrue(zypper.__zypper__(root=None).refreshable.xml.call.call_count == 0) + + def test_repo_add_mod_ref(self): + ''' +@@ -1156,13 +1161,13 @@ Repository 'DUMMY' not found by its alias, number, or URI. + **{'url': url, 'refresh': True, 'gpgautoimport': True} + ) + self.assertEqual( +- zypper.__zypper__.xml.call.call_args_list, ++ zypper.__zypper__(root=None).xml.call.call_args_list, + [ + call('ar', url, name), + call('--gpg-auto-import-keys', 'refresh', name) + ] + ) +- zypper.__zypper__.refreshable.xml.call.assert_called_once_with( ++ zypper.__zypper__(root=None).refreshable.xml.call.assert_called_once_with( + '--gpg-auto-import-keys', 'mr', '--refresh', name + ) + +@@ -1188,10 +1193,10 @@ Repository 'DUMMY' not found by its alias, number, or URI. + **{'url': url, 'refresh': True, 'gpgautoimport': True} + ) + self.assertEqual( +- zypper.__zypper__.xml.call.call_args_list, ++ zypper.__zypper__(root=None).xml.call.call_args_list, + [call('--gpg-auto-import-keys', 'refresh', name)] + ) +- zypper.__zypper__.refreshable.xml.call.assert_called_once_with( ++ zypper.__zypper__(root=None).refreshable.xml.call.assert_called_once_with( + '--gpg-auto-import-keys', 'mr', '--refresh', name + ) + +diff --git a/tests/unit/states/test_pkg.py b/tests/unit/states/test_pkg.py +index 42fe6c6867..d30e064167 100644 +--- a/tests/unit/states/test_pkg.py ++++ b/tests/unit/states/test_pkg.py +@@ -46,7 +46,7 @@ class PkgTestCase(TestCase, LoaderModuleMockMixin): + pkgname: pkgver['new'] for pkgname, pkgver in six.iteritems(self.pkgs) + }) + upgrade = MagicMock(return_value=self.pkgs) +- version = MagicMock(side_effect=lambda pkgname: self.pkgs[pkgname]['old']) ++ version = MagicMock(side_effect=lambda pkgname, **_: self.pkgs[pkgname]['old']) + + with patch.dict(pkg.__salt__, + {'pkg.list_upgrades': list_upgrades, +@@ -55,7 +55,6 @@ class PkgTestCase(TestCase, LoaderModuleMockMixin): + + # Run state with test=false + with patch.dict(pkg.__opts__, {'test': False}): +- + ret = pkg.uptodate('dummy', test=True) + self.assertTrue(ret['result']) + self.assertDictEqual(ret['changes'], self.pkgs) +@@ -81,7 +80,7 @@ class PkgTestCase(TestCase, LoaderModuleMockMixin): + pkgname: pkgver['new'] for pkgname, pkgver in six.iteritems(self.pkgs) + }) + upgrade = MagicMock(return_value=self.pkgs) +- version = MagicMock(side_effect=lambda pkgname: pkgs[pkgname]['old']) ++ version = MagicMock(side_effect=lambda pkgname, **_: pkgs[pkgname]['old']) + + with patch.dict(pkg.__salt__, + {'pkg.list_upgrades': list_upgrades, +@@ -160,7 +159,7 @@ class PkgTestCase(TestCase, LoaderModuleMockMixin): + pkgname: pkgver['new'] for pkgname, pkgver in six.iteritems(self.pkgs) + }) + upgrade = MagicMock(return_value={}) +- version = MagicMock(side_effect=lambda pkgname: pkgs[pkgname]['old']) ++ version = MagicMock(side_effect=lambda pkgname, **_: pkgs[pkgname]['old']) + + with patch.dict(pkg.__salt__, + {'pkg.list_upgrades': list_upgrades, +-- +2.20.1 + +