SHA256
1
0
forked from pool/salt

osc copypac from project:systemsmanagement:saltstack:testing package:salt revision:253

OBS-URL: https://build.opensuse.org/package/show/systemsmanagement:saltstack/salt?expand=0&rev=131
This commit is contained in:
Pablo Suárez Hernández 2019-01-17 09:18:02 +00:00 committed by Git OBS Bridge
parent 7f8d4ffeff
commit ff3dbe1ea9
36 changed files with 8831 additions and 8 deletions

View File

@ -1 +1 @@
ebc77d067d9fa300bdc5bb5dcccaa09e1787f688
2460cb78e6bda580f2567781e060a3e6c6ba25de

View File

@ -0,0 +1,194 @@
From 3bad9e211c2e76ddac48f7c8ff1632e32e0a256e Mon Sep 17 00:00:00 2001
From: Bo Maryniuk <bo@suse.de>
Date: Tue, 9 Oct 2018 14:08:50 +0200
Subject: [PATCH] Add CPE_NAME for osversion* grain parsing (U#49946)
Remove unnecessary linebreak
Override VERSION_ID from os-release, if CPE_NAME is given
Add unit test for WFN format of CPE_NAME
Add unit test for v2.3 of CPE format
Add unit test for broken CPE_NAME
Prevent possible crash if CPE_NAME is wrongly written in the distro
Add part parsing
Keep CPE_NAME only for opensuse series
Remove linebreak
Expand unit test to verify part name
Fix proper part name in the string-bound CPE
---
salt/grains/core.py | 43 +++++++++++++++++++++---
tests/unit/grains/test_core.py | 60 +++++++++++++++++++++++++++++-----
2 files changed, 90 insertions(+), 13 deletions(-)
diff --git a/salt/grains/core.py b/salt/grains/core.py
index 80eebd1c05..e41ab4e0ae 100644
--- a/salt/grains/core.py
+++ b/salt/grains/core.py
@@ -1355,6 +1355,34 @@ def _parse_os_release(os_release_files):
return data
+def _parse_cpe_name(cpe):
+ '''
+ Parse CPE_NAME data from the os-release
+
+ Info: https://csrc.nist.gov/projects/security-content-automation-protocol/scap-specifications/cpe
+
+ :param cpe:
+ :return:
+ '''
+ part = {
+ 'o': 'operating system',
+ 'h': 'hardware',
+ 'a': 'application',
+ }
+ ret = {}
+ cpe = (cpe or '').split(':')
+ if len(cpe) > 4 and cpe[0] == 'cpe':
+ if cpe[1].startswith('/'): # WFN to URI
+ ret['vendor'], ret['product'], ret['version'] = cpe[2:5]
+ ret['phase'] = cpe[5] if len(cpe) > 5 else None
+ ret['part'] = part.get(cpe[1][1:])
+ elif len(cpe) == 13 and cpe[1] == '2.3': # WFN to a string
+ ret['vendor'], ret['product'], ret['version'], ret['phase'] = [x if x != '*' else None for x in cpe[3:7]]
+ ret['part'] = part.get(cpe[2])
+
+ return ret
+
+
def os_data():
'''
Return grains pertaining to the operating system
@@ -1554,13 +1582,20 @@ def os_data():
codename = codename_match.group(1)
grains['lsb_distrib_codename'] = codename
if 'CPE_NAME' in os_release:
- if ":suse:" in os_release['CPE_NAME'] or ":opensuse:" in os_release['CPE_NAME']:
+ cpe = _parse_cpe_name(os_release['CPE_NAME'])
+ if not cpe:
+ log.error('Broken CPE_NAME format in /etc/os-release!')
+ elif cpe.get('vendor', '').lower() in ['suse', 'opensuse']:
grains['os'] = "SUSE"
# openSUSE `osfullname` grain normalization
if os_release.get("NAME") == "openSUSE Leap":
grains['osfullname'] = "Leap"
elif os_release.get("VERSION") == "Tumbleweed":
grains['osfullname'] = os_release["VERSION"]
+ # Override VERSION_ID, if CPE_NAME around
+ if cpe.get('version') and cpe.get('vendor') == 'opensuse': # Keep VERSION_ID for SLES
+ grains['lsb_distrib_release'] = cpe['version']
+
elif os.path.isfile('/etc/SuSE-release'):
grains['lsb_distrib_id'] = 'SUSE'
version = ''
@@ -1666,8 +1701,7 @@ def os_data():
# Commit introducing this comment should be reverted after the upstream bug is released.
if 'CentOS Linux 7' in grains.get('lsb_distrib_codename', ''):
grains.pop('lsb_distrib_release', None)
- grains['osrelease'] = \
- grains.get('lsb_distrib_release', osrelease).strip()
+ grains['osrelease'] = grains.get('lsb_distrib_release', osrelease).strip()
grains['oscodename'] = grains.get('lsb_distrib_codename', '').strip() or oscodename
if 'Red Hat' in grains['oscodename']:
grains['oscodename'] = oscodename
@@ -1702,8 +1736,7 @@ def os_data():
r'((?:Open|Oracle )?Solaris|OpenIndiana|OmniOS) (Development)?'
r'\s*(\d+\.?\d*|v\d+)\s?[A-Z]*\s?(r\d+|\d+\/\d+|oi_\S+|snv_\S+)?'
)
- osname, development, osmajorrelease, osminorrelease = \
- release_re.search(rel_data).groups()
+ osname, development, osmajorrelease, osminorrelease = release_re.search(rel_data).groups()
except AttributeError:
# Set a blank osrelease grain and fallback to 'Solaris'
# as the 'os' grain.
diff --git a/tests/unit/grains/test_core.py b/tests/unit/grains/test_core.py
index e973428add..2ab32ef41b 100644
--- a/tests/unit/grains/test_core.py
+++ b/tests/unit/grains/test_core.py
@@ -62,10 +62,11 @@ class CoreGrainsTestCase(TestCase, LoaderModuleMockMixin):
def test_parse_etc_os_release(self, path_isfile_mock):
path_isfile_mock.side_effect = lambda x: x == "/usr/lib/os-release"
with salt.utils.files.fopen(os.path.join(OS_RELEASE_DIR, "ubuntu-17.10")) as os_release_file:
- os_release_content = os_release_file.readlines()
- with patch("salt.utils.files.fopen", mock_open()) as os_release_file:
- os_release_file.return_value.__iter__.return_value = os_release_content
- os_release = core._parse_os_release(["/etc/os-release", "/usr/lib/os-release"])
+ os_release_content = os_release_file.read()
+ with patch("salt.utils.files.fopen", mock_open(read_data=os_release_content)):
+ os_release = core._parse_os_release(
+ '/etc/os-release',
+ '/usr/lib/os-release')
self.assertEqual(os_release, {
"NAME": "Ubuntu",
"VERSION": "17.10 (Artful Aardvark)",
@@ -81,10 +82,53 @@ class CoreGrainsTestCase(TestCase, LoaderModuleMockMixin):
"UBUNTU_CODENAME": "artful",
})
- @patch("os.path.isfile")
- def test_missing_os_release(self, path_isfile_mock):
- path_isfile_mock.return_value = False
- os_release = core._parse_os_release(["/etc/os-release", "/usr/lib/os-release"])
+ def test_parse_cpe_name_wfn(self):
+ '''
+ Parse correct CPE_NAME data WFN formatted
+ :return:
+ '''
+ for cpe, cpe_ret in [('cpe:/o:opensuse:leap:15.0',
+ {'phase': None, 'version': '15.0', 'product': 'leap',
+ 'vendor': 'opensuse', 'part': 'operating system'}),
+ ('cpe:/o:vendor:product:42:beta',
+ {'phase': 'beta', 'version': '42', 'product': 'product',
+ 'vendor': 'vendor', 'part': 'operating system'})]:
+ ret = core._parse_cpe_name(cpe)
+ for key in cpe_ret:
+ assert key in ret
+ assert cpe_ret[key] == ret[key]
+
+ def test_parse_cpe_name_v23(self):
+ '''
+ Parse correct CPE_NAME data v2.3 formatted
+ :return:
+ '''
+ for cpe, cpe_ret in [('cpe:2.3:o:microsoft:windows_xp:5.1.601:beta:*:*:*:*:*:*',
+ {'phase': 'beta', 'version': '5.1.601', 'product': 'windows_xp',
+ 'vendor': 'microsoft', 'part': 'operating system'}),
+ ('cpe:2.3:h:corellian:millenium_falcon:1.0:*:*:*:*:*:*:*',
+ {'phase': None, 'version': '1.0', 'product': 'millenium_falcon',
+ 'vendor': 'corellian', 'part': 'hardware'}),
+ ('cpe:2.3:*:dark_empire:light_saber:3.0:beta:*:*:*:*:*:*',
+ {'phase': 'beta', 'version': '3.0', 'product': 'light_saber',
+ 'vendor': 'dark_empire', 'part': None})]:
+ ret = core._parse_cpe_name(cpe)
+ for key in cpe_ret:
+ assert key in ret
+ assert cpe_ret[key] == ret[key]
+
+ def test_parse_cpe_name_broken(self):
+ '''
+ Parse broken CPE_NAME data
+ :return:
+ '''
+ for cpe in ['cpe:broken', 'cpe:broken:in:all:ways:*:*:*:*',
+ 'cpe:x:still:broken:123', 'who:/knows:what:is:here']:
+ assert core._parse_cpe_name(cpe) == {}
+
+ def test_missing_os_release(self):
+ with patch('salt.utils.files.fopen', mock_open(read_data={})):
+ os_release = core._parse_os_release('/etc/os-release', '/usr/lib/os-release')
self.assertEqual(os_release, {})
@skipIf(not salt.utils.platform.is_linux(), 'System is not Linux')
--
2.19.0

View File

@ -0,0 +1,144 @@
From ba5171ce35b733a1f7997b4ea038998802b67298 Mon Sep 17 00:00:00 2001
From: Bo Maryniuk <bo@suse.de>
Date: Thu, 6 Dec 2018 16:26:23 +0100
Subject: [PATCH] Add hold/unhold functions
Add unhold function
Add warnings
---
salt/modules/zypper.py | 88 +++++++++++++++++++++++++++++++++++++++++-
1 file changed, 87 insertions(+), 1 deletion(-)
diff --git a/salt/modules/zypper.py b/salt/modules/zypper.py
index 6845e44ab6..773354b2f3 100644
--- a/salt/modules/zypper.py
+++ b/salt/modules/zypper.py
@@ -41,6 +41,7 @@ import salt.utils.pkg
import salt.utils.pkg.rpm
import salt.utils.stringutils
import salt.utils.systemd
+import salt.utils.versions
from salt.utils.versions import LooseVersion
from salt.exceptions import CommandExecutionError, MinionError, SaltInvocationError
@@ -1738,7 +1739,7 @@ def clean_locks():
return out
-def remove_lock(packages, **kwargs): # pylint: disable=unused-argument
+def unhold(name=None, pkgs=None, **kwargs):
'''
Remove specified package lock.
@@ -1750,7 +1751,47 @@ def remove_lock(packages, **kwargs): # pylint: disable=unused-argument
salt '*' pkg.remove_lock <package1>,<package2>,<package3>
salt '*' pkg.remove_lock pkgs='["foo", "bar"]'
'''
+ ret = {}
+ if (not name and not pkgs) or (name and pkgs):
+ raise CommandExecutionError('Name or packages must be specified.')
+ elif name:
+ pkgs = [name]
+
+ locks = list_locks()
+ try:
+ pkgs = list(__salt__['pkg_resource.parse_targets'](pkgs)[0].keys())
+ except MinionError as exc:
+ raise CommandExecutionError(exc)
+
+ removed = []
+ missing = []
+ for pkg in pkgs:
+ if locks.get(pkg):
+ removed.append(pkg)
+ ret[pkg]['comment'] = 'Package {0} is no longer held.'.format(pkg)
+ else:
+ missing.append(pkg)
+ ret[pkg]['comment'] = 'Package {0} unable to be unheld.'.format(pkg)
+
+ if removed:
+ __zypper__.call('rl', *removed)
+
+ return ret
+
+
+def remove_lock(packages, **kwargs): # pylint: disable=unused-argument
+ '''
+ Remove specified package lock.
+
+ CLI Example:
+
+ .. code-block:: bash
+ salt '*' pkg.remove_lock <package name>
+ salt '*' pkg.remove_lock <package1>,<package2>,<package3>
+ salt '*' pkg.remove_lock pkgs='["foo", "bar"]'
+ '''
+ salt.utils.versions.warn_until('Sodium', 'This function is deprecated. Please use unhold() instead.')
locks = list_locks()
try:
packages = list(__salt__['pkg_resource.parse_targets'](packages)[0].keys())
@@ -1771,6 +1812,50 @@ def remove_lock(packages, **kwargs): # pylint: disable=unused-argument
return {'removed': len(removed), 'not_found': missing}
+def hold(name=None, pkgs=None, **kwargs):
+ '''
+ Add a package lock. Specify packages to lock by exact name.
+
+ CLI Example:
+
+ .. code-block:: bash
+
+ salt '*' pkg.add_lock <package name>
+ salt '*' pkg.add_lock <package1>,<package2>,<package3>
+ salt '*' pkg.add_lock pkgs='["foo", "bar"]'
+
+ :param name:
+ :param pkgs:
+ :param kwargs:
+ :return:
+ '''
+ ret = {}
+ if (not name and not pkgs) or (name and pkgs):
+ raise CommandExecutionError('Name or packages must be specified.')
+ elif name:
+ pkgs = [name]
+
+ locks = list_locks()
+ added = []
+ try:
+ pkgs = list(__salt__['pkg_resource.parse_targets'](pkgs)[0].keys())
+ except MinionError as exc:
+ raise CommandExecutionError(exc)
+
+ for pkg in pkgs:
+ ret[pkg] = {'name': pkg, 'changes': {}, 'result': False, 'comment': ''}
+ if not locks.get(pkg):
+ added.append(pkg)
+ ret[pkg]['comment'] = 'Package {0} is now being held.'.format(pkg)
+ else:
+ ret[pkg]['comment'] = 'Package {0} is already set to be held.'.format(pkg)
+
+ if added:
+ __zypper__.call('al', *added)
+
+ return ret
+
+
def add_lock(packages, **kwargs): # pylint: disable=unused-argument
'''
Add a package lock. Specify packages to lock by exact name.
@@ -1783,6 +1868,7 @@ def add_lock(packages, **kwargs): # pylint: disable=unused-argument
salt '*' pkg.add_lock <package1>,<package2>,<package3>
salt '*' pkg.add_lock pkgs='["foo", "bar"]'
'''
+ salt.utils.versions.warn_until('Sodium', 'This function is deprecated. Please use hold() instead.')
locks = list_locks()
added = []
try:
--
2.20.1

View File

@ -0,0 +1,116 @@
From 671bb9d48e120c806ca1f6f176b0ada43b1e7594 Mon Sep 17 00:00:00 2001
From: Bo Maryniuk <bo@suse.de>
Date: Fri, 12 Oct 2018 16:20:40 +0200
Subject: [PATCH] Add multi-file support and globbing to the filetree
(U#50018)
Add more possible logs
Support multiple files grabbing
Collect system logs and boot logs
Support globbing in filetree
---
salt/cli/support/intfunc.py | 49 ++++++++++++++++-----------
salt/cli/support/profiles/default.yml | 7 ++++
2 files changed, 37 insertions(+), 19 deletions(-)
diff --git a/salt/cli/support/intfunc.py b/salt/cli/support/intfunc.py
index 2727cd6394..f15f4d4097 100644
--- a/salt/cli/support/intfunc.py
+++ b/salt/cli/support/intfunc.py
@@ -6,6 +6,7 @@ Internal functions.
from __future__ import absolute_import, print_function, unicode_literals
import os
+import glob
from salt.cli.support.console import MessagesOutput
import salt.utils.files
@@ -13,7 +14,7 @@ import salt.utils.files
out = MessagesOutput()
-def filetree(collector, path):
+def filetree(collector, *paths):
'''
Add all files in the tree. If the "path" is a file,
only that file will be added.
@@ -21,22 +22,32 @@ def filetree(collector, path):
:param path: File or directory
:return:
'''
- if not path:
- out.error('Path not defined', ident=2)
- else:
- # The filehandler needs to be explicitly passed here, so PyLint needs to accept that.
- # pylint: disable=W8470
- if os.path.isfile(path):
- filename = os.path.basename(path)
- try:
- file_ref = salt.utils.files.fopen(path) # pylint: disable=W
- out.put('Add {}'.format(filename), indent=2)
- collector.add(filename)
- collector.link(title=path, path=file_ref)
- except Exception as err:
- out.error(err, ident=4)
- # pylint: enable=W8470
+ _paths = []
+ # Unglob
+ for path in paths:
+ _paths += glob.glob(path)
+ for path in set(_paths):
+ if not path:
+ out.error('Path not defined', ident=2)
+ elif not os.path.exists(path):
+ out.warning('Path {} does not exists'.format(path))
else:
- for fname in os.listdir(path):
- fname = os.path.join(path, fname)
- filetree(collector, fname)
+ # The filehandler needs to be explicitly passed here, so PyLint needs to accept that.
+ # pylint: disable=W8470
+ if os.path.isfile(path):
+ filename = os.path.basename(path)
+ try:
+ file_ref = salt.utils.files.fopen(path) # pylint: disable=W
+ out.put('Add {}'.format(filename), indent=2)
+ collector.add(filename)
+ collector.link(title=path, path=file_ref)
+ except Exception as err:
+ out.error(err, ident=4)
+ # pylint: enable=W8470
+ else:
+ try:
+ for fname in os.listdir(path):
+ fname = os.path.join(path, fname)
+ filetree(collector, [fname])
+ except Exception as err:
+ out.error(err, ident=4)
diff --git a/salt/cli/support/profiles/default.yml b/salt/cli/support/profiles/default.yml
index 01d9a26193..3defb5eef3 100644
--- a/salt/cli/support/profiles/default.yml
+++ b/salt/cli/support/profiles/default.yml
@@ -62,10 +62,17 @@ general-health:
- ps.top:
info: Top CPU consuming processes
+boot_log:
+ - filetree:
+ info: Collect boot logs
+ args:
+ - /var/log/boot.*
+
system.log:
# This works on any file system object.
- filetree:
info: Add system log
args:
- /var/log/syslog
+ - /var/log/messages
--
2.19.0

File diff suppressed because it is too large Load Diff

View File

@ -0,0 +1,31 @@
From 326e649ef1f14b609916f0e9ce75e29a5e7f4d05 Mon Sep 17 00:00:00 2001
From: Robert Munteanu <rombert@apache.org>
Date: Mon, 19 Nov 2018 17:52:34 +0100
Subject: [PATCH] azurefs: gracefully handle AttributeError
It is possible that the azure.storage object has no __version__ defined.
In that case, prevent console spam with unhandled AttributeError
messages and instead consider that Azure support is not present.
Problem was encountered on openSUSE Tumbleweed.
---
salt/fileserver/azurefs.py | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/salt/fileserver/azurefs.py b/salt/fileserver/azurefs.py
index c266323fbe..a42c10c594 100644
--- a/salt/fileserver/azurefs.py
+++ b/salt/fileserver/azurefs.py
@@ -68,7 +68,7 @@ try:
if LooseVersion(azure.storage.__version__) < LooseVersion('0.20.0'):
raise ImportError('azure.storage.__version__ must be >= 0.20.0')
HAS_AZURE = True
-except ImportError:
+except (ImportError, AttributeError):
HAS_AZURE = False
# Import third party libs
--
2.20.1

View File

@ -0,0 +1,27 @@
From e82dc4c556497b612d31b65e60b34c979c957424 Mon Sep 17 00:00:00 2001
From: Bo Maryniuk <bo@suse.de>
Date: Fri, 5 Oct 2018 12:02:08 +0200
Subject: [PATCH] Bugfix: any unicode string of length 16 will raise
TypeError instead of ValueError
---
salt/_compat.py | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/salt/_compat.py b/salt/_compat.py
index 0576210afc..71963a4ead 100644
--- a/salt/_compat.py
+++ b/salt/_compat.py
@@ -192,7 +192,7 @@ class IPv6AddressScoped(ipaddress.IPv6Address):
if len(data) == 16 and ':' not in data:
try:
packed = bool(int(str(bytearray(data)).encode('hex'), 16))
- except ValueError:
+ except (ValueError, TypeError):
pass
return packed
--
2.17.1

View File

@ -0,0 +1,642 @@
From 9b2473001dcf25c53dff469d3ffb38113e0402eb Mon Sep 17 00:00:00 2001
From: Bo Maryniuk <bo@suse.de>
Date: Tue, 20 Nov 2018 16:06:31 +0100
Subject: [PATCH] Debian info_installed compatibility (#50453)
Remove unused variable
Get unit ticks installation time
Pass on unix ticks installation date time
Implement function to figure out package build time
Unify arch attribute
Add 'attr' support.
Use attr parameter in aptpkg
Add 'all_versions' output structure backward compatibility
Fix docstring
Add UT for generic test of function 'info'
Add UT for 'info' function with the parameter 'attr'
Add UT for info_installed's 'attr' param
Fix docstring
Add returned type check
Add UT for info_installed with 'all_versions=True' output structure
Refactor UT for 'owner' function
Refactor UT: move to decorators, add more checks
Schedule TODO for next refactoring of UT 'show' function
Refactor UT: get rid of old assertion way, flatten tests
Refactor UT: move to native assertions, cleanup noise, flatten complexity for better visibility what is tested
Lintfix: too many empty lines
Adjust architecture getter according to the lowpkg info
Fix wrong Git merge: missing function signature
---
salt/modules/aptpkg.py | 20 +++-
salt/modules/dpkg.py | 93 +++++++++++++--
tests/unit/modules/test_aptpkg.py | 189 +++++++++++++++++-------------
tests/unit/modules/test_dpkg.py | 69 +++++++++++
4 files changed, 274 insertions(+), 97 deletions(-)
diff --git a/salt/modules/aptpkg.py b/salt/modules/aptpkg.py
index 90b99c44b9..dc27903230 100644
--- a/salt/modules/aptpkg.py
+++ b/salt/modules/aptpkg.py
@@ -2800,6 +2800,15 @@ def info_installed(*names, **kwargs):
.. versionadded:: 2016.11.3
+ attr
+ Comma-separated package attributes. If no 'attr' is specified, all available attributes returned.
+
+ Valid attributes are:
+ version, vendor, release, build_date, build_date_time_t, install_date, install_date_time_t,
+ build_host, group, source_rpm, arch, epoch, size, license, signature, packager, url, summary, description.
+
+ .. versionadded:: Neon
+
CLI example:
.. code-block:: bash
@@ -2810,11 +2819,15 @@ def info_installed(*names, **kwargs):
'''
kwargs = salt.utils.args.clean_kwargs(**kwargs)
failhard = kwargs.pop('failhard', True)
+ kwargs.pop('errors', None) # Only for compatibility with RPM
+ attr = kwargs.pop('attr', None) # Package attributes to return
+ all_versions = kwargs.pop('all_versions', False) # This is for backward compatible structure only
+
if kwargs:
salt.utils.args.invalid_kwargs(kwargs)
ret = dict()
- for pkg_name, pkg_nfo in __salt__['lowpkg.info'](*names, failhard=failhard).items():
+ for pkg_name, pkg_nfo in __salt__['lowpkg.info'](*names, failhard=failhard, attr=attr).items():
t_nfo = dict()
# Translate dpkg-specific keys to a common structure
for key, value in pkg_nfo.items():
@@ -2831,7 +2844,10 @@ def info_installed(*names, **kwargs):
else:
t_nfo[key] = value
- ret[pkg_name] = t_nfo
+ if all_versions:
+ ret.setdefault(pkg_name, []).append(t_nfo)
+ else:
+ ret[pkg_name] = t_nfo
return ret
diff --git a/salt/modules/dpkg.py b/salt/modules/dpkg.py
index 03be5f821a..26ca5dcf5a 100644
--- a/salt/modules/dpkg.py
+++ b/salt/modules/dpkg.py
@@ -252,6 +252,38 @@ def file_dict(*packages):
return {'errors': errors, 'packages': ret}
+def _get_pkg_build_time(name):
+ '''
+ Get package build time, if possible.
+
+ :param name:
+ :return:
+ '''
+ iso_time = iso_time_t = None
+ changelog_dir = os.path.join('/usr/share/doc', name)
+ if os.path.exists(changelog_dir):
+ for fname in os.listdir(changelog_dir):
+ try:
+ iso_time_t = int(os.path.getmtime(os.path.join(changelog_dir, fname)))
+ iso_time = datetime.datetime.utcfromtimestamp(iso_time_t).isoformat() + 'Z'
+ break
+ except OSError:
+ pass
+
+ # Packager doesn't care about Debian standards, therefore Plan B: brute-force it.
+ if not iso_time:
+ for pkg_f_path in __salt__['cmd.run']('dpkg-query -L {}'.format(name)).splitlines():
+ if 'changelog' in pkg_f_path.lower() and os.path.exists(pkg_f_path):
+ try:
+ iso_time_t = int(os.path.getmtime(pkg_f_path))
+ iso_time = datetime.datetime.utcfromtimestamp(iso_time_t).isoformat() + 'Z'
+ break
+ except OSError:
+ pass
+
+ return iso_time, iso_time_t
+
+
def _get_pkg_info(*packages, **kwargs):
'''
Return list of package information. If 'packages' parameter is empty,
@@ -274,7 +306,7 @@ def _get_pkg_info(*packages, **kwargs):
ret = []
cmd = "dpkg-query -W -f='package:" + bin_var + "\\n" \
"revision:${binary:Revision}\\n" \
- "architecture:${Architecture}\\n" \
+ "arch:${Architecture}\\n" \
"maintainer:${Maintainer}\\n" \
"summary:${Summary}\\n" \
"source:${source:Package}\\n" \
@@ -307,9 +339,14 @@ def _get_pkg_info(*packages, **kwargs):
key, value = pkg_info_line.split(":", 1)
if value:
pkg_data[key] = value
- install_date = _get_pkg_install_time(pkg_data.get('package'))
- if install_date:
- pkg_data['install_date'] = install_date
+ install_date, install_date_t = _get_pkg_install_time(pkg_data.get('package'), pkg_data.get('arch'))
+ if install_date:
+ pkg_data['install_date'] = install_date
+ pkg_data['install_date_time_t'] = install_date_t # Unix ticks
+ build_date, build_date_t = _get_pkg_build_time(pkg_data.get('package'))
+ if build_date:
+ pkg_data['build_date'] = build_date
+ pkg_data['build_date_time_t'] = build_date_t
pkg_data['description'] = pkg_descr.split(":", 1)[-1]
ret.append(pkg_data)
@@ -335,19 +372,32 @@ def _get_pkg_license(pkg):
return ", ".join(sorted(licenses))
-def _get_pkg_install_time(pkg):
+def _get_pkg_install_time(pkg, arch):
'''
Return package install time, based on the /var/lib/dpkg/info/<package>.list
:return:
'''
- iso_time = None
+ iso_time = iso_time_t = None
+ loc_root = '/var/lib/dpkg/info'
if pkg is not None:
- location = "/var/lib/dpkg/info/{0}.list".format(pkg)
- if os.path.exists(location):
- iso_time = datetime.datetime.utcfromtimestamp(int(os.path.getmtime(location))).isoformat() + "Z"
+ locations = []
+ if arch is not None and arch != 'all':
+ locations.append(os.path.join(loc_root, '{0}:{1}.list'.format(pkg, arch)))
+
+ locations.append(os.path.join(loc_root, '{0}.list'.format(pkg)))
+ for location in locations:
+ try:
+ iso_time_t = int(os.path.getmtime(location))
+ iso_time = datetime.datetime.utcfromtimestamp(iso_time_t).isoformat() + 'Z'
+ break
+ except OSError:
+ pass
- return iso_time
+ if iso_time is None:
+ log.debug('Unable to get package installation time for package "%s".', pkg)
+
+ return iso_time, iso_time_t
def _get_pkg_ds_avail():
@@ -397,6 +447,15 @@ def info(*packages, **kwargs):
.. versionadded:: 2016.11.3
+ attr
+ Comma-separated package attributes. If no 'attr' is specified, all available attributes returned.
+
+ Valid attributes are:
+ version, vendor, release, build_date, build_date_time_t, install_date, install_date_time_t,
+ build_host, group, source_rpm, arch, epoch, size, license, signature, packager, url, summary, description.
+
+ .. versionadded:: Neon
+
CLI example:
.. code-block:: bash
@@ -411,6 +470,10 @@ def info(*packages, **kwargs):
kwargs = salt.utils.args.clean_kwargs(**kwargs)
failhard = kwargs.pop('failhard', True)
+ attr = kwargs.pop('attr', None) or None
+ if attr:
+ attr = attr.split(',')
+
if kwargs:
salt.utils.args.invalid_kwargs(kwargs)
@@ -430,6 +493,14 @@ def info(*packages, **kwargs):
lic = _get_pkg_license(pkg['package'])
if lic:
pkg['license'] = lic
- ret[pkg['package']] = pkg
+
+ # Remove keys that aren't in attrs
+ pkg_name = pkg['package']
+ if attr:
+ for k in list(pkg.keys())[:]:
+ if k not in attr:
+ del pkg[k]
+
+ ret[pkg_name] = pkg
return ret
diff --git a/tests/unit/modules/test_aptpkg.py b/tests/unit/modules/test_aptpkg.py
index c0e26cfcd4..5352e39982 100644
--- a/tests/unit/modules/test_aptpkg.py
+++ b/tests/unit/modules/test_aptpkg.py
@@ -13,12 +13,14 @@ import copy
# Import Salt Testing Libs
from tests.support.mixins import LoaderModuleMockMixin
from tests.support.unit import TestCase, skipIf
-from tests.support.mock import MagicMock, patch, NO_MOCK, NO_MOCK_REASON
+from tests.support.mock import Mock, MagicMock, patch, NO_MOCK, NO_MOCK_REASON
# Import Salt Libs
from salt.ext import six
from salt.exceptions import CommandExecutionError, SaltInvocationError
import salt.modules.aptpkg as aptpkg
+import pytest
+import textwrap
APT_KEY_LIST = r'''
@@ -142,51 +144,39 @@ class AptPkgTestCase(TestCase, LoaderModuleMockMixin):
def setup_loader_modules(self):
return {aptpkg: {}}
+ @patch('salt.modules.aptpkg.__salt__',
+ {'pkg_resource.version': MagicMock(return_value=LOWPKG_INFO['wget']['version'])})
def test_version(self):
'''
Test - Returns a string representing the package version or an empty string if
not installed.
'''
- version = LOWPKG_INFO['wget']['version']
- mock = MagicMock(return_value=version)
- with patch.dict(aptpkg.__salt__, {'pkg_resource.version': mock}):
- self.assertEqual(aptpkg.version(*['wget']), version)
+ assert aptpkg.version(*['wget']) == aptpkg.__salt__['pkg_resource.version']()
+ @patch('salt.modules.aptpkg.latest_version', MagicMock(return_value=''))
def test_upgrade_available(self):
'''
Test - Check whether or not an upgrade is available for a given package.
'''
- with patch('salt.modules.aptpkg.latest_version',
- MagicMock(return_value='')):
- self.assertFalse(aptpkg.upgrade_available('wget'))
+ assert not aptpkg.upgrade_available('wget')
+ @patch('salt.modules.aptpkg.get_repo_keys', MagicMock(return_value=REPO_KEYS))
+ @patch('salt.modules.aptpkg.__salt__', {'cmd.run_all': MagicMock(return_value={'retcode': 0, 'stdout': 'OK'})})
def test_add_repo_key(self):
'''
Test - Add a repo key.
'''
- with patch('salt.modules.aptpkg.get_repo_keys',
- MagicMock(return_value=REPO_KEYS)):
- mock = MagicMock(return_value={
- 'retcode': 0,
- 'stdout': 'OK'
- })
- with patch.dict(aptpkg.__salt__, {'cmd.run_all': mock}):
- self.assertTrue(aptpkg.add_repo_key(keyserver='keyserver.ubuntu.com',
- keyid='FBB75451'))
+ assert aptpkg.add_repo_key(keyserver='keyserver.ubuntu.com', keyid='FBB75451')
+ @patch('salt.modules.aptpkg.get_repo_keys', MagicMock(return_value=REPO_KEYS))
+ @patch('salt.modules.aptpkg.__salt__', {'cmd.run_all': MagicMock(return_value={'retcode': 0, 'stdout': 'OK'})})
def test_add_repo_key_failed(self):
'''
Test - Add a repo key using incomplete input data.
'''
- with patch('salt.modules.aptpkg.get_repo_keys',
- MagicMock(return_value=REPO_KEYS)):
- kwargs = {'keyserver': 'keyserver.ubuntu.com'}
- mock = MagicMock(return_value={
- 'retcode': 0,
- 'stdout': 'OK'
- })
- with patch.dict(aptpkg.__salt__, {'cmd.run_all': mock}):
- self.assertRaises(SaltInvocationError, aptpkg.add_repo_key, **kwargs)
+ with pytest.raises(SaltInvocationError) as ex:
+ aptpkg.add_repo_key(keyserver='keyserver.ubuntu.com')
+ assert ' No keyid or keyid too short for keyserver: keyserver.ubuntu.com' in str(ex)
def test_get_repo_keys(self):
'''
@@ -199,35 +189,31 @@ class AptPkgTestCase(TestCase, LoaderModuleMockMixin):
with patch.dict(aptpkg.__salt__, {'cmd.run_all': mock}):
self.assertEqual(aptpkg.get_repo_keys(), REPO_KEYS)
+ @patch('salt.modules.aptpkg.__salt__', {'lowpkg.file_dict': MagicMock(return_value=LOWPKG_FILES)})
def test_file_dict(self):
'''
Test - List the files that belong to a package, grouped by package.
'''
- mock = MagicMock(return_value=LOWPKG_FILES)
- with patch.dict(aptpkg.__salt__, {'lowpkg.file_dict': mock}):
- self.assertEqual(aptpkg.file_dict('wget'), LOWPKG_FILES)
+ assert aptpkg.file_dict('wget') == LOWPKG_FILES
+ @patch('salt.modules.aptpkg.__salt__', {
+ 'lowpkg.file_list': MagicMock(return_value={'errors': LOWPKG_FILES['errors'],
+ 'files': LOWPKG_FILES['packages']['wget']})})
def test_file_list(self):
'''
- Test - List the files that belong to a package.
+ Test 'file_list' function, which is just an alias to the lowpkg 'file_list'
+
'''
- files = {
- 'errors': LOWPKG_FILES['errors'],
- 'files': LOWPKG_FILES['packages']['wget'],
- }
- mock = MagicMock(return_value=files)
- with patch.dict(aptpkg.__salt__, {'lowpkg.file_list': mock}):
- self.assertEqual(aptpkg.file_list('wget'), files)
+ assert aptpkg.file_list('wget') == aptpkg.__salt__['lowpkg.file_list']()
+ @patch('salt.modules.aptpkg.__salt__', {'cmd.run_stdout': MagicMock(return_value='wget\t\t\t\t\t\tinstall')})
def test_get_selections(self):
'''
Test - View package state from the dpkg database.
'''
- selections = {'install': ['wget']}
- mock = MagicMock(return_value='wget\t\t\t\t\t\tinstall')
- with patch.dict(aptpkg.__salt__, {'cmd.run_stdout': mock}):
- self.assertEqual(aptpkg.get_selections('wget'), selections)
+ assert aptpkg.get_selections('wget') == {'install': ['wget']}
+ @patch('salt.modules.aptpkg.__salt__', {'lowpkg.info': MagicMock(return_value=LOWPKG_INFO)})
def test_info_installed(self):
'''
Test - Return the information of the named package(s) installed on the system.
@@ -243,19 +229,72 @@ class AptPkgTestCase(TestCase, LoaderModuleMockMixin):
if installed['wget'].get(names[name], False):
installed['wget'][name] = installed['wget'].pop(names[name])
- mock = MagicMock(return_value=LOWPKG_INFO)
- with patch.dict(aptpkg.__salt__, {'lowpkg.info': mock}):
- self.assertEqual(aptpkg.info_installed('wget'), installed)
+ assert aptpkg.info_installed('wget') == installed
+
+ @patch('salt.modules.aptpkg.__salt__', {'lowpkg.info': MagicMock(return_value=LOWPKG_INFO)})
+ def test_info_installed_attr(self):
+ '''
+ Test info_installed 'attr'.
+ This doesn't test 'attr' behaviour per se, since the underlying function is in dpkg.
+ The test should simply not raise exceptions for invalid parameter.
+
+ :return:
+ '''
+ ret = aptpkg.info_installed('emacs', attr='foo,bar')
+ assert isinstance(ret, dict)
+ assert 'wget' in ret
+ assert isinstance(ret['wget'], dict)
+
+ wget_pkg = ret['wget']
+ expected_pkg = {'url': 'http://www.gnu.org/software/wget/',
+ 'packager': 'Ubuntu Developers <ubuntu-devel-discuss@lists.ubuntu.com>', 'name': 'wget',
+ 'install_date': '2016-08-30T22:20:15Z', 'description': 'retrieves files from the web',
+ 'version': '1.15-1ubuntu1.14.04.2', 'architecture': 'amd64', 'group': 'web', 'source': 'wget'}
+ for k in wget_pkg:
+ assert k in expected_pkg
+ assert wget_pkg[k] == expected_pkg[k]
+
+ @patch('salt.modules.aptpkg.__salt__', {'lowpkg.info': MagicMock(return_value=LOWPKG_INFO)})
+ def test_info_installed_all_versions(self):
+ '''
+ Test info_installed 'all_versions'.
+ Since Debian won't return same name packages with the different names,
+ this should just return different structure, backward compatible with
+ the RPM equivalents.
+
+ :return:
+ '''
+ print()
+ ret = aptpkg.info_installed('emacs', all_versions=True)
+ assert isinstance(ret, dict)
+ assert 'wget' in ret
+ assert isinstance(ret['wget'], list)
+ pkgs = ret['wget']
+
+ assert len(pkgs) == 1
+ assert isinstance(pkgs[0], dict)
+
+ wget_pkg = pkgs[0]
+ expected_pkg = {'url': 'http://www.gnu.org/software/wget/',
+ 'packager': 'Ubuntu Developers <ubuntu-devel-discuss@lists.ubuntu.com>', 'name': 'wget',
+ 'install_date': '2016-08-30T22:20:15Z', 'description': 'retrieves files from the web',
+ 'version': '1.15-1ubuntu1.14.04.2', 'architecture': 'amd64', 'group': 'web', 'source': 'wget'}
+ for k in wget_pkg:
+ assert k in expected_pkg
+ assert wget_pkg[k] == expected_pkg[k]
+
+ @patch('salt.modules.aptpkg.__salt__', {'cmd.run_stdout': MagicMock(return_value='wget: /usr/bin/wget')})
def test_owner(self):
'''
Test - Return the name of the package that owns the file.
'''
- paths = ['/usr/bin/wget']
- mock = MagicMock(return_value='wget: /usr/bin/wget')
- with patch.dict(aptpkg.__salt__, {'cmd.run_stdout': mock}):
- self.assertEqual(aptpkg.owner(*paths), 'wget')
+ assert aptpkg.owner('/usr/bin/wget') == 'wget'
+ @patch('salt.utils.pkg.clear_rtag', MagicMock())
+ @patch('salt.modules.aptpkg.__salt__', {'cmd.run_all': MagicMock(return_value={'retcode': 0,
+ 'stdout': APT_Q_UPDATE}),
+ 'config.get': MagicMock(return_value=False)})
def test_refresh_db(self):
'''
Test - Updates the APT database to latest packages based upon repositories.
@@ -267,26 +306,20 @@ class AptPkgTestCase(TestCase, LoaderModuleMockMixin):
'http://security.ubuntu.com trusty-security/main amd64 Packages': True,
'http://security.ubuntu.com trusty-security/main i386 Packages': True
}
- mock = MagicMock(return_value={
- 'retcode': 0,
- 'stdout': APT_Q_UPDATE
- })
- with patch('salt.utils.pkg.clear_rtag', MagicMock()):
- with patch.dict(aptpkg.__salt__, {'cmd.run_all': mock}):
- self.assertEqual(aptpkg.refresh_db(), refresh_db)
+ assert aptpkg.refresh_db() == refresh_db
+ @patch('salt.utils.pkg.clear_rtag', MagicMock())
+ @patch('salt.modules.aptpkg.__salt__', {'cmd.run_all': MagicMock(return_value={'retcode': 0,
+ 'stdout': APT_Q_UPDATE_ERROR}),
+ 'config.get': MagicMock(return_value=False)})
def test_refresh_db_failed(self):
'''
Test - Update the APT database using unreachable repositories.
'''
- kwargs = {'failhard': True}
- mock = MagicMock(return_value={
- 'retcode': 0,
- 'stdout': APT_Q_UPDATE_ERROR
- })
- with patch('salt.utils.pkg.clear_rtag', MagicMock()):
- with patch.dict(aptpkg.__salt__, {'cmd.run_all': mock}):
- self.assertRaises(CommandExecutionError, aptpkg.refresh_db, **kwargs)
+ with pytest.raises(CommandExecutionError) as err:
+ aptpkg.refresh_db(failhard=True)
+ assert 'Error getting repos' in str(err)
+ assert 'http://security.ubuntu.com trusty InRelease, http://security.ubuntu.com trusty Release.gpg' in str(err)
def test_autoremove(self):
'''
@@ -306,38 +339,26 @@ class AptPkgTestCase(TestCase, LoaderModuleMockMixin):
self.assertEqual(aptpkg.autoremove(list_only=True), list())
self.assertEqual(aptpkg.autoremove(list_only=True, purge=True), list())
+ @patch('salt.modules.aptpkg._uninstall', MagicMock(return_value=UNINSTALL))
def test_remove(self):
'''
Test - Remove packages.
'''
- with patch('salt.modules.aptpkg._uninstall',
- MagicMock(return_value=UNINSTALL)):
- self.assertEqual(aptpkg.remove(name='tmux'), UNINSTALL)
+ assert aptpkg.remove(name='tmux') == UNINSTALL
+ @patch('salt.modules.aptpkg._uninstall', MagicMock(return_value=UNINSTALL))
def test_purge(self):
'''
Test - Remove packages along with all configuration files.
'''
- with patch('salt.modules.aptpkg._uninstall',
- MagicMock(return_value=UNINSTALL)):
- self.assertEqual(aptpkg.purge(name='tmux'), UNINSTALL)
+ assert aptpkg.purge(name='tmux') == UNINSTALL
+ @patch('salt.utils.pkg.clear_rtag', MagicMock())
+ @patch('salt.modules.aptpkg.list_pkgs', MagicMock(return_value=UNINSTALL))
+ @patch.multiple(aptpkg, **{'__salt__': {'config.get': MagicMock(return_value=True),
+ 'cmd.run_all': MagicMock(return_value={'retcode': 0, 'stdout': UPGRADE})}})
def test_upgrade(self):
'''
Test - Upgrades all packages.
'''
- with patch('salt.utils.pkg.clear_rtag', MagicMock()):
- with patch('salt.modules.aptpkg.list_pkgs',
- MagicMock(return_value=UNINSTALL)):
- mock_cmd = MagicMock(return_value={
- 'retcode': 0,
- 'stdout': UPGRADE
- })
- patch_kwargs = {
- '__salt__': {
- 'config.get': MagicMock(return_value=True),
- 'cmd.run_all': mock_cmd
- }
- }
- with patch.multiple(aptpkg, **patch_kwargs):
- self.assertEqual(aptpkg.upgrade(), dict())
+ assert aptpkg.upgrade() == {}
diff --git a/tests/unit/modules/test_dpkg.py b/tests/unit/modules/test_dpkg.py
index fcfa7caf77..1acfd89ccf 100644
--- a/tests/unit/modules/test_dpkg.py
+++ b/tests/unit/modules/test_dpkg.py
@@ -25,6 +25,30 @@ class DpkgTestCase(TestCase, LoaderModuleMockMixin):
'''
Test cases for salt.modules.dpkg
'''
+ dselect_pkg = {
+ 'emacs': {'priority': 'optional', 'filename': 'pool/main/e/emacs-defaults/emacs_46.1_all.deb',
+ 'description': 'GNU Emacs editor (metapackage)', 'md5sum': '766eb2cee55ba0122dac64c4cea04445',
+ 'sha256': 'd172289b9a1608820eddad85c7ffc15f346a6e755c3120de0f64739c4bbc44ce',
+ 'description-md5': '21fb7da111336097a2378959f6d6e6a8',
+ 'bugs': 'https://bugs.launchpad.net/springfield/+filebug',
+ 'depends': 'emacs24 | emacs24-lucid | emacs24-nox', 'origin': 'Simpsons', 'version': '46.1',
+ 'task': 'ubuntu-usb, edubuntu-usb', 'original-maintainer': 'Homer Simpson <homer@springfield.org>',
+ 'package': 'emacs', 'architecture': 'all', 'size': '1692',
+ 'sha1': '9271bcec53c1f7373902b1e594d9fc0359616407', 'source': 'emacs-defaults',
+ 'maintainer': 'Simpsons Developers <simpsons-devel-discuss@lists.springfield.org>', 'supported': '9m',
+ 'section': 'editors', 'installed-size': '25'}
+ }
+
+ pkgs_info = [
+ {'version': '46.1', 'arch': 'all', 'build_date': '2014-08-07T16:51:48Z', 'install_date_time_t': 1481745778,
+ 'section': 'editors', 'description': 'GNU Emacs editor (metapackage)\n GNU Emacs is the extensible '
+ 'self-documenting text editor.\n This is a metapackage that will always '
+ 'depend on the latest\n recommended Emacs release.\n',
+ 'package': 'emacs', 'source': 'emacs-defaults',
+ 'maintainer': 'Simpsons Developers <simpsons-devel-discuss@lists.springfield.org>',
+ 'build_date_time_t': 1407430308, 'installed_size': '25', 'install_date': '2016-12-14T20:02:58Z'}
+ ]
+
def setup_loader_modules(self):
return {dpkg: {}}
@@ -102,3 +126,48 @@ class DpkgTestCase(TestCase, LoaderModuleMockMixin):
'stdout': 'Salt'})
with patch.dict(dpkg.__salt__, {'cmd.run_all': mock}):
self.assertEqual(dpkg.file_dict('httpd'), 'Error: error')
+
+ @patch('salt.modules.dpkg._get_pkg_ds_avail', MagicMock(return_value=dselect_pkg))
+ @patch('salt.modules.dpkg._get_pkg_info', MagicMock(return_value=pkgs_info))
+ @patch('salt.modules.dpkg._get_pkg_license', MagicMock(return_value='BSD v3'))
+ def test_info(self):
+ '''
+ Test info
+ :return:
+ '''
+ ret = dpkg.info('emacs')
+
+ assert isinstance(ret, dict)
+ assert len(ret.keys()) == 1
+ assert 'emacs' in ret
+
+ pkg_data = ret['emacs']
+
+ assert isinstance(pkg_data, dict)
+ for pkg_section in ['section', 'architecture', 'original-maintainer', 'maintainer', 'package', 'installed-size',
+ 'build_date_time_t', 'sha256', 'origin', 'build_date', 'size', 'source', 'version',
+ 'install_date_time_t', 'license', 'priority', 'description', 'md5sum', 'supported',
+ 'filename', 'sha1', 'install_date', 'arch']:
+ assert pkg_section in pkg_data
+
+ assert pkg_data['section'] == 'editors'
+ assert pkg_data['maintainer'] == 'Simpsons Developers <simpsons-devel-discuss@lists.springfield.org>'
+ assert pkg_data['license'] == 'BSD v3'
+
+ @patch('salt.modules.dpkg._get_pkg_ds_avail', MagicMock(return_value=dselect_pkg))
+ @patch('salt.modules.dpkg._get_pkg_info', MagicMock(return_value=pkgs_info))
+ @patch('salt.modules.dpkg._get_pkg_license', MagicMock(return_value='BSD v3'))
+ def test_info_attr(self):
+ '''
+ Test info with 'attr' parameter
+ :return:
+ '''
+ ret = dpkg.info('emacs', attr='arch,license,version')
+ assert isinstance(ret, dict)
+ assert 'emacs' in ret
+ for attr in ['arch', 'license', 'version']:
+ assert attr in ret['emacs']
+
+ assert ret['emacs']['arch'] == 'all'
+ assert ret['emacs']['license'] == 'BSD v3'
+ assert ret['emacs']['version'] == '46.1'
--
2.19.1

View File

@ -0,0 +1,54 @@
From 5eacdf8fef35cdd05cae1b65485b3f820c86bc68 Mon Sep 17 00:00:00 2001
From: Bo Maryniuk <bo@suse.de>
Date: Tue, 4 Dec 2018 16:39:08 +0100
Subject: [PATCH] Decide if the source should be actually skipped
---
salt/modules/aptpkg.py | 23 ++++++++++++++++++++++-
1 file changed, 22 insertions(+), 1 deletion(-)
diff --git a/salt/modules/aptpkg.py b/salt/modules/aptpkg.py
index dc27903230..42d606926f 100644
--- a/salt/modules/aptpkg.py
+++ b/salt/modules/aptpkg.py
@@ -1698,6 +1698,27 @@ def list_repo_pkgs(*args, **kwargs): # pylint: disable=unused-import
return ret
+def _skip_source(source):
+ '''
+ Decide to skip source or not.
+
+ :param source:
+ :return:
+ '''
+ if source.invalid:
+ if source.uri and source.type and source.type in ("deb", "deb-src", "rpm", "rpm-src"):
+ pieces = source.mysplit(source.line)
+ if pieces[1].strip()[0] == "[":
+ options = pieces.pop(1).strip("[]").split()
+ if len(options) > 0:
+ log.debug("Source %s will be included although is marked invalid", source.uri)
+ return False
+ return True
+ else:
+ return True
+ return False
+
+
def list_repos():
'''
Lists all repos in the sources.list (and sources.lists.d) files
@@ -1713,7 +1734,7 @@ def list_repos():
repos = {}
sources = sourceslist.SourcesList()
for source in sources.list:
- if source.invalid:
+ if _skip_source(source):
continue
repo = {}
repo['file'] = source.file
--
2.20.1

View File

@ -0,0 +1,73 @@
From 7727ab13e3492b722b316469cc912d9dd64f063e Mon Sep 17 00:00:00 2001
From: Bo Maryniuk <bo@suse.de>
Date: Fri, 21 Sep 2018 17:31:39 +0200
Subject: [PATCH] Do not load pip state if there is no 3rd party
dependencies
Safe import 3rd party dependency
---
salt/modules/pip.py | 12 ++++++++++--
salt/states/pip_state.py | 9 +++++----
2 files changed, 15 insertions(+), 6 deletions(-)
diff --git a/salt/modules/pip.py b/salt/modules/pip.py
index f1a2e42433..85844f098b 100644
--- a/salt/modules/pip.py
+++ b/salt/modules/pip.py
@@ -79,7 +79,10 @@ from __future__ import absolute_import, print_function, unicode_literals
# Import python libs
import logging
import os
-import pkg_resources
+try:
+ import pkg_resources
+except ImportError:
+ pkg_resources = None
import re
import shutil
import sys
@@ -116,7 +119,12 @@ def __virtual__():
entire filesystem. If it's not installed in a conventional location, the
user is required to provide the location of pip each time it is used.
'''
- return 'pip'
+ if pkg_resources is None:
+ ret = False, 'Package dependency "pkg_resource" is missing'
+ else:
+ ret = 'pip'
+
+ return ret
def _clear_context(bin_env=None):
diff --git a/salt/states/pip_state.py b/salt/states/pip_state.py
index ab58fbd5fc..afe41d7fc8 100644
--- a/salt/states/pip_state.py
+++ b/salt/states/pip_state.py
@@ -23,7 +23,10 @@ requisite to a pkg.installed state for the package which provides pip
from __future__ import absolute_import, print_function, unicode_literals
import re
import logging
-import pkg_resources
+try:
+ import pkg_resources
+except ImportError:
+ pkg_resources = None
# Import salt libs
import salt.utils.versions
@@ -71,9 +74,7 @@ def __virtual__():
'''
Only load if the pip module is available in __salt__
'''
- if 'pip.list' in __salt__:
- return __virtualname__
- return False
+ return 'pip.list' in __salt__ and __virtualname__ or False
def _find_key(prefix, pip_list):
--
2.19.0

View File

@ -0,0 +1,33 @@
From 34089db15e7d3a1e361789f04613d0a13138dea0 Mon Sep 17 00:00:00 2001
From: rallytime <nicole@saltstack.com>
Date: Fri, 13 Jul 2018 12:42:46 -0400
Subject: [PATCH] Don't error on retcode 0 in
libcrypto.OPENSSL_init_crypto call
Fixes #46884
---
salt/utils/rsax931.py | 7 +++----
1 file changed, 3 insertions(+), 4 deletions(-)
diff --git a/salt/utils/rsax931.py b/salt/utils/rsax931.py
index 168c02734b..6bfef41bd3 100644
--- a/salt/utils/rsax931.py
+++ b/salt/utils/rsax931.py
@@ -71,10 +71,9 @@ def _init_libcrypto():
libcrypto.RSA_public_decrypt.argtypes = (c_int, c_char_p, c_char_p, c_void_p, c_int)
try:
- if libcrypto.OPENSSL_init_crypto(OPENSSL_INIT_NO_LOAD_CONFIG |
- OPENSSL_INIT_ADD_ALL_CIPHERS |
- OPENSSL_INIT_ADD_ALL_DIGESTS, None) != 1:
- raise OSError("Failed to initialize OpenSSL library (OPENSSL_init_crypto failed)")
+ libcrypto.OPENSSL_init_crypto(OPENSSL_INIT_NO_LOAD_CONFIG |
+ OPENSSL_INIT_ADD_ALL_CIPHERS |
+ OPENSSL_INIT_ADD_ALL_DIGESTS, None)
except AttributeError:
# Support for OpenSSL < 1.1 (OPENSSL_API_COMPAT < 0x10100000L)
libcrypto.OPENSSL_no_config()
--
2.19.2

File diff suppressed because it is too large Load Diff

View File

@ -0,0 +1,35 @@
From b276ee7373e88d05c01912a9d9d3a44a5d17bab6 Mon Sep 17 00:00:00 2001
From: Daniel Wallace <danielwallace@gtmanfred.com>
Date: Mon, 13 Aug 2018 13:55:37 -0500
Subject: [PATCH] fix async call to process manager
---
salt/minion.py | 4 ++--
1 file changed, 2 insertions(+), 2 deletions(-)
diff --git a/salt/minion.py b/salt/minion.py
index 9c05a646ea..8b8fd797d1 100644
--- a/salt/minion.py
+++ b/salt/minion.py
@@ -923,7 +923,7 @@ class MinionManager(MinionBase):
install_zmq()
self.io_loop = ZMQDefaultLoop.current()
self.process_manager = ProcessManager(name='MultiMinionProcessManager')
- self.io_loop.spawn_callback(self.process_manager.run, **{'async': True}) # Tornado backward compat
+ self.io_loop.spawn_callback(self.process_manager.run, **{'asynchronous': True}) # Tornado backward compat
def __del__(self):
self.destroy()
@@ -1120,7 +1120,7 @@ class Minion(MinionBase):
time.sleep(sleep_time)
self.process_manager = ProcessManager(name='MinionProcessManager')
- self.io_loop.spawn_callback(self.process_manager.run, **{'async': True})
+ self.io_loop.spawn_callback(self.process_manager.run, **{'asynchronous': True})
# We don't have the proxy setup yet, so we can't start engines
# Engines need to be able to access __proxy__
if not salt.utils.platform.is_proxy():
--
2.17.1

View File

@ -0,0 +1,328 @@
From 49f8f296edf4655e2be7e564745931692ae939b7 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Pablo=20Su=C3=A1rez=20Hern=C3=A1ndez?=
<psuarezhernandez@suse.com>
Date: Tue, 6 Nov 2018 16:38:54 +0000
Subject: [PATCH] Fix git_pillar merging across multiple __env__
repositories (bsc#1112874)
Resolve target branch when using __env__
Test git ext_pillar across multiple repos using __env__
Remove unicode references
---
salt/utils/gitfs.py | 2 +-
tests/integration/pillar/test_git_pillar.py | 144 ++++++++++++++++++++
tests/support/gitfs.py | 66 ++++++++-
3 files changed, 209 insertions(+), 3 deletions(-)
diff --git a/salt/utils/gitfs.py b/salt/utils/gitfs.py
index 6963f40226..11af741e35 100644
--- a/salt/utils/gitfs.py
+++ b/salt/utils/gitfs.py
@@ -2975,7 +2975,7 @@ class GitPillar(GitBase):
if repo.env:
env = repo.env
else:
- env = 'base' if repo.branch == repo.base else repo.branch
+ env = 'base' if repo.branch == repo.base else repo.get_checkout_target()
if repo._mountpoint:
if self.link_mountpoint(repo):
self.pillar_dirs[repo.linkdir] = env
diff --git a/tests/integration/pillar/test_git_pillar.py b/tests/integration/pillar/test_git_pillar.py
index e97e720bab..e052782311 100644
--- a/tests/integration/pillar/test_git_pillar.py
+++ b/tests/integration/pillar/test_git_pillar.py
@@ -358,6 +358,38 @@ class GitPythonMixin(object):
"available on the salt master"]}
)
+ def test_includes_enabled_solves___env___with_mountpoint(self):
+ '''
+ Test with git_pillar_includes enabled and using "__env__" as the branch
+ name for the configured repositories.
+ The "gitinfo" repository contains top.sls file with a local reference
+ and also referencing external "nowhere.foo" which is provided by "webinfo"
+ repository mounted as "nowhere".
+ '''
+ ret = self.get_pillar('''\
+ file_ignore_regex: []
+ file_ignore_glob: []
+ git_pillar_provider: gitpython
+ cachedir: {cachedir}
+ extension_modules: {extmods}
+ ext_pillar:
+ - git:
+ - __env__ {url_extra_repo}:
+ - name: gitinfo
+ - __env__ {url}:
+ - name: webinfo
+ - mountpoint: nowhere
+ ''')
+ self.assertEqual(
+ ret,
+ {'branch': 'master',
+ 'motd': 'The force will be with you. Always.',
+ 'mylist': ['master'],
+ 'mydict': {'master': True,
+ 'nested_list': ['master'],
+ 'nested_dict': {'master': True}}}
+ )
+
@destructiveTest
@skipIf(NO_MOCK, NO_MOCK_REASON)
@@ -413,7 +445,12 @@ class TestGitPythonAuthenticatedHTTP(TestGitPythonHTTP, GitPythonMixin):
username=cls.username,
password=cls.password,
port=cls.nginx_port)
+ cls.url_extra_repo = 'http://{username}:{password}@127.0.0.1:{port}/extra_repo.git'.format(
+ username=cls.username,
+ password=cls.password,
+ port=cls.nginx_port)
cls.ext_opts['url'] = cls.url
+ cls.ext_opts['url_extra_repo'] = cls.url_extra_repo
cls.ext_opts['username'] = cls.username
cls.ext_opts['password'] = cls.password
@@ -1192,6 +1229,40 @@ class TestPygit2SSH(GitPillarSSHTestBase):
''')
self.assertEqual(ret, expected)
+ def test_includes_enabled_solves___env___with_mountpoint(self):
+ '''
+ Test with git_pillar_includes enabled and using "__env__" as the branch
+ name for the configured repositories.
+ The "gitinfo" repository contains top.sls file with a local reference
+ and also referencing external "nowhere.foo" which is provided by "webinfo"
+ repository mounted as "nowhere".
+ '''
+ ret = self.get_pillar('''\
+ file_ignore_regex: []
+ file_ignore_glob: []
+ git_pillar_provider: pygit2
+ git_pillar_pubkey: {pubkey_nopass}
+ git_pillar_privkey: {privkey_nopass}
+ cachedir: {cachedir}
+ extension_modules: {extmods}
+ ext_pillar:
+ - git:
+ - __env__ {url_extra_repo}:
+ - name: gitinfo
+ - __env__ {url}:
+ - name: webinfo
+ - mountpoint: nowhere
+ ''')
+ self.assertEqual(
+ ret,
+ {'branch': 'master',
+ 'motd': 'The force will be with you. Always.',
+ 'mylist': ['master'],
+ 'mydict': {'master': True,
+ 'nested_list': ['master'],
+ 'nested_dict': {'master': True}}}
+ )
+
@skipIf(NO_MOCK, NO_MOCK_REASON)
@skipIf(_windows_or_mac(), 'minion is windows or mac')
@@ -1439,6 +1510,38 @@ class TestPygit2HTTP(GitPillarHTTPTestBase):
''')
self.assertEqual(ret, expected)
+ def test_includes_enabled_solves___env___with_mountpoint(self):
+ '''
+ Test with git_pillar_includes enabled and using "__env__" as the branch
+ name for the configured repositories.
+ The "gitinfo" repository contains top.sls file with a local reference
+ and also referencing external "nowhere.foo" which is provided by "webinfo"
+ repository mounted as "nowhere".
+ '''
+ ret = self.get_pillar('''\
+ file_ignore_regex: []
+ file_ignore_glob: []
+ git_pillar_provider: pygit2
+ cachedir: {cachedir}
+ extension_modules: {extmods}
+ ext_pillar:
+ - git:
+ - __env__ {url_extra_repo}:
+ - name: gitinfo
+ - __env__ {url}:
+ - name: webinfo
+ - mountpoint: nowhere
+ ''')
+ self.assertEqual(
+ ret,
+ {'branch': 'master',
+ 'motd': 'The force will be with you. Always.',
+ 'mylist': ['master'],
+ 'mydict': {'master': True,
+ 'nested_list': ['master'],
+ 'nested_dict': {'master': True}}}
+ )
+
@skipIf(NO_MOCK, NO_MOCK_REASON)
@skipIf(_windows_or_mac(), 'minion is windows or mac')
@@ -1887,3 +1990,44 @@ class TestPygit2AuthenticatedHTTP(GitPillarHTTPTestBase):
- env: base
''')
self.assertEqual(ret, expected)
+
+ def test_includes_enabled_solves___env___with_mountpoint(self):
+ '''
+ Test with git_pillar_includes enabled and using "__env__" as the branch
+ name for the configured repositories.
+ The "gitinfo" repository contains top.sls file with a local reference
+ and also referencing external "nowhere.foo" which is provided by "webinfo"
+ repository mounted as "nowhere".
+ '''
+ ret = self.get_pillar('''\
+ file_ignore_regex: []
+ file_ignore_glob: []
+ git_pillar_provider: pygit2
+ git_pillar_user: {user}
+ git_pillar_password: {password}
+ git_pillar_insecure_auth: True
+ cachedir: {cachedir}
+ extension_modules: {extmods}
+ ext_pillar:
+ - git:
+ - __env__ {url_extra_repo}:
+ - name: gitinfo
+ - user: {user}
+ - password: {password}
+ - insecure_auth: True
+ - __env__ {url}:
+ - name: webinfo
+ - mountpoint: nowhere
+ - user: {user}
+ - password: {password}
+ - insecure_auth: True
+ ''')
+ self.assertEqual(
+ ret,
+ {'branch': 'master',
+ 'motd': 'The force will be with you. Always.',
+ 'mylist': ['master'],
+ 'mydict': {'master': True,
+ 'nested_list': ['master'],
+ 'nested_dict': {'master': True}}}
+ )
diff --git a/tests/support/gitfs.py b/tests/support/gitfs.py
index 2afd31539d..e645c50a86 100644
--- a/tests/support/gitfs.py
+++ b/tests/support/gitfs.py
@@ -133,9 +133,13 @@ class SSHDMixin(ModuleCase, ProcessManager, SaltReturnAssertsMixin):
cls.url = 'ssh://{username}@127.0.0.1:{port}/~/repo.git'.format(
username=cls.username,
port=cls.sshd_port)
+ cls.url_extra_repo = 'ssh://{username}@127.0.0.1:{port}/~/extra_repo.git'.format(
+ username=cls.username,
+ port=cls.sshd_port)
home = '/root/.ssh'
cls.ext_opts = {
'url': cls.url,
+ 'url_extra_repo': cls.url_extra_repo,
'privkey_nopass': os.path.join(home, cls.id_rsa_nopass),
'pubkey_nopass': os.path.join(home, cls.id_rsa_nopass + '.pub'),
'privkey_withpass': os.path.join(home, cls.id_rsa_withpass),
@@ -193,7 +197,8 @@ class WebserverMixin(ModuleCase, ProcessManager, SaltReturnAssertsMixin):
# get_unused_localhost_port() return identical port numbers.
cls.uwsgi_port = get_unused_localhost_port()
cls.url = 'http://127.0.0.1:{port}/repo.git'.format(port=cls.nginx_port)
- cls.ext_opts = {'url': cls.url}
+ cls.url_extra_repo = 'http://127.0.0.1:{port}/extra_repo.git'.format(port=cls.nginx_port)
+ cls.ext_opts = {'url': cls.url, 'url_extra_repo': cls.url_extra_repo}
# Add auth params if present (if so this will trigger the spawned
# server to turn on HTTP basic auth).
for credential_param in ('user', 'password'):
@@ -250,7 +255,7 @@ class GitTestBase(ModuleCase):
Base class for all gitfs/git_pillar tests. Must be subclassed and paired
with either SSHDMixin or WebserverMixin to provide the server.
'''
- case = port = bare_repo = admin_repo = None
+ case = port = bare_repo = base_extra_repo = admin_repo = admin_extra_repo = None
maxDiff = None
git_opts = '-c user.name="Foo Bar" -c user.email=foo@bar.com'
ext_opts = {}
@@ -465,6 +470,61 @@ class GitPillarTestBase(GitTestBase, LoaderModuleMockMixin):
'''))
_push('top_only', 'add top_only branch')
+ def make_extra_repo(self, root_dir, user='root'):
+ self.bare_extra_repo = os.path.join(root_dir, 'extra_repo.git')
+ self.admin_extra_repo = os.path.join(root_dir, 'admin_extra')
+
+ for dirname in (self.bare_extra_repo, self.admin_extra_repo):
+ shutil.rmtree(dirname, ignore_errors=True)
+
+ # Create bare extra repo
+ self.run_function(
+ 'git.init',
+ [self.bare_extra_repo],
+ user=user,
+ bare=True)
+
+ # Clone bare repo
+ self.run_function(
+ 'git.clone',
+ [self.admin_extra_repo],
+ url=self.bare_extra_repo,
+ user=user)
+
+ def _push(branch, message):
+ self.run_function(
+ 'git.add',
+ [self.admin_extra_repo, '.'],
+ user=user)
+ self.run_function(
+ 'git.commit',
+ [self.admin_extra_repo, message],
+ user=user,
+ git_opts=self.git_opts,
+ )
+ self.run_function(
+ 'git.push',
+ [self.admin_extra_repo],
+ remote='origin',
+ ref=branch,
+ user=user,
+ )
+
+ with salt.utils.files.fopen(
+ os.path.join(self.admin_extra_repo, 'top.sls'), 'w') as fp_:
+ fp_.write(textwrap.dedent('''\
+ "{{saltenv}}":
+ '*':
+ - motd
+ - nowhere.foo
+ '''))
+ with salt.utils.files.fopen(
+ os.path.join(self.admin_extra_repo, 'motd.sls'), 'w') as fp_:
+ fp_.write(textwrap.dedent('''\
+ motd: The force will be with you. Always.
+ '''))
+ _push('master', 'initial commit')
+
class GitPillarSSHTestBase(GitPillarTestBase, SSHDMixin):
'''
@@ -533,6 +593,7 @@ class GitPillarSSHTestBase(GitPillarTestBase, SSHDMixin):
)
)
self.make_repo(root_dir, user=self.username)
+ self.make_extra_repo(root_dir, user=self.username)
def get_pillar(self, ext_pillar_conf):
'''
@@ -579,3 +640,4 @@ class GitPillarHTTPTestBase(GitPillarTestBase, WebserverMixin):
self.spawn_server() # pylint: disable=E1120
self.make_repo(self.repo_dir)
+ self.make_extra_repo(self.repo_dir)
--
2.17.1

View File

@ -0,0 +1,37 @@
From 5502f05fac89330ab26d04e29d3aa6d36ab928c5 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Pablo=20Su=C3=A1rez=20Hern=C3=A1ndez?=
<psuarezhernandez@suse.com>
Date: Thu, 20 Sep 2018 11:51:58 +0100
Subject: [PATCH] Fix index error when running on Python 3
Fix wrong queryformat for zypper list_provides
---
salt/modules/zypper.py | 4 ++--
1 file changed, 2 insertions(+), 2 deletions(-)
diff --git a/salt/modules/zypper.py b/salt/modules/zypper.py
index 695bce4f4e..e4423cf1fc 100644
--- a/salt/modules/zypper.py
+++ b/salt/modules/zypper.py
@@ -2314,7 +2314,7 @@ def list_provides(**kwargs):
'''
ret = __context__.get('pkg.list_provides')
if not ret:
- cmd = ['rpm', '-qa', '--queryformat', '[%{PROVIDES}_|-%{NAME}\n]']
+ cmd = ['rpm', '-qa', '--queryformat', '%{PROVIDES}_|-%{NAME}\n']
ret = dict()
for line in __salt__['cmd.run'](cmd, output_loglevel='trace', python_shell=False).splitlines():
provide, realname = line.split('_|-')
@@ -2379,7 +2379,7 @@ def resolve_capabilities(pkgs, refresh, **kwargs):
try:
result = search(name, provides=True, match='exact')
if len(result) == 1:
- name = result.keys()[0]
+ name = next(iter(result.keys()))
elif len(result) > 1:
log.warn("Found ambiguous match for capability '%s'.", pkg)
except CommandExecutionError as exc:
--
2.17.1

View File

@ -0,0 +1,659 @@
From 0509f0b0f1e880e7651e2a33cf5b70ef1930a3ff Mon Sep 17 00:00:00 2001
From: Bo Maryniuk <bo@suse.de>
Date: Fri, 28 Sep 2018 15:22:33 +0200
Subject: [PATCH] Fix IPv6 scope (bsc#1108557)
Fix ipaddress imports
Remove unused import
Fix ipaddress import
Fix unicode imports in compat
Override standard IPv6Address class
Check version via object
Isolate Py2 and Py3 mode
Add logging
Add debugging to the ip_address method (py2 and py3)
Remove multiple returns and add check for address syntax
Remove unnecessary variable for import detection
Remove duplicated code
Remove unnecessary operator
Remove multiple returns
Use ternary operator instead
Remove duplicated code
Move docstrings to their native places
Add real exception message
Add logging to the ip_interface
Add scope on str
Lintfix: mute not called constructors
Add extra detection for hexadecimal packed bytes on Python2. This cannot be detected with type comparison, because bytes == str and at the same time bytes != str if compatibility is not around
Fix py2 case where the same class cannot initialise itself on Python2 via super.
Simplify checking clause
Do not use introspection for method swap
Fix wrong type swap
Add Py3.4 old implementation's fix
Lintfix
Lintfix refactor: remove duplicate returns as not needed
Revert method remapping with pylint updates
Remove unnecessary manipulation with IPv6 scope outside of the IPv6Address object instance
Lintfix: W0611
Reverse skipping tests: if no ipaddress
---
salt/_compat.py | 287 +++++++++++++++++++++++------
salt/cloud/clouds/saltify.py | 5 +-
salt/cloud/clouds/vagrant.py | 9 +-
salt/ext/win_inet_pton.py | 2 +-
salt/minion.py | 5 +-
salt/modules/ipset.py | 5 +-
salt/modules/network.py | 5 +-
salt/modules/vagrant.py | 6 +-
salt/utils/dns.py | 11 +-
salt/utils/minions.py | 5 +-
tests/unit/grains/test_core.py | 5 +-
tests/unit/modules/test_network.py | 15 +-
12 files changed, 245 insertions(+), 115 deletions(-)
diff --git a/salt/_compat.py b/salt/_compat.py
index 9b10646ace..0576210afc 100644
--- a/salt/_compat.py
+++ b/salt/_compat.py
@@ -2,18 +2,21 @@
'''
Salt compatibility code
'''
-# pylint: disable=import-error,unused-import,invalid-name
+# pylint: disable=import-error,unused-import,invalid-name,W0231,W0233
# Import python libs
-from __future__ import absolute_import
+from __future__ import absolute_import, unicode_literals, print_function
import sys
import types
+import logging
# Import 3rd-party libs
-from salt.ext.six import binary_type, string_types, text_type
+from salt.exceptions import SaltException
+from salt.ext.six import binary_type, string_types, text_type, integer_types
from salt.ext.six.moves import cStringIO, StringIO
-HAS_XML = True
+log = logging.getLogger(__name__)
+
try:
# Python >2.5
import xml.etree.cElementTree as ElementTree
@@ -31,11 +34,10 @@ except Exception:
import elementtree.ElementTree as ElementTree
except Exception:
ElementTree = None
- HAS_XML = False
# True if we are running on Python 3.
-PY3 = sys.version_info[0] == 3
+PY3 = sys.version_info.major == 3
if PY3:
@@ -45,13 +47,12 @@ else:
import exceptions
-if HAS_XML:
+if ElementTree is not None:
if not hasattr(ElementTree, 'ParseError'):
class ParseError(Exception):
'''
older versions of ElementTree do not have ParseError
'''
- pass
ElementTree.ParseError = ParseError
@@ -61,9 +62,7 @@ def text_(s, encoding='latin-1', errors='strict'):
If ``s`` is an instance of ``binary_type``, return
``s.decode(encoding, errors)``, otherwise return ``s``
'''
- if isinstance(s, binary_type):
- return s.decode(encoding, errors)
- return s
+ return s.decode(encoding, errors) if isinstance(s, binary_type) else s
def bytes_(s, encoding='latin-1', errors='strict'):
@@ -71,57 +70,37 @@ def bytes_(s, encoding='latin-1', errors='strict'):
If ``s`` is an instance of ``text_type``, return
``s.encode(encoding, errors)``, otherwise return ``s``
'''
- if isinstance(s, text_type):
- return s.encode(encoding, errors)
- return s
+ return s.encode(encoding, errors) if isinstance(s, text_type) else s
-if PY3:
- def ascii_native_(s):
- if isinstance(s, text_type):
- s = s.encode('ascii')
- return str(s, 'ascii', 'strict')
-else:
- def ascii_native_(s):
- if isinstance(s, text_type):
- s = s.encode('ascii')
- return str(s)
+def ascii_native_(s):
+ '''
+ Python 3: If ``s`` is an instance of ``text_type``, return
+ ``s.encode('ascii')``, otherwise return ``str(s, 'ascii', 'strict')``
-ascii_native_.__doc__ = '''
-Python 3: If ``s`` is an instance of ``text_type``, return
-``s.encode('ascii')``, otherwise return ``str(s, 'ascii', 'strict')``
+ Python 2: If ``s`` is an instance of ``text_type``, return
+ ``s.encode('ascii')``, otherwise return ``str(s)``
+ '''
+ if isinstance(s, text_type):
+ s = s.encode('ascii')
-Python 2: If ``s`` is an instance of ``text_type``, return
-``s.encode('ascii')``, otherwise return ``str(s)``
-'''
+ return str(s, 'ascii', 'strict') if PY3 else s
-if PY3:
- def native_(s, encoding='latin-1', errors='strict'):
- '''
- If ``s`` is an instance of ``text_type``, return
- ``s``, otherwise return ``str(s, encoding, errors)``
- '''
- if isinstance(s, text_type):
- return s
- return str(s, encoding, errors)
-else:
- def native_(s, encoding='latin-1', errors='strict'):
- '''
- If ``s`` is an instance of ``text_type``, return
- ``s.encode(encoding, errors)``, otherwise return ``str(s)``
- '''
- if isinstance(s, text_type):
- return s.encode(encoding, errors)
- return str(s)
+def native_(s, encoding='latin-1', errors='strict'):
+ '''
+ Python 3: If ``s`` is an instance of ``text_type``, return ``s``, otherwise
+ return ``str(s, encoding, errors)``
-native_.__doc__ = '''
-Python 3: If ``s`` is an instance of ``text_type``, return ``s``, otherwise
-return ``str(s, encoding, errors)``
+ Python 2: If ``s`` is an instance of ``text_type``, return
+ ``s.encode(encoding, errors)``, otherwise return ``str(s)``
+ '''
+ if PY3:
+ out = s if isinstance(s, text_type) else str(s, encoding, errors)
+ else:
+ out = s.encode(encoding, errors) if isinstance(s, text_type) else str(s)
-Python 2: If ``s`` is an instance of ``text_type``, return
-``s.encode(encoding, errors)``, otherwise return ``str(s)``
-'''
+ return out
def string_io(data=None): # cStringIO can't handle unicode
@@ -133,7 +112,199 @@ def string_io(data=None): # cStringIO can't handle unicode
except (UnicodeEncodeError, TypeError):
return StringIO(data)
-if PY3:
- import ipaddress
-else:
- import salt.ext.ipaddress as ipaddress
+
+try:
+ if PY3:
+ import ipaddress
+ else:
+ import salt.ext.ipaddress as ipaddress
+except ImportError:
+ ipaddress = None
+
+
+class IPv6AddressScoped(ipaddress.IPv6Address):
+ '''
+ Represent and manipulate single IPv6 Addresses.
+ Scope-aware version
+ '''
+ def __init__(self, address):
+ '''
+ Instantiate a new IPv6 address object. Scope is moved to an attribute 'scope'.
+
+ Args:
+ address: A string or integer representing the IP
+
+ Additionally, an integer can be passed, so
+ IPv6Address('2001:db8::') == IPv6Address(42540766411282592856903984951653826560)
+ or, more generally
+ IPv6Address(int(IPv6Address('2001:db8::'))) == IPv6Address('2001:db8::')
+
+ Raises:
+ AddressValueError: If address isn't a valid IPv6 address.
+
+ :param address:
+ '''
+ # pylint: disable-all
+ if not hasattr(self, '_is_packed_binary'):
+ # This method (below) won't be around for some Python 3 versions
+ # and we need check this differently anyway
+ self._is_packed_binary = lambda p: isinstance(p, bytes)
+ # pylint: enable-all
+
+ if isinstance(address, string_types) and '%' in address:
+ buff = address.split('%')
+ if len(buff) != 2:
+ raise SaltException('Invalid IPv6 address: "{}"'.format(address))
+ address, self.__scope = buff
+ else:
+ self.__scope = None
+
+ if sys.version_info.major == 2:
+ ipaddress._BaseAddress.__init__(self, address)
+ ipaddress._BaseV6.__init__(self, address)
+ else:
+ # Python 3.4 fix. Versions higher are simply not affected
+ # https://github.com/python/cpython/blob/3.4/Lib/ipaddress.py#L543-L544
+ self._version = 6
+ self._max_prefixlen = ipaddress.IPV6LENGTH
+
+ # Efficient constructor from integer.
+ if isinstance(address, integer_types):
+ self._check_int_address(address)
+ self._ip = address
+ elif self._is_packed_binary(address):
+ self._check_packed_address(address, 16)
+ self._ip = ipaddress._int_from_bytes(address, 'big')
+ else:
+ address = str(address)
+ if '/' in address:
+ raise ipaddress.AddressValueError("Unexpected '/' in {}".format(address))
+ self._ip = self._ip_int_from_string(address)
+
+ def _is_packed_binary(self, data):
+ '''
+ Check if data is hexadecimal packed
+
+ :param data:
+ :return:
+ '''
+ packed = False
+ if len(data) == 16 and ':' not in data:
+ try:
+ packed = bool(int(str(bytearray(data)).encode('hex'), 16))
+ except ValueError:
+ pass
+
+ return packed
+
+ @property
+ def scope(self):
+ '''
+ Return scope of IPv6 address.
+
+ :return:
+ '''
+ return self.__scope
+
+ def __str__(self):
+ return text_type(self._string_from_ip_int(self._ip) +
+ ('%' + self.scope if self.scope is not None else ''))
+
+
+class IPv6InterfaceScoped(ipaddress.IPv6Interface, IPv6AddressScoped):
+    '''
+    Scope-aware version of ipaddress.IPv6Interface.
+    '''
+ def __init__(self, address):
+ if isinstance(address, (bytes, int)):
+ IPv6AddressScoped.__init__(self, address)
+ self.network = ipaddress.IPv6Network(self._ip)
+ self._prefixlen = self._max_prefixlen
+ return
+
+ addr = ipaddress._split_optional_netmask(address)
+ IPv6AddressScoped.__init__(self, addr[0])
+ self.network = ipaddress.IPv6Network(address, strict=False)
+ self.netmask = self.network.netmask
+ self._prefixlen = self.network._prefixlen
+ self.hostmask = self.network.hostmask
+
+
+def ip_address(address):
+ """Take an IP string/int and return an object of the correct type.
+
+ Args:
+ address: A string or integer, the IP address. Either IPv4 or
+ IPv6 addresses may be supplied; integers less than 2**32 will
+ be considered to be IPv4 by default.
+
+ Returns:
+ An IPv4Address or IPv6Address object.
+
+ Raises:
+ ValueError: if the *address* passed isn't either a v4 or a v6
+ address
+
+ """
+ try:
+ return ipaddress.IPv4Address(address)
+ except (ipaddress.AddressValueError, ipaddress.NetmaskValueError) as err:
+ log.debug('Error while parsing IPv4 address: %s', address)
+ log.debug(err)
+
+ try:
+ return IPv6AddressScoped(address)
+ except (ipaddress.AddressValueError, ipaddress.NetmaskValueError) as err:
+ log.debug('Error while parsing IPv6 address: %s', address)
+ log.debug(err)
+
+ if isinstance(address, bytes):
+ raise ipaddress.AddressValueError('{} does not appear to be an IPv4 or IPv6 address. '
+ 'Did you pass in a bytes (str in Python 2) instead '
+ 'of a unicode object?'.format(repr(address)))
+
+ raise ValueError('{} does not appear to be an IPv4 or IPv6 address'.format(repr(address)))
+
+
+def ip_interface(address):
+ """Take an IP string/int and return an object of the correct type.
+
+ Args:
+ address: A string or integer, the IP address. Either IPv4 or
+ IPv6 addresses may be supplied; integers less than 2**32 will
+ be considered to be IPv4 by default.
+
+ Returns:
+ An IPv4Interface or IPv6Interface object.
+
+ Raises:
+ ValueError: if the string passed isn't either a v4 or a v6
+ address.
+
+ Notes:
+ The IPv?Interface classes describe an Address on a particular
+ Network, so they're basically a combination of both the Address
+ and Network classes.
+
+ """
+ try:
+ return ipaddress.IPv4Interface(address)
+ except (ipaddress.AddressValueError, ipaddress.NetmaskValueError) as err:
+ log.debug('Error while getting IPv4 interface for address %s', address)
+ log.debug(err)
+
+ try:
+ return ipaddress.IPv6Interface(address)
+ except (ipaddress.AddressValueError, ipaddress.NetmaskValueError) as err:
+ log.debug('Error while getting IPv6 interface for address %s', address)
+ log.debug(err)
+
+ raise ValueError('{} does not appear to be an IPv4 or IPv6 interface'.format(address))
+
+
+if ipaddress:
+ ipaddress.IPv6Address = IPv6AddressScoped
+ if sys.version_info.major == 2:
+ ipaddress.IPv6Interface = IPv6InterfaceScoped
+ ipaddress.ip_address = ip_address
+ ipaddress.ip_interface = ip_interface
diff --git a/salt/cloud/clouds/saltify.py b/salt/cloud/clouds/saltify.py
index c9cc281b42..e0e56349a0 100644
--- a/salt/cloud/clouds/saltify.py
+++ b/salt/cloud/clouds/saltify.py
@@ -27,10 +27,7 @@ import salt.utils.cloud
import salt.config as config
import salt.client
import salt.ext.six as six
-if six.PY3:
- import ipaddress
-else:
- import salt.ext.ipaddress as ipaddress
+from salt._compat import ipaddress
from salt.exceptions import SaltCloudException, SaltCloudSystemExit
diff --git a/salt/cloud/clouds/vagrant.py b/salt/cloud/clouds/vagrant.py
index a24170c78a..0fe410eb91 100644
--- a/salt/cloud/clouds/vagrant.py
+++ b/salt/cloud/clouds/vagrant.py
@@ -25,13 +25,8 @@ import tempfile
import salt.utils
import salt.config as config
import salt.client
-import salt.ext.six as six
-if six.PY3:
- import ipaddress
-else:
- import salt.ext.ipaddress as ipaddress
-from salt.exceptions import SaltCloudException, SaltCloudSystemExit, \
- SaltInvocationError
+from salt._compat import ipaddress
+from salt.exceptions import SaltCloudException, SaltCloudSystemExit, SaltInvocationError
# Get logging started
log = logging.getLogger(__name__)
diff --git a/salt/ext/win_inet_pton.py b/salt/ext/win_inet_pton.py
index 1204bede10..89aba14ce9 100644
--- a/salt/ext/win_inet_pton.py
+++ b/salt/ext/win_inet_pton.py
@@ -9,7 +9,7 @@ from __future__ import absolute_import
import socket
import ctypes
import os
-import ipaddress
+from salt._compat import ipaddress
import salt.ext.six as six
diff --git a/salt/minion.py b/salt/minion.py
index 17e11c0ebe..9c05a646ea 100644
--- a/salt/minion.py
+++ b/salt/minion.py
@@ -26,10 +26,7 @@ from binascii import crc32
# Import Salt Libs
# pylint: disable=import-error,no-name-in-module,redefined-builtin
from salt.ext import six
-if six.PY3:
- import ipaddress
-else:
- import salt.ext.ipaddress as ipaddress
+from salt._compat import ipaddress
from salt.ext.six.moves import range
from salt.utils.zeromq import zmq, ZMQDefaultLoop, install_zmq, ZMQ_VERSION_INFO
diff --git a/salt/modules/ipset.py b/salt/modules/ipset.py
index 7047e84c29..1a0fa0044d 100644
--- a/salt/modules/ipset.py
+++ b/salt/modules/ipset.py
@@ -13,10 +13,7 @@ from salt.ext.six.moves import map, range
import salt.utils.path
# Import third-party libs
-if six.PY3:
- import ipaddress
-else:
- import salt.ext.ipaddress as ipaddress
+from salt._compat import ipaddress
# Set up logging
log = logging.getLogger(__name__)
diff --git a/salt/modules/network.py b/salt/modules/network.py
index 92893572a6..60f586f6bc 100644
--- a/salt/modules/network.py
+++ b/salt/modules/network.py
@@ -26,10 +26,7 @@ from salt.exceptions import CommandExecutionError
# Import 3rd-party libs
from salt.ext import six
from salt.ext.six.moves import range # pylint: disable=import-error,no-name-in-module,redefined-builtin
-if six.PY3:
- import ipaddress
-else:
- import salt.ext.ipaddress as ipaddress
+from salt._compat import ipaddress
log = logging.getLogger(__name__)
diff --git a/salt/modules/vagrant.py b/salt/modules/vagrant.py
index 0592dede55..0f518c2602 100644
--- a/salt/modules/vagrant.py
+++ b/salt/modules/vagrant.py
@@ -39,11 +39,7 @@ import salt.utils.path
import salt.utils.stringutils
from salt.exceptions import CommandExecutionError, SaltInvocationError
import salt.ext.six as six
-
-if six.PY3:
- import ipaddress
-else:
- import salt.ext.ipaddress as ipaddress
+from salt._compat import ipaddress
log = logging.getLogger(__name__)
diff --git a/salt/utils/dns.py b/salt/utils/dns.py
index db08bcb7ac..40011016fd 100644
--- a/salt/utils/dns.py
+++ b/salt/utils/dns.py
@@ -1029,18 +1029,13 @@ def parse_resolv(src='/etc/resolv.conf'):
try:
(directive, arg) = (line[0].lower(), line[1:])
# Drop everything after # or ; (comments)
- arg = list(itertools.takewhile(
- lambda x: x[0] not in ('#', ';'), arg))
-
+ arg = list(itertools.takewhile(lambda x: x[0] not in ('#', ';'), arg))
if directive == 'nameserver':
- # Split the scope (interface) if it is present
- addr, scope = arg[0].split('%', 1) if '%' in arg[0] else (arg[0], '')
+ addr = arg[0]
try:
ip_addr = ipaddress.ip_address(addr)
version = ip_addr.version
- # Rejoin scope after address validation
- if scope:
- ip_addr = '%'.join((str(ip_addr), scope))
+ ip_addr = str(ip_addr)
if ip_addr not in nameservers:
nameservers.append(ip_addr)
if version == 4 and ip_addr not in ip4_nameservers:
diff --git a/salt/utils/minions.py b/salt/utils/minions.py
index bb0cbaa589..f282464eee 100644
--- a/salt/utils/minions.py
+++ b/salt/utils/minions.py
@@ -26,10 +26,7 @@ import salt.cache
from salt.ext import six
# Import 3rd-party libs
-if six.PY3:
- import ipaddress
-else:
- import salt.ext.ipaddress as ipaddress
+from salt._compat import ipaddress
HAS_RANGE = False
try:
import seco.range # pylint: disable=import-error
diff --git a/tests/unit/grains/test_core.py b/tests/unit/grains/test_core.py
index dd7d5b06f8..e973428add 100644
--- a/tests/unit/grains/test_core.py
+++ b/tests/unit/grains/test_core.py
@@ -32,10 +32,7 @@ import salt.grains.core as core
# Import 3rd-party libs
from salt.ext import six
-if six.PY3:
- import ipaddress
-else:
- import salt.ext.ipaddress as ipaddress
+from salt._compat import ipaddress
log = logging.getLogger(__name__)
diff --git a/tests/unit/modules/test_network.py b/tests/unit/modules/test_network.py
index 865f15f3e3..50fa629276 100644
--- a/tests/unit/modules/test_network.py
+++ b/tests/unit/modules/test_network.py
@@ -20,20 +20,11 @@ from tests.support.mock import (
)
# Import Salt Libs
-from salt.ext import six
import salt.utils.network
import salt.utils.path
import salt.modules.network as network
from salt.exceptions import CommandExecutionError
-if six.PY2:
- import salt.ext.ipaddress as ipaddress
- HAS_IPADDRESS = True
-else:
- try:
- import ipaddress
- HAS_IPADDRESS = True
- except ImportError:
- HAS_IPADDRESS = False
+from salt._compat import ipaddress
@skipIf(NO_MOCK, NO_MOCK_REASON)
@@ -278,7 +269,7 @@ class NetworkTestCase(TestCase, LoaderModuleMockMixin):
self.assertDictEqual(network.connect('host', 'port'),
{'comment': ret, 'result': True})
- @skipIf(HAS_IPADDRESS is False, 'unable to import \'ipaddress\'')
+ @skipIf(not bool(ipaddress), 'unable to import \'ipaddress\'')
def test_is_private(self):
'''
Test for Check if the given IP address is a private address
@@ -290,7 +281,7 @@ class NetworkTestCase(TestCase, LoaderModuleMockMixin):
return_value=True):
self.assertTrue(network.is_private('::1'))
- @skipIf(HAS_IPADDRESS is False, 'unable to import \'ipaddress\'')
+ @skipIf(not bool(ipaddress), 'unable to import \'ipaddress\'')
def test_is_loopback(self):
'''
Test for Check if the given IP address is a loopback address
--
2.19.0

52
fix-issue-2068-test.patch Normal file
View File

@ -0,0 +1,52 @@
From 2916f2f3e7c6af07148863281ffaf07df21f21da Mon Sep 17 00:00:00 2001
From: Bo Maryniuk <bo@suse.de>
Date: Wed, 9 Jan 2019 16:08:19 +0100
Subject: [PATCH] Fix issue #2068 test
Skip injecting `__call__` if chunk is not dict.
This also fixes `integration/modules/test_state.py:StateModuleTest.test_exclude` that tests `include` and `exclude` state directives containing the only list of strings.
Minor update: more correct is-dict check.
---
salt/state.py | 9 ++++++---
1 file changed, 6 insertions(+), 3 deletions(-)
diff --git a/salt/state.py b/salt/state.py
index b4b2a00601..815ebaec24 100644
--- a/salt/state.py
+++ b/salt/state.py
@@ -25,6 +25,7 @@ import traceback
import re
import time
import random
+import collections
# Import salt libs
import salt.loader
@@ -2743,16 +2744,18 @@ class State(object):
'''
for chunk in high:
state = high[chunk]
+ if not isinstance(state, collections.Mapping):
+ continue
for state_ref in state:
needs_default = True
+ if not isinstance(state[state_ref], list):
+ continue
for argset in state[state_ref]:
if isinstance(argset, six.string_types):
needs_default = False
break
if needs_default:
- order = state[state_ref].pop(-1)
- state[state_ref].append('__call__')
- state[state_ref].append(order)
+ state[state_ref].insert(-1, '__call__')
def call_high(self, high, orchestration_jid=None):
'''
--
2.20.1

View File

@ -0,0 +1,941 @@
From 140388e51e5b5b7ee33b776269bce67046cce32f Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Pablo=20Su=C3=A1rez=20Hern=C3=A1ndez?=
<psuarezhernandez@suse.com>
Date: Tue, 4 Dec 2018 16:16:18 +0000
Subject: [PATCH] Fix latin1 encoding problems on file module
(bsc#1116837)
_get_line_indent renamed to _set_line_indent
_regex_to_static refactored to work on lists
line function refactored to work on list
Added _set_line_eol and _get_eol functions
Setting end of line
Make tests green
test_line_insert_end fixed
/sr.* pattern should raise exception
file.line function refactored
Make integration green. Added test for mode ensure insert before first line
Fixed file permissions
Removed regex compilation
Comprehensions converting to unicode replaced by salt.utils.data.decode_list
Empty match on delete or replace not causing IndexError exception
List comprehension replaced
Added comments
Add get_diff to salt.utils.stringutils
Make to_unicode/to_str/to_bytes helpers attempt latin-1
Also allow for multiple encodings to be passed
Use new get_diff helper in file module
Use BASE_FILES instead of redundant STATE_DIR
Add integration test for latin-1 file diffs
PY3 scoping fix
In PY3 the caught exceptions now drop out of scope when leaving the for
loop.
Add unit test for latin-1 fallback, multi-encoding
Skip pylint false-positives
Fix incorrect use of __salt__ when __utils__ is needed
Add stringutils.get_diff to mocks
Only try latin-1 from get_diff instead of by default
Fix to_unicode test
Since latin-1 is not being automatically decoded, we need to explicitly
pass it on the test.
Revert "Use BASE_FILES instead of redundant STATE_DIR"
This reverts commit ba524c81b6ae6091259157cec1259f5a7fb776c0.
---
salt/modules/file.py | 224 +++++++++---------
salt/modules/win_file.py | 14 +-
salt/utils/stringutils.py | 118 ++++++---
.../files/file/base/issue-48777/new.html | 5 +
.../files/file/base/issue-48777/old.html | 4 +
tests/integration/states/test_file.py | 23 ++
tests/unit/modules/test_file.py | 102 +++++++-
tests/unit/utils/test_stringutils.py | 14 ++
8 files changed, 348 insertions(+), 156 deletions(-)
create mode 100644 tests/integration/files/file/base/issue-48777/new.html
create mode 100644 tests/integration/files/file/base/issue-48777/old.html
diff --git a/salt/modules/file.py b/salt/modules/file.py
index 1b4b7e0e46..1ad0fef1ea 100644
--- a/salt/modules/file.py
+++ b/salt/modules/file.py
@@ -12,7 +12,6 @@ from __future__ import absolute_import, print_function, unicode_literals
# Import python libs
import datetime
-import difflib
import errno
import fileinput
import fnmatch
@@ -61,6 +60,7 @@ import salt.utils.stringutils
import salt.utils.templates
import salt.utils.url
import salt.utils.user
+import salt.utils.data
from salt.exceptions import CommandExecutionError, MinionError, SaltInvocationError, get_error_message as _get_error_message
from salt.utils.files import HASHES, HASHES_REVMAP
@@ -1570,7 +1570,7 @@ def comment_line(path,
check_perms(path, None, pre_user, pre_group, pre_mode)
# Return a diff using the two dictionaries
- return ''.join(difflib.unified_diff(orig_file, new_file))
+ return __utils__['stringutils.get_diff'](orig_file, new_file)
def _get_flags(flags):
@@ -1722,18 +1722,19 @@ def _regex_to_static(src, regex):
return None
try:
- src = re.search(regex, src, re.M)
+ compiled = re.compile(regex, re.DOTALL)
+ src = [line for line in src if compiled.search(line) or line.count(regex)]
except Exception as ex:
raise CommandExecutionError("{0}: '{1}'".format(_get_error_message(ex), regex))
- return src and src.group().rstrip('\r') or regex
+ return src and src or []
-def _assert_occurrence(src, probe, target, amount=1):
+def _assert_occurrence(probe, target, amount=1):
'''
Raise an exception, if there are different amount of specified occurrences in src.
'''
- occ = src.count(probe)
+ occ = len(probe)
if occ > amount:
msg = 'more than'
elif occ < amount:
@@ -1749,7 +1750,7 @@ def _assert_occurrence(src, probe, target, amount=1):
return occ
-def _get_line_indent(src, line, indent):
+def _set_line_indent(src, line, indent):
'''
Indent the line with the source line.
'''
@@ -1762,7 +1763,36 @@ def _get_line_indent(src, line, indent):
break
idt.append(c)
- return ''.join(idt) + line.strip()
+ return ''.join(idt) + line.lstrip()
+
+
+def _get_eol(line):
+ match = re.search('((?<!\r)\n|\r(?!\n)|\r\n)$', line)
+ return match and match.group() or ''
+
+
+def _set_line_eol(src, line):
+ '''
+ Add line ending
+ '''
+ line_ending = _get_eol(src) or os.linesep
+ return line.rstrip() + line_ending
+
+
+def _insert_line_before(idx, body, content, indent):
+ if not idx or (idx and _starts_till(body[idx - 1], content) < 0):
+ cnd = _set_line_indent(body[idx], content, indent)
+ body.insert(idx, cnd)
+ return body
+
+
+def _insert_line_after(idx, body, content, indent):
+ # No duplicates or append, if "after" is the last line
+ next_line = idx + 1 < len(body) and body[idx + 1] or None
+ if next_line is None or _starts_till(next_line, content) < 0:
+ cnd = _set_line_indent(body[idx], content, indent)
+ body.insert(idx + 1, cnd)
+ return body
def line(path, content=None, match=None, mode=None, location=None,
@@ -1893,132 +1923,110 @@ def line(path, content=None, match=None, mode=None, location=None,
match = content
with salt.utils.files.fopen(path, mode='r') as fp_:
- body = salt.utils.stringutils.to_unicode(fp_.read())
- body_before = hashlib.sha256(salt.utils.stringutils.to_bytes(body)).hexdigest()
+ body = salt.utils.data.decode_list(fp_.readlines())
+ body_before = hashlib.sha256(salt.utils.stringutils.to_bytes(''.join(body))).hexdigest()
+ # Add empty line at the end if last line ends with eol.
+ # Allows simpler code
+ if body and _get_eol(body[-1]):
+ body.append('')
+
after = _regex_to_static(body, after)
before = _regex_to_static(body, before)
match = _regex_to_static(body, match)
if os.stat(path).st_size == 0 and mode in ('delete', 'replace'):
log.warning('Cannot find text to {0}. File \'{1}\' is empty.'.format(mode, path))
- body = ''
- elif mode == 'delete':
- body = os.linesep.join([line for line in body.split(os.linesep) if line.find(match) < 0])
- elif mode == 'replace':
- body = os.linesep.join([(_get_line_indent(file_line, content, indent)
- if (file_line.find(match) > -1 and not file_line == content) else file_line)
- for file_line in body.split(os.linesep)])
+ body = []
+ elif mode == 'delete' and match:
+ body = [line for line in body if line != match[0]]
+ elif mode == 'replace' and match:
+ idx = body.index(match[0])
+ file_line = body.pop(idx)
+ body.insert(idx, _set_line_indent(file_line, content, indent))
elif mode == 'insert':
if not location and not before and not after:
raise CommandExecutionError('On insert must be defined either "location" or "before/after" conditions.')
if not location:
if before and after:
- _assert_occurrence(body, before, 'before')
- _assert_occurrence(body, after, 'after')
+ _assert_occurrence(before, 'before')
+ _assert_occurrence(after, 'after')
+
out = []
- lines = body.split(os.linesep)
in_range = False
- for line in lines:
- if line.find(after) > -1:
+ for line in body:
+ if line == after[0]:
in_range = True
- elif line.find(before) > -1 and in_range:
- out.append(_get_line_indent(line, content, indent))
+ elif line == before[0] and in_range:
+ cnd = _set_line_indent(line, content, indent)
+ out.append(cnd)
out.append(line)
- body = os.linesep.join(out)
+ body = out
if before and not after:
- _assert_occurrence(body, before, 'before')
- out = []
- lines = body.split(os.linesep)
- for idx in range(len(lines)):
- _line = lines[idx]
- if _line.find(before) > -1:
- cnd = _get_line_indent(_line, content, indent)
- if not idx or (idx and _starts_till(lines[idx - 1], cnd) < 0): # Job for replace instead
- out.append(cnd)
- out.append(_line)
- body = os.linesep.join(out)
+ _assert_occurrence(before, 'before')
+
+ idx = body.index(before[0])
+ body = _insert_line_before(idx, body, content, indent)
elif after and not before:
- _assert_occurrence(body, after, 'after')
- out = []
- lines = body.split(os.linesep)
- for idx, _line in enumerate(lines):
- out.append(_line)
- cnd = _get_line_indent(_line, content, indent)
- # No duplicates or append, if "after" is the last line
- if (_line.find(after) > -1 and
- (lines[((idx + 1) < len(lines)) and idx + 1 or idx].strip() != cnd or
- idx + 1 == len(lines))):
- out.append(cnd)
- body = os.linesep.join(out)
+ _assert_occurrence(after, 'after')
+
+ idx = body.index(after[0])
+ body = _insert_line_after(idx, body, content, indent)
else:
if location == 'start':
- body = os.linesep.join((content, body))
+ if body:
+ body.insert(0, _set_line_eol(body[0], content))
+ else:
+ body.append(content + os.linesep)
elif location == 'end':
- body = os.linesep.join((body, _get_line_indent(body[-1], content, indent) if body else content))
+ body.append(_set_line_indent(body[-1], content, indent) if body else content)
elif mode == 'ensure':
- after = after and after.strip()
- before = before and before.strip()
if before and after:
- _assert_occurrence(body, before, 'before')
- _assert_occurrence(body, after, 'after')
+ _assert_occurrence(before, 'before')
+ _assert_occurrence(after, 'after')
- is_there = bool(body.count(content))
+ is_there = bool([l for l in body if l.count(content)])
if not is_there:
- out = []
- body = body.split(os.linesep)
- for idx, line in enumerate(body):
- out.append(line)
- if line.find(content) > -1:
- is_there = True
- if not is_there:
- if idx < (len(body) - 1) and line.find(after) > -1 and body[idx + 1].find(before) > -1:
- out.append(content)
- elif line.find(after) > -1:
- raise CommandExecutionError('Found more than one line between '
- 'boundaries "before" and "after".')
- body = os.linesep.join(out)
+ idx = body.index(after[0])
+ if idx < (len(body) - 1) and body[idx + 1] == before[0]:
+ cnd = _set_line_indent(body[idx], content, indent)
+ body.insert(idx + 1, cnd)
+ else:
+ raise CommandExecutionError('Found more than one line between '
+ 'boundaries "before" and "after".')
elif before and not after:
- _assert_occurrence(body, before, 'before')
- body = body.split(os.linesep)
- out = []
- for idx in range(len(body)):
- if body[idx].find(before) > -1:
- prev = (idx > 0 and idx or 1) - 1
- out.append(_get_line_indent(body[idx], content, indent))
- if _starts_till(out[prev], content) > -1:
- del out[prev]
- out.append(body[idx])
- body = os.linesep.join(out)
+ _assert_occurrence(before, 'before')
+
+ idx = body.index(before[0])
+ body = _insert_line_before(idx, body, content, indent)
elif not before and after:
- _assert_occurrence(body, after, 'after')
- body = body.split(os.linesep)
- skip = None
- out = []
- for idx in range(len(body)):
- if skip != body[idx]:
- out.append(body[idx])
-
- if body[idx].find(after) > -1:
- next_line = idx + 1 < len(body) and body[idx + 1] or None
- if next_line is not None and _starts_till(next_line, content) > -1:
- skip = next_line
- out.append(_get_line_indent(body[idx], content, indent))
- body = os.linesep.join(out)
+ _assert_occurrence(after, 'after')
+
+ idx = body.index(after[0])
+ body = _insert_line_after(idx, body, content, indent)
else:
raise CommandExecutionError("Wrong conditions? "
"Unable to ensure line without knowing "
"where to put it before and/or after.")
- changed = body_before != hashlib.sha256(salt.utils.stringutils.to_bytes(body)).hexdigest()
+ if body:
+ for idx, line in enumerate(body):
+ if not _get_eol(line) and idx+1 < len(body):
+ prev = idx and idx-1 or 1
+ body[idx] = _set_line_eol(body[prev], line)
+ # We do not need empty line at the end anymore
+ if '' == body[-1]:
+ body.pop()
+
+ changed = body_before != hashlib.sha256(salt.utils.stringutils.to_bytes(''.join(body))).hexdigest()
if backup and changed and __opts__['test'] is False:
try:
@@ -2032,20 +2040,15 @@ def line(path, content=None, match=None, mode=None, location=None,
if changed:
if show_changes:
with salt.utils.files.fopen(path, 'r') as fp_:
- path_content = [salt.utils.stringutils.to_unicode(x)
- for x in fp_.read().splitlines(True)]
- changes_diff = ''.join(difflib.unified_diff(
- path_content,
- [salt.utils.stringutils.to_unicode(x)
- for x in body.splitlines(True)]
- ))
+ path_content = salt.utils.data.decode_list(fp_.read().splitlines(True))
+ changes_diff = __utils__['stringutils.get_diff'](path_content, body)
if __opts__['test'] is False:
fh_ = None
try:
# Make sure we match the file mode from salt.utils.files.fopen
mode = 'wb' if six.PY2 and salt.utils.platform.is_windows() else 'w'
fh_ = salt.utils.atomicfile.atomic_open(path, mode)
- fh_.write(body)
+ fh_.write(''.join(body))
finally:
if fh_:
fh_.close()
@@ -2419,18 +2422,15 @@ def replace(path,
if not dry_run and not salt.utils.platform.is_windows():
check_perms(path, None, pre_user, pre_group, pre_mode)
- def get_changes():
- orig_file_as_str = [salt.utils.stringutils.to_unicode(x) for x in orig_file]
- new_file_as_str = [salt.utils.stringutils.to_unicode(x) for x in new_file]
- return ''.join(difflib.unified_diff(orig_file_as_str, new_file_as_str))
+ differences = __utils__['stringutils.get_diff'](orig_file, new_file)
if show_changes:
- return get_changes()
+ return differences
# We may have found a regex line match but don't need to change the line
# (for situations where the pattern also matches the repl). Revert the
# has_changes flag to False if the final result is unchanged.
- if not get_changes():
+ if not differences:
has_changes = False
return has_changes
@@ -2684,7 +2684,7 @@ def blockreplace(path,
)
if block_found:
- diff = ''.join(difflib.unified_diff(orig_file, new_file))
+ diff = __utils__['stringutils.get_diff'](orig_file, new_file)
has_changes = diff is not ''
if has_changes and not dry_run:
# changes detected
@@ -5003,11 +5003,7 @@ def get_diff(file1,
else:
if show_filenames:
args.extend(files)
- ret = ''.join(
- difflib.unified_diff(
- *salt.utils.data.decode(args)
- )
- )
+ ret = __utils__['stringutils.get_diff'](*args)
return ret
return ''
diff --git a/salt/modules/win_file.py b/salt/modules/win_file.py
index d321bd538e..0f5c908c8f 100644
--- a/salt/modules/win_file.py
+++ b/salt/modules/win_file.py
@@ -58,8 +58,9 @@ from salt.modules.file import (check_hash, # pylint: disable=W0611
RE_FLAG_TABLE, blockreplace, prepend, seek_read, seek_write, rename,
lstat, path_exists_glob, write, pardir, join, HASHES, HASHES_REVMAP,
comment, uncomment, _add_flags, comment_line, _regex_to_static,
- _get_line_indent, apply_template_on_contents, dirname, basename,
- list_backups_dir, _assert_occurrence, _starts_till)
+ _set_line_indent, apply_template_on_contents, dirname, basename,
+ list_backups_dir, _assert_occurrence, _starts_till, _set_line_eol, _get_eol,
+ _insert_line_after, _insert_line_before)
from salt.modules.file import normpath as normpath_
from salt.utils.functools import namespaced_function as _namespaced_function
@@ -116,8 +117,9 @@ def __virtual__():
global blockreplace, prepend, seek_read, seek_write, rename, lstat
global write, pardir, join, _add_flags, apply_template_on_contents
global path_exists_glob, comment, uncomment, _mkstemp_copy
- global _regex_to_static, _get_line_indent, dirname, basename
+ global _regex_to_static, _set_line_indent, dirname, basename
global list_backups_dir, normpath_, _assert_occurrence, _starts_till
+ global _insert_line_before, _insert_line_after, _set_line_eol, _get_eol
replace = _namespaced_function(replace, globals())
search = _namespaced_function(search, globals())
@@ -172,7 +174,11 @@ def __virtual__():
uncomment = _namespaced_function(uncomment, globals())
comment_line = _namespaced_function(comment_line, globals())
_regex_to_static = _namespaced_function(_regex_to_static, globals())
- _get_line_indent = _namespaced_function(_get_line_indent, globals())
+ _set_line_indent = _namespaced_function(_set_line_indent, globals())
+ _set_line_eol = _namespaced_function(_set_line_eol, globals())
+ _get_eol = _namespaced_function(_get_eol, globals())
+ _insert_line_after = _namespaced_function(_insert_line_after, globals())
+ _insert_line_before = _namespaced_function(_insert_line_before, globals())
_mkstemp_copy = _namespaced_function(_mkstemp_copy, globals())
_add_flags = _namespaced_function(_add_flags, globals())
apply_template_on_contents = _namespaced_function(apply_template_on_contents, globals())
diff --git a/salt/utils/stringutils.py b/salt/utils/stringutils.py
index 2909d4aebe..f84fda70a5 100644
--- a/salt/utils/stringutils.py
+++ b/salt/utils/stringutils.py
@@ -6,6 +6,7 @@ Functions for manipulating or otherwise processing strings
# Import Python libs
from __future__ import absolute_import, print_function, unicode_literals
import base64
+import difflib
import errno
import fnmatch
import logging
@@ -31,21 +32,32 @@ def to_bytes(s, encoding=None, errors='strict'):
Given bytes, bytearray, str, or unicode (python 2), return bytes (str for
python 2)
'''
+ if encoding is None:
+ # Try utf-8 first, and fall back to detected encoding
+ encoding = ('utf-8', __salt_system_encoding__)
+ if not isinstance(encoding, (tuple, list)):
+ encoding = (encoding,)
+
+ if not encoding:
+ raise ValueError('encoding cannot be empty')
+
+ exc = None
if six.PY3:
if isinstance(s, bytes):
return s
if isinstance(s, bytearray):
return bytes(s)
if isinstance(s, six.string_types):
- if encoding:
- return s.encode(encoding, errors)
- else:
+ for enc in encoding:
try:
- # Try UTF-8 first
- return s.encode('utf-8', errors)
- except UnicodeEncodeError:
- # Fall back to detected encoding
- return s.encode(__salt_system_encoding__, errors)
+ return s.encode(enc, errors)
+ except UnicodeEncodeError as err:
+ exc = err
+ continue
+ # The only way we get this far is if a UnicodeEncodeError was
+ # raised, otherwise we would have already returned (or raised some
+ # other exception).
+ raise exc # pylint: disable=raising-bad-type
raise TypeError('expected bytes, bytearray, or str')
else:
return to_str(s, encoding, errors)
@@ -61,35 +73,48 @@ def to_str(s, encoding=None, errors='strict', normalize=False):
except TypeError:
return s
+ if encoding is None:
+ # Try utf-8 first, and fall back to detected encoding
+ encoding = ('utf-8', __salt_system_encoding__)
+ if not isinstance(encoding, (tuple, list)):
+ encoding = (encoding,)
+
+ if not encoding:
+ raise ValueError('encoding cannot be empty')
+
# This shouldn't be six.string_types because if we're on PY2 and we already
# have a string, we should just return it.
if isinstance(s, str):
return _normalize(s)
+
+ exc = None
if six.PY3:
if isinstance(s, (bytes, bytearray)):
- if encoding:
- return _normalize(s.decode(encoding, errors))
- else:
+ for enc in encoding:
try:
- # Try UTF-8 first
- return _normalize(s.decode('utf-8', errors))
- except UnicodeDecodeError:
- # Fall back to detected encoding
- return _normalize(s.decode(__salt_system_encoding__, errors))
+ return _normalize(s.decode(enc, errors))
+ except UnicodeDecodeError as err:
+ exc = err
+ continue
+ # The only way we get this far is if a UnicodeDecodeError was
+ # raised, otherwise we would have already returned (or raised some
+ # other exception).
+ raise exc # pylint: disable=raising-bad-type
raise TypeError('expected str, bytes, or bytearray not {}'.format(type(s)))
else:
if isinstance(s, bytearray):
return str(s) # future lint: disable=blacklisted-function
if isinstance(s, unicode): # pylint: disable=incompatible-py3-code,undefined-variable
- if encoding:
- return _normalize(s).encode(encoding, errors)
- else:
+ for enc in encoding:
try:
- # Try UTF-8 first
- return _normalize(s).encode('utf-8', errors)
- except UnicodeEncodeError:
- # Fall back to detected encoding
- return _normalize(s).encode(__salt_system_encoding__, errors)
+ return _normalize(s).encode(enc, errors)
+ except UnicodeEncodeError as err:
+ exc = err
+ continue
+ # The only way we get this far is if a UnicodeDecodeError was
+ # raised, otherwise we would have already returned (or raised some
+ # other exception).
+ raise exc # pylint: disable=raising-bad-type
raise TypeError('expected str, bytearray, or unicode')
@@ -100,6 +125,16 @@ def to_unicode(s, encoding=None, errors='strict', normalize=False):
def _normalize(s):
return unicodedata.normalize('NFC', s) if normalize else s
+ if encoding is None:
+ # Try utf-8 first, and fall back to detected encoding
+ encoding = ('utf-8', __salt_system_encoding__)
+ if not isinstance(encoding, (tuple, list)):
+ encoding = (encoding,)
+
+ if not encoding:
+ raise ValueError('encoding cannot be empty')
+
+ exc = None
if six.PY3:
if isinstance(s, str):
return _normalize(s)
@@ -113,15 +148,16 @@ def to_unicode(s, encoding=None, errors='strict', normalize=False):
if isinstance(s, unicode): # pylint: disable=incompatible-py3-code
return _normalize(s)
elif isinstance(s, (str, bytearray)):
- if encoding:
- return _normalize(s.decode(encoding, errors))
- else:
+ for enc in encoding:
try:
- # Try UTF-8 first
- return _normalize(s.decode('utf-8', errors))
- except UnicodeDecodeError:
- # Fall back to detected encoding
- return _normalize(s.decode(__salt_system_encoding__, errors))
+ return _normalize(s.decode(enc, errors))
+ except UnicodeDecodeError as err:
+ exc = err
+ continue
+ # The only way we get this far is if a UnicodeDecodeError was
+ # raised, otherwise we would have already returned (or raised some
+ # other exception).
+ raise exc # pylint: disable=raising-bad-type
raise TypeError('expected str or bytearray')
@@ -513,3 +549,21 @@ def get_context(template, line, num_lines=5, marker=None):
buf[error_line_in_context] += marker
return '---\n{0}\n---'.format('\n'.join(buf))
+
+
+def get_diff(a, b, *args, **kwargs):
+ '''
+ Perform diff on two iterables containing lines from two files, and return
+ the diff as as string. Lines are normalized to str types to avoid issues
+ with unicode on PY2.
+ '''
+ encoding = ('utf-8', 'latin-1', __salt_system_encoding__)
+ # Late import to avoid circular import
+ import salt.utils.data
+ return ''.join(
+ difflib.unified_diff(
+ salt.utils.data.decode_list(a, encoding=encoding),
+ salt.utils.data.decode_list(b, encoding=encoding),
+ *args, **kwargs
+ )
+ )
diff --git a/tests/integration/files/file/base/issue-48777/new.html b/tests/integration/files/file/base/issue-48777/new.html
new file mode 100644
index 0000000000..2d5c1ae744
--- /dev/null
+++ b/tests/integration/files/file/base/issue-48777/new.html
@@ -0,0 +1,5 @@
+<html>
+<body>
+räksmörgås
+</body>
+</html>
diff --git a/tests/integration/files/file/base/issue-48777/old.html b/tests/integration/files/file/base/issue-48777/old.html
new file mode 100644
index 0000000000..7879e1ce9f
--- /dev/null
+++ b/tests/integration/files/file/base/issue-48777/old.html
@@ -0,0 +1,4 @@
+<html>
+<body>
+</body>
+</html>
diff --git a/tests/integration/states/test_file.py b/tests/integration/states/test_file.py
index 9064ba7cc1..30ad39de6b 100644
--- a/tests/integration/states/test_file.py
+++ b/tests/integration/states/test_file.py
@@ -656,6 +656,29 @@ class FileTest(ModuleCase, SaltReturnAssertsMixin):
self.assertIn(
'does not exist', ret['comment'])
+ def test_managed_latin1_diff(self):
+ '''
+ Tests that latin-1 file contents are represented properly in the diff
+ '''
+ name = os.path.join(TMP, 'local_latin1_diff')
+ # Lay down the initial file
+ ret = self.run_state(
+ 'file.managed',
+ name=name,
+ source='salt://issue-48777/old.html')
+ ret = ret[next(iter(ret))]
+ assert ret['result'] is True, ret
+
+ # Replace it with the new file and check the diff
+ ret = self.run_state(
+ 'file.managed',
+ name=name,
+ source='salt://issue-48777/new.html')
+ ret = ret[next(iter(ret))]
+ assert ret['result'] is True, ret
+ diff_lines = ret['changes']['diff'].split('\n')
+ assert '+räksmörgås' in diff_lines, diff_lines
+
def test_directory(self):
'''
file.directory
diff --git a/tests/unit/modules/test_file.py b/tests/unit/modules/test_file.py
index b157a577e5..66acaf9cb6 100644
--- a/tests/unit/modules/test_file.py
+++ b/tests/unit/modules/test_file.py
@@ -57,7 +57,10 @@ class FileReplaceTestCase(TestCase, LoaderModuleMockMixin):
'grains': {},
},
'__grains__': {'kernel': 'Linux'},
- '__utils__': {'files.is_text': MagicMock(return_value=True)},
+ '__utils__': {
+ 'files.is_text': MagicMock(return_value=True),
+ 'stringutils.get_diff': salt.utils.stringutils.get_diff,
+ },
}
}
@@ -235,7 +238,12 @@ class FileBlockReplaceTestCase(TestCase, LoaderModuleMockMixin):
'grains': {},
},
'__grains__': {'kernel': 'Linux'},
- '__utils__': {'files.is_text': MagicMock(return_value=True)},
+ '__utils__': {
+ 'files.is_binary': MagicMock(return_value=False),
+ 'files.is_text': MagicMock(return_value=True),
+ 'files.get_encoding': MagicMock(return_value='utf-8'),
+ 'stringutils.get_diff': salt.utils.stringutils.get_diff,
+ },
}
}
@@ -528,7 +536,10 @@ class FileModuleTestCase(TestCase, LoaderModuleMockMixin):
'cachedir': 'tmp',
'grains': {},
},
- '__grains__': {'kernel': 'Linux'}
+ '__grains__': {'kernel': 'Linux'},
+ '__utils__': {
+ 'stringutils.get_diff': salt.utils.stringutils.get_diff,
+ },
}
}
@@ -907,7 +918,10 @@ class FilemodLineTests(TestCase, LoaderModuleMockMixin):
'cachedir': 'tmp',
'grains': {},
},
- '__grains__': {'kernel': 'Linux'}
+ '__grains__': {'kernel': 'Linux'},
+ '__utils__': {
+ 'stringutils.get_diff': salt.utils.stringutils.get_diff,
+ },
}
}
@@ -930,6 +944,29 @@ class FilemodLineTests(TestCase, LoaderModuleMockMixin):
self.assertIn('Cannot find text to {0}'.format(mode),
_log.warning.call_args_list[0][0][0])
+ @patch('os.path.realpath', MagicMock())
+ @patch('os.path.isfile', MagicMock(return_value=True))
+ @patch('os.stat', MagicMock())
+ def test_line_delete_no_match(self):
+ '''
+ Tests that when calling file.line with ``mode=delete``,
+ with not matching pattern to delete returns False
+ :return:
+ '''
+ file_content = os.linesep.join([
+ 'file_roots:',
+ ' base:',
+ ' - /srv/salt',
+ ' - /srv/custom'
+ ])
+ match = 'not matching'
+ for mode in ['delete', 'replace']:
+ files_fopen = mock_open(read_data=file_content)
+ with patch('salt.utils.files.fopen', files_fopen):
+ atomic_opener = mock_open()
+ with patch('salt.utils.atomicfile.atomic_open', atomic_opener):
+ self.assertFalse(filemod.line('foo', content='foo', match=match, mode=mode))
+
@patch('os.path.realpath', MagicMock())
@patch('os.path.isfile', MagicMock(return_value=True))
def test_line_modecheck_failure(self):
@@ -1082,7 +1119,7 @@ class FilemodLineTests(TestCase, LoaderModuleMockMixin):
' - /srv/sugar'
])
cfg_content = '- /srv/custom'
- for before_line in ['/srv/salt', '/srv/sa.*t', '/sr.*']:
+ for before_line in ['/srv/salt', '/srv/sa.*t']:
files_fopen = mock_open(read_data=file_content)
with patch('salt.utils.files.fopen', files_fopen):
atomic_opener = mock_open()
@@ -1092,6 +1129,32 @@ class FilemodLineTests(TestCase, LoaderModuleMockMixin):
self.assertEqual(atomic_opener().write.call_args_list[0][0][0],
file_modified)
+ @patch('os.path.realpath', MagicMock())
+ @patch('os.path.isfile', MagicMock(return_value=True))
+ @patch('os.stat', MagicMock())
+ def test_line_assert_exception_pattern(self):
+ '''
+ Test for file.line for exception on insert with too general pattern.
+
+ :return:
+ '''
+ file_content = os.linesep.join([
+ 'file_roots:',
+ ' base:',
+ ' - /srv/salt',
+ ' - /srv/sugar'
+ ])
+ cfg_content = '- /srv/custom'
+ for before_line in ['/sr.*']:
+ files_fopen = mock_open(read_data=file_content)
+ with patch('salt.utils.files.fopen', files_fopen):
+ atomic_opener = mock_open()
+ with patch('salt.utils.atomicfile.atomic_open', atomic_opener):
+ with self.assertRaises(CommandExecutionError) as cm:
+ filemod.line('foo', content=cfg_content, before=before_line, mode='insert')
+ self.assertEqual(cm.exception.strerror,
+ 'Found more than expected occurrences in "before" expression')
+
@patch('os.path.realpath', MagicMock())
@patch('os.path.isfile', MagicMock(return_value=True))
@patch('os.stat', MagicMock())
@@ -1179,7 +1242,7 @@ class FilemodLineTests(TestCase, LoaderModuleMockMixin):
' base:',
' - /srv/salt',
' - /srv/sugar',
- cfg_content
+ ' ' + cfg_content
])
files_fopen = mock_open(read_data=file_content)
with patch('salt.utils.files.fopen', files_fopen):
@@ -1273,6 +1336,33 @@ class FilemodLineTests(TestCase, LoaderModuleMockMixin):
self.assertEqual(atomic_opener().write.call_args_list[0][0][0],
file_modified)
+ @patch('os.path.realpath', MagicMock())
+ @patch('os.path.isfile', MagicMock(return_value=True))
+ @patch('os.stat', MagicMock())
+ def test_line_insert_ensure_before_first_line(self):
+ '''
+ Test for file.line for insertion ensuring the line is before first line
+ :return:
+ '''
+ cfg_content = '#!/bin/bash'
+ file_content = os.linesep.join([
+ '/etc/init.d/someservice restart',
+ 'exit 0'
+ ])
+ file_modified = os.linesep.join([
+ cfg_content,
+ '/etc/init.d/someservice restart',
+ 'exit 0'
+ ])
+ files_fopen = mock_open(read_data=file_content)
+ with patch('salt.utils.files.fopen', files_fopen):
+ atomic_opener = mock_open()
+ with patch('salt.utils.atomicfile.atomic_open', atomic_opener):
+ filemod.line('foo', content=cfg_content, before='/etc/init.d/someservice restart', mode='ensure')
+ self.assertEqual(len(atomic_opener().write.call_args_list), 1)
+ self.assertEqual(atomic_opener().write.call_args_list[0][0][0],
+ file_modified)
+
@patch('os.path.realpath', MagicMock())
@patch('os.path.isfile', MagicMock(return_value=True))
@patch('os.stat', MagicMock())
diff --git a/tests/unit/utils/test_stringutils.py b/tests/unit/utils/test_stringutils.py
index 9c8fd4f7c3..852f558793 100644
--- a/tests/unit/utils/test_stringutils.py
+++ b/tests/unit/utils/test_stringutils.py
@@ -18,6 +18,9 @@ STR = BYTES = UNICODE.encode('utf-8')
# code points. Do not modify it.
EGGS = '\u044f\u0438\u0306\u0446\u0430'
+LATIN1_UNICODE = 'räksmörgås'
+LATIN1_BYTES = LATIN1_UNICODE.encode('latin-1')
+
class StringutilsTestCase(TestCase):
def test_contains_whitespace(self):
@@ -134,6 +137,13 @@ class StringutilsTestCase(TestCase):
<>йца'
)
+ self.assertEqual(
+ salt.utils.stringutils.to_unicode(
+ LATIN1_BYTES, encoding='latin-1'
+ ),
+ LATIN1_UNICODE
+ )
+
if six.PY3:
self.assertEqual(salt.utils.stringutils.to_unicode('plugh'), 'plugh')
self.assertEqual(salt.utils.stringutils.to_unicode('áéíóúý'), 'áéíóúý')
@@ -150,6 +160,10 @@ class StringutilsTestCase(TestCase):
with patch.object(builtins, '__salt_system_encoding__', 'CP1252'):
self.assertEqual(salt.utils.stringutils.to_unicode('Ψ'.encode('utf-8')), 'Ψ')
+ def test_to_unicode_multi_encoding(self):
+ result = salt.utils.stringutils.to_unicode(LATIN1_BYTES, encoding=('utf-8', 'latin1'))
+ assert result == LATIN1_UNICODE
+
def test_build_whitespace_split_regex(self):
expected_regex = '(?m)^(?:[\\s]+)?Lorem(?:[\\s]+)?ipsum(?:[\\s]+)?dolor(?:[\\s]+)?sit(?:[\\s]+)?amet\\,' \
'(?:[\\s]+)?$'
--
2.17.1

View File

@ -0,0 +1,42 @@
From 7ffa39cd80393f2a3ed5cd75793b134b9d939cf9 Mon Sep 17 00:00:00 2001
From: Bo Maryniuk <bo@suse.de>
Date: Thu, 11 Oct 2018 16:20:40 +0200
Subject: [PATCH] Fix unit test for grains core
---
tests/unit/grains/test_core.py | 11 +++++------
1 file changed, 5 insertions(+), 6 deletions(-)
diff --git a/tests/unit/grains/test_core.py b/tests/unit/grains/test_core.py
index 2ab32ef41b..4923ee00b0 100644
--- a/tests/unit/grains/test_core.py
+++ b/tests/unit/grains/test_core.py
@@ -62,11 +62,10 @@ class CoreGrainsTestCase(TestCase, LoaderModuleMockMixin):
def test_parse_etc_os_release(self, path_isfile_mock):
path_isfile_mock.side_effect = lambda x: x == "/usr/lib/os-release"
with salt.utils.files.fopen(os.path.join(OS_RELEASE_DIR, "ubuntu-17.10")) as os_release_file:
- os_release_content = os_release_file.read()
- with patch("salt.utils.files.fopen", mock_open(read_data=os_release_content)):
- os_release = core._parse_os_release(
- '/etc/os-release',
- '/usr/lib/os-release')
+ os_release_content = os_release_file.readlines()
+ with patch("salt.utils.files.fopen", mock_open()) as os_release_file:
+ os_release_file.return_value.__iter__.return_value = os_release_content
+ os_release = core._parse_os_release(["/etc/os-release", "/usr/lib/os-release"])
self.assertEqual(os_release, {
"NAME": "Ubuntu",
"VERSION": "17.10 (Artful Aardvark)",
@@ -128,7 +127,7 @@ class CoreGrainsTestCase(TestCase, LoaderModuleMockMixin):
def test_missing_os_release(self):
with patch('salt.utils.files.fopen', mock_open(read_data={})):
- os_release = core._parse_os_release('/etc/os-release', '/usr/lib/os-release')
+ os_release = core._parse_os_release(['/etc/os-release', '/usr/lib/os-release'])
self.assertEqual(os_release, {})
@skipIf(not salt.utils.platform.is_linux(), 'System is not Linux')
--
2.19.0

View File

@ -0,0 +1,143 @@
From 43b1f8fb6608c944812bc5bcd9da407624409ac7 Mon Sep 17 00:00:00 2001
From: Erik Johnson <palehose@gmail.com>
Date: Fri, 24 Aug 2018 10:35:55 -0500
Subject: [PATCH] Fixes: CVE-2018-15750, CVE-2018-15751
Ensure that tokens are hex to avoid hanging/errors in cherrypy
Add empty token salt-api integration tests
Handle Auth exceptions in run_job
Update tornado test to correct authentication message
---
salt/client/__init__.py | 8 ++++
salt/netapi/rest_cherrypy/app.py | 13 ++++++-
.../netapi/rest_cherrypy/test_app.py | 39 +++++++++++++++++++
.../netapi/rest_tornado/test_app.py | 2 +-
4 files changed, 60 insertions(+), 2 deletions(-)
diff --git a/salt/client/__init__.py b/salt/client/__init__.py
index dcbc1473e1..77f2a963f7 100644
--- a/salt/client/__init__.py
+++ b/salt/client/__init__.py
@@ -349,6 +349,10 @@ class LocalClient(object):
raise SaltClientError(
'The salt master could not be contacted. Is master running?'
)
+ except AuthenticationError as err:
+ raise AuthenticationError(err)
+ except AuthorizationError as err:
+ raise AuthorizationError(err)
except Exception as general_exception:
# Convert to generic client error and pass along message
raise SaltClientError(general_exception)
@@ -415,6 +419,10 @@ class LocalClient(object):
raise SaltClientError(
'The salt master could not be contacted. Is master running?'
)
+ except AuthenticationError as err:
+ raise AuthenticationError(err)
+ except AuthorizationError as err:
+ raise AuthorizationError(err)
except Exception as general_exception:
# Convert to generic client error and pass along message
raise SaltClientError(general_exception)
diff --git a/salt/netapi/rest_cherrypy/app.py b/salt/netapi/rest_cherrypy/app.py
index 78ea3c3fef..c272674146 100644
--- a/salt/netapi/rest_cherrypy/app.py
+++ b/salt/netapi/rest_cherrypy/app.py
@@ -1167,6 +1167,13 @@ class LowDataAdapter(object):
if token:
chunk['token'] = token
+ if 'token' in chunk:
+ # Make sure that auth token is hex
+ try:
+ int(chunk['token'], 16)
+ except (TypeError, ValueError):
+ raise cherrypy.HTTPError(401, 'Invalid token')
+
if client:
chunk['client'] = client
@@ -2167,7 +2174,11 @@ class Events(object):
:return bool: True if valid, False if not valid.
'''
- if auth_token is None:
+ # Make sure that auth token is hex. If it's None, or something other
+ # than hex, this will raise a ValueError.
+ try:
+ int(auth_token, 16)
+ except ValueError:
return False
# First check if the given token is in our session table; if so it's a
diff --git a/tests/integration/netapi/rest_cherrypy/test_app.py b/tests/integration/netapi/rest_cherrypy/test_app.py
index 000b7418bf..5865510fd7 100644
--- a/tests/integration/netapi/rest_cherrypy/test_app.py
+++ b/tests/integration/netapi/rest_cherrypy/test_app.py
@@ -124,6 +124,45 @@ class TestRun(cptc.BaseRestCherryPyTest):
})
self.assertEqual(response.status, '401 Unauthorized')
+ def test_run_empty_token(self):
+ '''
+ Test the run URL with empty token
+ '''
+ cmd = dict(self.low, **{'token': ''})
+ body = urlencode(cmd)
+
+ request, response = self.request('/run', method='POST', body=body,
+ headers={
+ 'content-type': 'application/x-www-form-urlencoded'
+ })
+ assert response.status == '401 Unauthorized'
+
+ def test_run_empty_token_upercase(self):
+ '''
+ Test the run URL with empty token with upercase characters
+ '''
+ cmd = dict(self.low, **{'ToKen': ''})
+ body = urlencode(cmd)
+
+ request, response = self.request('/run', method='POST', body=body,
+ headers={
+ 'content-type': 'application/x-www-form-urlencoded'
+ })
+ assert response.status == '401 Unauthorized'
+
+ def test_run_wrong_token(self):
+ '''
+ Test the run URL with incorrect token
+ '''
+ cmd = dict(self.low, **{'token': 'bad'})
+ body = urlencode(cmd)
+
+ request, response = self.request('/run', method='POST', body=body,
+ headers={
+ 'content-type': 'application/x-www-form-urlencoded'
+ })
+ assert response.status == '401 Unauthorized'
+
class TestWebhookDisableAuth(cptc.BaseRestCherryPyTest):
diff --git a/tests/integration/netapi/rest_tornado/test_app.py b/tests/integration/netapi/rest_tornado/test_app.py
index beb085db1e..01abd354a7 100644
--- a/tests/integration/netapi/rest_tornado/test_app.py
+++ b/tests/integration/netapi/rest_tornado/test_app.py
@@ -237,7 +237,7 @@ class TestSaltAPIHandler(_SaltnadoIntegrationTestCase):
self.assertEqual(len(ret), 3) # make sure we got 3 responses
self.assertIn('jid', ret[0]) # the first 2 are regular returns
self.assertIn('jid', ret[1])
- self.assertIn('Authentication error occurred.', ret[2]) # bad auth
+ self.assertIn('Failed to authenticate', ret[2]) # bad auth
self.assertEqual(ret[0]['minions'], sorted(['minion', 'sub_minion', 'localhost']))
self.assertEqual(ret[1]['minions'], sorted(['minion', 'sub_minion', 'localhost']))
--
2.17.1

View File

@ -0,0 +1,37 @@
From 6c85da9a53e9dd022c96a199be4e3bdd280543d6 Mon Sep 17 00:00:00 2001
From: "Gareth J. Greenaway" <gareth@wiked.org>
Date: Thu, 2 Aug 2018 15:35:24 -0700
Subject: [PATCH] Fixing issue when a valid token is generated even when
invalid user credentials are passed. This change verifies that the binddn
credentials are valid, then verifies that the username & password (if not
None) are also valid.
---
salt/auth/ldap.py | 8 +++++++-
1 file changed, 7 insertions(+), 1 deletion(-)
diff --git a/salt/auth/ldap.py b/salt/auth/ldap.py
index cbfb03a2f2..0b9aa69fe4 100644
--- a/salt/auth/ldap.py
+++ b/salt/auth/ldap.py
@@ -283,9 +283,15 @@ def auth(username, password):
log.error('LDAP authentication requires python-ldap module')
return False
- # If bind credentials are configured, use them instead of user's
+ # If bind credentials are configured, verify that we can a valid bind
if _config('binddn', mandatory=False) and _config('bindpw', mandatory=False):
bind = _bind_for_search(anonymous=_config('anonymous', mandatory=False))
+
+ # If username & password are not None, attempt to verify they are valid
+ if bind and username and password:
+ bind = _bind(username, password,
+ anonymous=_config('auth_by_group_membership_only', mandatory=False)
+ and _config('anonymous', mandatory=False))
else:
bind = _bind(username, password,
anonymous=_config('auth_by_group_membership_only', mandatory=False)
--
2.19.0

View File

@ -0,0 +1,143 @@
From 2e0abe6d12aa2657a4febed3a80b8c4cf104487a Mon Sep 17 00:00:00 2001
From: Bo Maryniuk <bo@suse.de>
Date: Wed, 14 Nov 2018 17:36:23 +0100
Subject: [PATCH] Get os_arch also without RPM package installed
backport pkg.rpm test
Add pkg.rpm unit test case
Fix docstring
Add UT for getting OS architecture fallback, when no RPM found (initrd, e.g.)
Add UT for OS architecture detection on fallback, when no CPU arch can be determined
Add UT for OS arch detection when no CPU arch or machine can be determined
Remove unsupported testcase
---
salt/utils/pkg/rpm.py | 18 ++++++---
tests/unit/utils/test_pkg.py | 72 ++++++++++++++++++++++++++++++++++++
2 files changed, 84 insertions(+), 6 deletions(-)
create mode 100644 tests/unit/utils/test_pkg.py
diff --git a/salt/utils/pkg/rpm.py b/salt/utils/pkg/rpm.py
index 94e231da4b..bb8c3fb589 100644
--- a/salt/utils/pkg/rpm.py
+++ b/salt/utils/pkg/rpm.py
@@ -9,7 +9,9 @@ import collections
import datetime
import logging
import subprocess
+import platform
import salt.utils.stringutils
+import salt.utils.path
# Import 3rd-party libs
from salt.ext import six
@@ -42,12 +44,16 @@ def get_osarch():
'''
Get the os architecture using rpm --eval
'''
- ret = subprocess.Popen(
- 'rpm --eval "%{_host_cpu}"',
- shell=True,
- close_fds=True,
- stdout=subprocess.PIPE,
- stderr=subprocess.PIPE).communicate()[0]
+ if salt.utils.path.which('rpm'):
+ ret = subprocess.Popen(
+ 'rpm --eval "%{_host_cpu}"',
+ shell=True,
+ close_fds=True,
+ stdout=subprocess.PIPE,
+ stderr=subprocess.PIPE).communicate()[0]
+ else:
+ ret = ''.join(list(filter(None, platform.uname()[-2:]))[-1:])
+
return salt.utils.stringutils.to_str(ret).strip() or 'unknown'
diff --git a/tests/unit/utils/test_pkg.py b/tests/unit/utils/test_pkg.py
new file mode 100644
index 0000000000..361e0bf92f
--- /dev/null
+++ b/tests/unit/utils/test_pkg.py
@@ -0,0 +1,72 @@
+# -*- coding: utf-8 -*-
+
+from __future__ import absolute_import, unicode_literals, print_function
+
+from tests.support.unit import TestCase, skipIf
+from tests.support.mock import Mock, MagicMock, patch, NO_MOCK, NO_MOCK_REASON
+import salt.utils.pkg
+from salt.utils.pkg import rpm
+
+try:
+ import pytest
+except ImportError:
+ pytest = None
+
+
+@skipIf(NO_MOCK, NO_MOCK_REASON)
+@skipIf(pytest is None, 'PyTest is missing')
+class PkgRPMTestCase(TestCase):
+ '''
+ Test case for pkg.rpm utils
+ '''
+
+ @patch('salt.utils.path.which', MagicMock(return_value=True))
+ def test_get_osarch_by_rpm(self):
+ '''
+ Get os_arch if RPM package is installed.
+ :return:
+ '''
+ subprocess_mock = MagicMock()
+ subprocess_mock.Popen = MagicMock()
+ subprocess_mock.Popen().communicate = MagicMock(return_value=['Z80'])
+ with patch('salt.utils.pkg.rpm.subprocess', subprocess_mock):
+ assert rpm.get_osarch() == 'Z80'
+ assert subprocess_mock.Popen.call_count == 2 # One within the mock
+ assert subprocess_mock.Popen.call_args[1]['close_fds']
+ assert subprocess_mock.Popen.call_args[1]['shell']
+ assert len(subprocess_mock.Popen.call_args_list) == 2
+ assert subprocess_mock.Popen.call_args[0][0] == 'rpm --eval "%{_host_cpu}"'
+
+ @patch('salt.utils.path.which', MagicMock(return_value=False))
+ @patch('salt.utils.pkg.rpm.subprocess', MagicMock(return_value=False))
+ @patch('salt.utils.pkg.rpm.platform.uname', MagicMock(
+ return_value=('Sinclair BASIC', 'motophone', '1982 Sinclair Research Ltd', '1.0', 'ZX81', 'Z80')))
+ def test_get_osarch_by_platform(self):
+ '''
+ Get os_arch if RPM package is not installed (inird image, for example).
+ :return:
+ '''
+ assert rpm.get_osarch() == 'Z80'
+
+ @patch('salt.utils.path.which', MagicMock(return_value=False))
+ @patch('salt.utils.pkg.rpm.subprocess', MagicMock(return_value=False))
+ @patch('salt.utils.pkg.rpm.platform.uname', MagicMock(
+ return_value=('Sinclair BASIC', 'motophone', '1982 Sinclair Research Ltd', '1.0', 'ZX81', '')))
+ def test_get_osarch_by_platform_no_cpu_arch(self):
+ '''
+ Get os_arch if RPM package is not installed (inird image, for example) but cpu arch cannot be determined.
+ :return:
+ '''
+ assert rpm.get_osarch() == 'ZX81'
+
+ @patch('salt.utils.path.which', MagicMock(return_value=False))
+ @patch('salt.utils.pkg.rpm.subprocess', MagicMock(return_value=False))
+ @patch('salt.utils.pkg.rpm.platform.uname', MagicMock(
+ return_value=('Sinclair BASIC', 'motophone', '1982 Sinclair Research Ltd', '1.0', '', '')))
+ def test_get_osarch_by_platform_no_cpu_arch_no_machine(self):
+ '''
+ Get os_arch if RPM package is not installed (inird image, for example)
+ where both cpu arch and machine cannot be determined.
+ :return:
+ '''
+ assert rpm.get_osarch() == 'unknown'
--
2.19.1

View File

@ -0,0 +1,65 @@
From 943a258da3ed460f173968b0a92b95f2e63ab669 Mon Sep 17 00:00:00 2001
From: Bo Maryniuk <bo@suse.de>
Date: Mon, 8 Oct 2018 12:48:24 +0200
Subject: [PATCH] Get os_family for RPM distros from the RPM macros.
(U#49930)
Strip and stringify the return for the osarch
Fix imports
---
salt/grains/core.py | 8 +++++---
salt/utils/pkg/rpm.py | 3 ++-
2 files changed, 7 insertions(+), 4 deletions(-)
diff --git a/salt/grains/core.py b/salt/grains/core.py
index 6aaf38096d..80eebd1c05 100644
--- a/salt/grains/core.py
+++ b/salt/grains/core.py
@@ -49,6 +49,8 @@ import salt.utils.path
import salt.utils.platform
import salt.utils.stringutils
import salt.utils.versions
+import salt.utils.pkg.rpm
+
from salt.ext import six
from salt.ext.six.moves import range
@@ -1776,9 +1778,9 @@ def os_data():
# architecture.
if grains.get('os_family') == 'Debian':
osarch = __salt__['cmd.run']('dpkg --print-architecture').strip()
- elif grains.get('os_family') == 'RedHat':
- osarch = __salt__['cmd.run']('rpm --eval %{_host_cpu}').strip()
- elif grains.get('os_family') == 'NILinuxRT':
+ elif grains.get('os_family') in ['RedHat', 'Suse']:
+ osarch = salt.utils.pkg.rpm.get_osarch()
+ elif grains.get('os_family') in ('NILinuxRT', 'Poky'):
archinfo = {}
for line in __salt__['cmd.run']('opkg print-architecture').splitlines():
if line.startswith('arch'):
diff --git a/salt/utils/pkg/rpm.py b/salt/utils/pkg/rpm.py
index 987edab894..94e231da4b 100644
--- a/salt/utils/pkg/rpm.py
+++ b/salt/utils/pkg/rpm.py
@@ -9,6 +9,7 @@ import collections
import datetime
import logging
import subprocess
+import salt.utils.stringutils
# Import 3rd-party libs
from salt.ext import six
@@ -47,7 +48,7 @@ def get_osarch():
close_fds=True,
stdout=subprocess.PIPE,
stderr=subprocess.PIPE).communicate()[0]
- return ret or 'unknown'
+ return salt.utils.stringutils.to_str(ret).strip() or 'unknown'
def check_32(arch, osarch=None):
--
2.19.0

View File

@ -0,0 +1,38 @@
From d0234ed977ca860b3a3a6a587a6972bbaf5ae345 Mon Sep 17 00:00:00 2001
From: Raine Curtis <rcurtis@suse.com>
Date: Mon, 9 Jul 2018 09:55:30 -0600
Subject: [PATCH] Improved handling of LDAP group id
gid is cast to int, which should be the case. Otherwise an error
is returned.
---
salt/states/group.py | 11 +++++++++--
1 file changed, 9 insertions(+), 2 deletions(-)
diff --git a/salt/states/group.py b/salt/states/group.py
index 6a720757e8..acf775134c 100644
--- a/salt/states/group.py
+++ b/salt/states/group.py
@@ -72,9 +72,16 @@ def _changes(name,
delusers = [salt.utils.win_functions.get_sam_name(user).lower() for user in delusers]
change = {}
+ ret = {}
if gid:
- if lgrp['gid'] != gid:
- change['gid'] = gid
+ try:
+ gid = int(gid)
+ if lgrp['gid'] != gid:
+ change['gid'] = gid
+ except (TypeError, ValueError):
+ ret['result'] = False
+ ret['comment'] = 'Invalid gid'
+ return ret
if members:
# -- if new member list if different than the current
--
2.19.1

View File

@ -0,0 +1,63 @@
From 9d9fb3fd787b40d9d27ad7c5eb69fa0cd4f5a304 Mon Sep 17 00:00:00 2001
From: Joachim Gleissner <jgleissner@suse.com>
Date: Tue, 18 Sep 2018 15:07:13 +0200
Subject: [PATCH] loosen azure sdk dependencies in azurearm cloud driver
Remove dependency on azure-cli, which is not used at all.
Use azure-storage-sdk as fallback if multiapi version is not available.
remove unused import from azurearm driver
---
salt/cloud/clouds/azurearm.py | 14 ++++++++------
1 file changed, 8 insertions(+), 6 deletions(-)
diff --git a/salt/cloud/clouds/azurearm.py b/salt/cloud/clouds/azurearm.py
index 8b9a9e8903..50e5ce1f62 100644
--- a/salt/cloud/clouds/azurearm.py
+++ b/salt/cloud/clouds/azurearm.py
@@ -67,6 +67,7 @@ import logging
import pprint
import base64
import collections
+import pkgutil
import salt.cache
import salt.config as config
import salt.utils.cloud
@@ -74,7 +75,6 @@ import salt.utils.data
import salt.utils.files
import salt.utils.stringutils
import salt.utils.yaml
-from salt.utils.versions import LooseVersion
from salt.ext import six
import salt.version
from salt.exceptions import (
@@ -125,9 +125,12 @@ try:
from azure.mgmt.storage import StorageManagementClient
from azure.mgmt.web import WebSiteManagementClient
from msrestazure.azure_exceptions import CloudError
- from azure.multiapi.storage.v2016_05_31 import CloudStorageAccount
- from azure.cli import core
- HAS_LIBS = LooseVersion(core.__version__) >= LooseVersion("2.0.12")
+ if pkgutil.find_loader('azure.multiapi'):
+ # use multiapi version if available
+ from azure.multiapi.storage.v2016_05_31 import CloudStorageAccount
+ else:
+ from azure.storage import CloudStorageAccount
+ HAS_LIBS = True
except ImportError:
pass
# pylint: enable=wrong-import-position,wrong-import-order
@@ -160,8 +163,7 @@ def __virtual__():
False,
'The following dependencies are required to use the AzureARM driver: '
'Microsoft Azure SDK for Python >= 2.0rc5, '
- 'Microsoft Azure Storage SDK for Python >= 0.32, '
- 'Microsoft Azure CLI >= 2.0.12'
+ 'Microsoft Azure Storage SDK for Python >= 0.32'
)
global cache # pylint: disable=global-statement,invalid-name
--
2.17.1

View File

@ -0,0 +1,26 @@
From 350b0aa4ead80ac50047c08121bc09bddc05341d Mon Sep 17 00:00:00 2001
From: Bo Maryniuk <bo@suse.de>
Date: Fri, 16 Nov 2018 10:54:12 +0100
Subject: [PATCH] Make aptpkg.list_repos compatible on enabled/disabled
output
---
salt/modules/aptpkg.py | 1 +
1 file changed, 1 insertion(+)
diff --git a/salt/modules/aptpkg.py b/salt/modules/aptpkg.py
index 175ef2ed06..90b99c44b9 100644
--- a/salt/modules/aptpkg.py
+++ b/salt/modules/aptpkg.py
@@ -1719,6 +1719,7 @@ def list_repos():
repo['file'] = source.file
repo['comps'] = getattr(source, 'comps', [])
repo['disabled'] = source.disabled
+ repo['enabled'] = not repo['disabled'] # This is for compatibility with the other modules
repo['dist'] = source.dist
repo['type'] = source.type
repo['uri'] = source.uri.rstrip('/')
--
2.19.1

View File

@ -0,0 +1,27 @@
From 155aa52dca9272db492990ad737256dada1c4364 Mon Sep 17 00:00:00 2001
From: Bo Maryniuk <bo@suse.de>
Date: Mon, 8 Oct 2018 17:52:07 +0200
Subject: [PATCH] Make profiles a package.
Add UTF-8 encoding
Add a docstring
---
salt/cli/support/profiles/__init__.py | 4 ++++
1 file changed, 4 insertions(+)
create mode 100644 salt/cli/support/profiles/__init__.py
diff --git a/salt/cli/support/profiles/__init__.py b/salt/cli/support/profiles/__init__.py
new file mode 100644
index 0000000000..b86aef30b8
--- /dev/null
+++ b/salt/cli/support/profiles/__init__.py
@@ -0,0 +1,4 @@
+# coding=utf-8
+'''
+Profiles for salt-support.
+'''
--
2.19.0

View File

@ -0,0 +1,89 @@
From 318b4e0cd2efb02f26392bfe2d354a3ff5d21cbc Mon Sep 17 00:00:00 2001
From: Bo Maryniuk <bo@suse.de>
Date: Mon, 15 Oct 2018 17:26:16 +0200
Subject: [PATCH] Preserving signature in "module.run" state (U#50049)
Add unit test for _call_function on signature aligning named arguments
Add unit test for _call_function routine for unnamed positional arguments
Remove redundant docstrings
Add different test function signature with the same outcome
Replace standalone function with lambda-proxy for signatures only
---
salt/states/module.py | 7 +++++--
tests/unit/states/test_module.py | 27 +++++++++++++++++++++++++++
2 files changed, 32 insertions(+), 2 deletions(-)
diff --git a/salt/states/module.py b/salt/states/module.py
index 2190ffa3d2..90b1d0a5f5 100644
--- a/salt/states/module.py
+++ b/salt/states/module.py
@@ -323,7 +323,7 @@ def _call_function(name, returner=None, **kwargs):
# func_args is initialized to a list of positional arguments that the function to be run accepts
func_args = argspec.args[:len(argspec.args or []) - len(argspec.defaults or [])]
- arg_type, na_type, kw_type = [], {}, False
+ arg_type, kw_to_arg_type, na_type, kw_type = [], {}, {}, False
for funcset in reversed(kwargs.get('func_args') or []):
if not isinstance(funcset, dict):
# We are just receiving a list of args to the function to be run, so just append
@@ -334,13 +334,16 @@ def _call_function(name, returner=None, **kwargs):
# We are going to pass in a keyword argument. The trick here is to make certain
# that if we find that in the *args* list that we pass it there and not as a kwarg
if kwarg_key in func_args:
- arg_type.append(funcset[kwarg_key])
+ kw_to_arg_type[kwarg_key] = funcset[kwarg_key]
continue
else:
# Otherwise, we're good and just go ahead and pass the keyword/value pair into
# the kwargs list to be run.
func_kw.update(funcset)
arg_type.reverse()
+ for arg in func_args:
+ if arg in kw_to_arg_type:
+ arg_type.append(kw_to_arg_type[arg])
_exp_prm = len(argspec.args or []) - len(argspec.defaults or [])
_passed_prm = len(arg_type)
missing = []
diff --git a/tests/unit/states/test_module.py b/tests/unit/states/test_module.py
index bf4ddcc5b4..25082d4bb4 100644
--- a/tests/unit/states/test_module.py
+++ b/tests/unit/states/test_module.py
@@ -324,3 +324,30 @@ class ModuleStateTest(TestCase, LoaderModuleMockMixin):
self.assertIn(comment, ret['comment'])
self.assertIn('world', ret['comment'])
self.assertIn('hello', ret['comment'])
+
+ def test_call_function_named_args(self):
+ '''
+ Test _call_function routine when params are named. Their position ordering should not matter.
+
+ :return:
+ '''
+ with patch.dict(module.__salt__,
+ {'testfunc': lambda a, b, c, *args, **kwargs: (a, b, c, args, kwargs)}, clear=True):
+ assert module._call_function('testfunc', func_args=[{'a': 1}, {'b': 2}, {'c': 3}]) == (1, 2, 3, (), {})
+ assert module._call_function('testfunc', func_args=[{'c': 3}, {'a': 1}, {'b': 2}]) == (1, 2, 3, (), {})
+
+ with patch.dict(module.__salt__,
+ {'testfunc': lambda c, a, b, *args, **kwargs: (a, b, c, args, kwargs)}, clear=True):
+ assert module._call_function('testfunc', func_args=[{'a': 1}, {'b': 2}, {'c': 3}]) == (1, 2, 3, (), {})
+ assert module._call_function('testfunc', func_args=[{'c': 3}, {'a': 1}, {'b': 2}]) == (1, 2, 3, (), {})
+
+ def test_call_function_ordered_args(self):
+ '''
+ Test _call_function routine when params are not named. Their position should matter.
+
+ :return:
+ '''
+ with patch.dict(module.__salt__,
+ {'testfunc': lambda a, b, c, *args, **kwargs: (a, b, c, args, kwargs)}, clear=True):
+ assert module._call_function('testfunc', func_args=[1, 2, 3]) == (1, 2, 3, (), {})
+ assert module._call_function('testfunc', func_args=[3, 1, 2]) == (3, 1, 2, (), {})
--
2.19.0

View File

@ -0,0 +1,744 @@
From 6488d91acb6f470bfa2b66ac8100cb67d6367612 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Pablo=20Su=C3=A1rez=20Hern=C3=A1ndez?=
<psuarezhernandez@suse.com>
Date: Mon, 19 Nov 2018 11:46:26 +0000
Subject: [PATCH] Remove arch from name when pkg.list_pkgs is called with
'attr' (bsc#1114029)
Add unit tests for pkg_resource.format_pkg_list
Fix pylint issues
Refactor: Return requested attr even if empty
Add corner cases on package names to unit tests
Fix Zypper/Yum unit test after returning empty requested attrs
Add Yum/Zypper list_pkgs unit tests for multiple versions reported
Compare testing items properly to avoid unwanted failures
Use assertCountEqual when running on Python3
Add missing import for the six module
Strip architecture from package name in aptpkg module
Use parse_arch_from_name if available on the virtual pkg module
Adapt unit tests after introducing parse_arch_from_name
Use PKG_ARCH_SEPARATOR in pkg.normalize_name method
Add pkg_resource to setup loader modules. Fix pylint
Remove unnecessary lambda
Return None instead of empty string for arch and release in pkg.list_pkgs
---
salt/modules/aptpkg.py | 38 ++++++++
salt/modules/pkg_resource.py | 17 +++-
salt/modules/yumpkg.py | 32 ++++++-
salt/modules/zypper.py | 29 +++++-
tests/unit/modules/test_pkg_resource.py | 116 ++++++++++++++++++++++++
tests/unit/modules/test_yumpkg.py | 85 ++++++++++++++++-
tests/unit/modules/test_zypper.py | 81 ++++++++++++++++-
7 files changed, 382 insertions(+), 16 deletions(-)
diff --git a/salt/modules/aptpkg.py b/salt/modules/aptpkg.py
index 42d606926f..1fd4883f2c 100644
--- a/salt/modules/aptpkg.py
+++ b/salt/modules/aptpkg.py
@@ -77,6 +77,7 @@ except ImportError:
# pylint: enable=import-error
APT_LISTS_PATH = "/var/lib/apt/lists"
+PKG_ARCH_SEPARATOR = ':'
# Source format for urllib fallback on PPA handling
LP_SRC_FORMAT = 'deb http://ppa.launchpad.net/{0}/{1}/ubuntu {2} main'
@@ -218,6 +219,43 @@ def _warn_software_properties(repo):
log.warning('Best guess at ppa format: %s', repo)
+def normalize_name(name):
+ '''
+ Strips the architecture from the specified package name, if necessary.
+
+ CLI Example:
+
+ .. code-block:: bash
+
+ salt '*' pkg.normalize_name zsh:amd64
+ '''
+ try:
+ name, arch = name.rsplit(PKG_ARCH_SEPARATOR, 1)
+ except ValueError:
+ return name
+ return name
+
+
+def parse_arch_from_name(name):
+ '''
+ Parse name and architecture from the specified package name.
+
+ CLI Example:
+
+ .. code-block:: bash
+
+ salt '*' pkg.parse_arch_from_name zsh:amd64
+ '''
+ try:
+ _name, _arch = name.rsplit(PKG_ARCH_SEPARATOR, 1)
+ except ValueError:
+ _name, _arch = name, None
+ return {
+ 'name': _name,
+ 'arch': _arch
+ }
+
+
def latest_version(*names, **kwargs):
'''
Return the latest version of the named package available for upgrade or
diff --git a/salt/modules/pkg_resource.py b/salt/modules/pkg_resource.py
index 9b0a8287f5..0c872f1805 100644
--- a/salt/modules/pkg_resource.py
+++ b/salt/modules/pkg_resource.py
@@ -311,22 +311,31 @@ def format_pkg_list(packages, versions_as_list, attr):
'''
ret = copy.deepcopy(packages)
if attr:
+ ret_attr = {}
requested_attr = set(['epoch', 'version', 'release', 'arch',
'install_date', 'install_date_time_t'])
if attr != 'all':
- requested_attr &= set(attr + ['version'])
+ requested_attr &= set(attr + ['version'] + ['arch'])
for name in ret:
+ _parse_arch_from_name = __salt__.get('pkg.parse_arch_from_name', lambda pkgname: {'name': pkgname, 'arch': None})
+ name_arch_d = _parse_arch_from_name(name)
+ _name = name_arch_d['name']
+ _arch = name_arch_d['arch']
+
versions = []
+ pkgname = None
for all_attr in ret[name]:
filtered_attr = {}
for key in requested_attr:
- if all_attr[key]:
+ if key in all_attr:
filtered_attr[key] = all_attr[key]
versions.append(filtered_attr)
- ret[name] = versions
- return ret
+ if _name and filtered_attr.get('arch', None) == _arch:
+ pkgname = _name
+ ret_attr.setdefault(pkgname or name, []).extend(versions)
+ return ret_attr
for name in ret:
ret[name] = [format_version(d['epoch'], d['version'], d['release'])
diff --git a/salt/modules/yumpkg.py b/salt/modules/yumpkg.py
index 51832bf883..cf50d1a4c4 100644
--- a/salt/modules/yumpkg.py
+++ b/salt/modules/yumpkg.py
@@ -65,6 +65,8 @@ log = logging.getLogger(__name__)
__HOLD_PATTERN = r'[\w+]+(?:[.-][^-]+)*'
+PKG_ARCH_SEPARATOR = '.'
+
# Define the module's virtual name
__virtualname__ = 'pkg'
@@ -397,7 +399,7 @@ def normalize_name(name):
salt '*' pkg.normalize_name zsh.x86_64
'''
try:
- arch = name.rsplit('.', 1)[-1]
+ arch = name.rsplit(PKG_ARCH_SEPARATOR, 1)[-1]
if arch not in salt.utils.pkg.rpm.ARCHES + ('noarch',):
return name
except ValueError:
@@ -408,6 +410,30 @@ def normalize_name(name):
return name
+def parse_arch_from_name(name):
+ '''
+ Parse name and architecture from the specified package name.
+
+ CLI Example:
+
+ .. code-block:: bash
+
+ salt '*' pkg.parse_arch_from_name zsh.x86_64
+ '''
+ _name, _arch = None, None
+ try:
+ _name, _arch = name.rsplit(PKG_ARCH_SEPARATOR, 1)
+ except ValueError:
+ pass
+ if _arch not in salt.utils.pkg.rpm.ARCHES + ('noarch',):
+ _name = name
+ _arch = None
+ return {
+ 'name': _name,
+ 'arch': _arch
+ }
+
+
def latest_version(*names, **kwargs):
'''
Return the latest version of the named package available for upgrade or
@@ -647,8 +673,8 @@ def list_pkgs(versions_as_list=False, **kwargs):
if pkginfo is not None:
# see rpm version string rules available at https://goo.gl/UGKPNd
pkgver = pkginfo.version
- epoch = ''
- release = ''
+ epoch = None
+ release = None
if ':' in pkgver:
epoch, pkgver = pkgver.split(":", 1)
if '-' in pkgver:
diff --git a/salt/modules/zypper.py b/salt/modules/zypper.py
index 773354b2f3..ae66e4709d 100644
--- a/salt/modules/zypper.py
+++ b/salt/modules/zypper.py
@@ -52,6 +52,7 @@ ZYPP_HOME = '/etc/zypp'
LOCKS = '{0}/locks'.format(ZYPP_HOME)
REPOS = '{0}/repos.d'.format(ZYPP_HOME)
DEFAULT_PRIORITY = 99
+PKG_ARCH_SEPARATOR = '.'
# Define the module's virtual name
__virtualname__ = 'pkg'
@@ -588,6 +589,30 @@ def info_available(*names, **kwargs):
return ret
+def parse_arch_from_name(name):
+ '''
+ Parse name and architecture from the specified package name.
+
+ CLI Example:
+
+ .. code-block:: bash
+
+ salt '*' pkg.parse_arch_from_name zsh.x86_64
+ '''
+ _name, _arch = None, None
+ try:
+ _name, _arch = name.rsplit(PKG_ARCH_SEPARATOR, 1)
+ except ValueError:
+ pass
+ if _arch not in salt.utils.pkg.rpm.ARCHES + ('noarch',):
+ _name = name
+ _arch = None
+ return {
+ 'name': _name,
+ 'arch': _arch
+ }
+
+
def latest_version(*names, **kwargs):
'''
Return the latest version of the named package available for upgrade or
@@ -756,8 +781,8 @@ def list_pkgs(versions_as_list=False, **kwargs):
if pkginfo is not None:
# see rpm version string rules available at https://goo.gl/UGKPNd
pkgver = pkginfo.version
- epoch = ''
- release = ''
+ epoch = None
+ release = None
if ':' in pkgver:
epoch, pkgver = pkgver.split(":", 1)
if '-' in pkgver:
diff --git a/tests/unit/modules/test_pkg_resource.py b/tests/unit/modules/test_pkg_resource.py
index dd3ae9a1ac..2cfd6bb16a 100644
--- a/tests/unit/modules/test_pkg_resource.py
+++ b/tests/unit/modules/test_pkg_resource.py
@@ -129,6 +129,122 @@ class PkgresTestCase(TestCase, LoaderModuleMockMixin):
'''
self.assertIsNone(pkg_resource.sort_pkglist({}))
+ def test_format_pkg_list_no_attr(self):
+ '''
+ Test to output format of the package list with no attr parameter.
+ '''
+ packages = {
+ 'glibc': [{'version': '2.12', 'epoch': None, 'release': '1.212.el6', 'arch': 'x86_64'}],
+ 'glibc.i686': [{'version': '2.12', 'epoch': None, 'release': '1.212.el6', 'arch': 'i686'}],
+ 'foobar': [
+ {'version': '1.2.0', 'epoch': '2', 'release': '7', 'arch': 'x86_64'},
+ {'version': '1.2.3', 'epoch': '2', 'release': '27', 'arch': 'x86_64'},
+ ],
+ 'foobar.something': [{'version': '1.1', 'epoch': '3', 'release': '23.1', 'arch': 'i686'}],
+ 'foobar.': [{'version': '1.1', 'epoch': '3', 'release': '23.1', 'arch': 'i686'}]
+ }
+ expected_pkg_list = {
+ 'glibc': '2.12-1.212.el6',
+ 'glibc.i686': '2.12-1.212.el6',
+ 'foobar': '2:1.2.0-7,2:1.2.3-27',
+ 'foobar.something': '3:1.1-23.1',
+ 'foobar.': '3:1.1-23.1',
+ }
+ if six.PY3:
+ self.assertCountEqual(pkg_resource.format_pkg_list(packages, False, None), expected_pkg_list)
+ else:
+ self.assertItemsEqual(pkg_resource.format_pkg_list(packages, False, None), expected_pkg_list)
+
+ def test_format_pkg_list_with_attr(self):
+ '''
+ Test to output format of the package list with attr parameter.
+ In this case, any redundant "arch" reference will be removed from the package name since it's
+ included as part of the requested attr.
+ '''
+ NAME_ARCH_MAPPING = {
+ 'glibc': {
+ 'name': 'glibc',
+ 'arch': None
+ },
+ 'glibc.i686': {
+ 'name': 'glibc',
+ 'arch': 'i686'
+ },
+ 'foobar': {
+ 'name': 'foobar',
+ 'arch': None
+ },
+ 'foobar.something': {
+ 'name': 'foobar.something',
+ 'arch': None
+ },
+ 'foobar.': {
+ 'name': 'foobar.',
+ 'arch': None
+ }
+ }
+ packages = {
+ 'glibc': [{'version': '2.12', 'epoch': None, 'release': '1.212.el6', 'arch': 'x86_64'}],
+ 'glibc.i686': [{'version': '2.12', 'epoch': None, 'release': '1.212.el6', 'arch': 'i686'}],
+ 'foobar': [
+ {'version': '1.2.0', 'epoch': '2', 'release': '7', 'arch': 'x86_64'},
+ {'version': '1.2.3', 'epoch': '2', 'release': '27', 'arch': 'x86_64'},
+ ],
+ 'foobar.something': [{'version': '1.1', 'epoch': '3', 'release': '23.1', 'arch': 'i686'}],
+ 'foobar.': [{'version': '1.1', 'epoch': '3', 'release': '23.1', 'arch': 'i686'}]
+ }
+ expected_pkg_list = {
+ 'glibc': [
+ {
+ 'arch': 'x86_64',
+ 'release': '1.212.el6',
+ 'epoch': None,
+ 'version': '2.12'
+ },
+ {
+ 'arch': 'i686',
+ 'release': '1.212.el6',
+ 'epoch': None,
+ 'version': '2.12'
+ }
+ ],
+ 'foobar': [
+ {
+ 'arch': 'x86_64',
+ 'release': '7',
+ 'epoch': '2',
+ 'version': '1.2.0'
+ },
+ {
+ 'arch': 'x86_64',
+ 'release': '27',
+ 'epoch': '2',
+ 'version': '1.2.3'
+ }
+ ],
+ 'foobar.': [
+ {
+ 'arch': 'i686',
+ 'release': '23.1',
+ 'epoch': '3',
+ 'version': '1.1'
+ }
+ ],
+ 'foobar.something': [
+ {
+ 'arch': 'i686',
+ 'release': '23.1',
+ 'epoch': '3',
+ 'version': '1.1'
+ }
+ ]
+ }
+ with patch.dict(pkg_resource.__salt__, {'pkg.parse_arch_from_name': NAME_ARCH_MAPPING.get}):
+ if six.PY3:
+ self.assertCountEqual(pkg_resource.format_pkg_list(packages, False, attr=['epoch', 'release']), expected_pkg_list)
+ else:
+ self.assertItemsEqual(pkg_resource.format_pkg_list(packages, False, attr=['epoch', 'release']), expected_pkg_list)
+
def test_stringify(self):
'''
Test to takes a dict of package name/version information
diff --git a/tests/unit/modules/test_yumpkg.py b/tests/unit/modules/test_yumpkg.py
index c73f2582b9..324c2c8b66 100644
--- a/tests/unit/modules/test_yumpkg.py
+++ b/tests/unit/modules/test_yumpkg.py
@@ -16,6 +16,7 @@ from tests.support.mock import (
)
# Import Salt libs
+from salt.ext import six
import salt.modules.yumpkg as yumpkg
import salt.modules.pkg_resource as pkg_resource
@@ -69,7 +70,8 @@ class YumTestCase(TestCase, LoaderModuleMockMixin):
'os_family': 'RedHat',
'osmajorrelease': 7,
},
- }
+ },
+ pkg_resource: {}
}
def test_list_pkgs(self):
@@ -100,7 +102,8 @@ class YumTestCase(TestCase, LoaderModuleMockMixin):
patch.dict(yumpkg.__salt__, {'cmd.run': MagicMock(return_value=os.linesep.join(rpm_out))}), \
patch.dict(yumpkg.__salt__, {'pkg_resource.add_pkg': _add_data}), \
patch.dict(yumpkg.__salt__, {'pkg_resource.format_pkg_list': pkg_resource.format_pkg_list}), \
- patch.dict(yumpkg.__salt__, {'pkg_resource.stringify': MagicMock()}):
+ patch.dict(yumpkg.__salt__, {'pkg_resource.stringify': MagicMock()}), \
+ patch.dict(pkg_resource.__salt__, {'pkg.parse_arch_from_name': yumpkg.parse_arch_from_name}):
pkgs = yumpkg.list_pkgs(versions_as_list=True)
for pkg_name, pkg_version in {
'python-urlgrabber': '3.10-8.el7',
@@ -147,7 +150,8 @@ class YumTestCase(TestCase, LoaderModuleMockMixin):
patch.dict(yumpkg.__salt__, {'cmd.run': MagicMock(return_value=os.linesep.join(rpm_out))}), \
patch.dict(yumpkg.__salt__, {'pkg_resource.add_pkg': _add_data}), \
patch.dict(yumpkg.__salt__, {'pkg_resource.format_pkg_list': pkg_resource.format_pkg_list}), \
- patch.dict(yumpkg.__salt__, {'pkg_resource.stringify': MagicMock()}):
+ patch.dict(yumpkg.__salt__, {'pkg_resource.stringify': MagicMock()}), \
+ patch.dict(pkg_resource.__salt__, {'pkg.parse_arch_from_name': yumpkg.parse_arch_from_name}):
pkgs = yumpkg.list_pkgs(attr=['epoch', 'release', 'arch', 'install_date_time_t'])
for pkg_name, pkg_attr in {
'python-urlgrabber': {
@@ -155,54 +159,63 @@ class YumTestCase(TestCase, LoaderModuleMockMixin):
'release': '8.el7',
'arch': 'noarch',
'install_date_time_t': 1487838471,
+ 'epoch': None
},
'alsa-lib': {
'version': '1.1.1',
'release': '1.el7',
'arch': 'x86_64',
'install_date_time_t': 1487838475,
+ 'epoch': None
},
'gnupg2': {
'version': '2.0.22',
'release': '4.el7',
'arch': 'x86_64',
'install_date_time_t': 1487838477,
+ 'epoch': None
},
'rpm-python': {
'version': '4.11.3',
'release': '21.el7',
'arch': 'x86_64',
'install_date_time_t': 1487838477,
+ 'epoch': None
},
'pygpgme': {
'version': '0.3',
'release': '9.el7',
'arch': 'x86_64',
'install_date_time_t': 1487838478,
+ 'epoch': None
},
'yum': {
'version': '3.4.3',
'release': '150.el7.centos',
'arch': 'noarch',
'install_date_time_t': 1487838479,
+ 'epoch': None
},
'lzo': {
'version': '2.06',
'release': '8.el7',
'arch': 'x86_64',
'install_date_time_t': 1487838479,
+ 'epoch': None
},
'qrencode-libs': {
'version': '3.4.1',
'release': '3.el7',
'arch': 'x86_64',
'install_date_time_t': 1487838480,
+ 'epoch': None
},
'ustr': {
'version': '1.0.4',
'release': '16.el7',
'arch': 'x86_64',
'install_date_time_t': 1487838480,
+ 'epoch': None
},
'shadow-utils': {
'epoch': '2',
@@ -216,22 +229,88 @@ class YumTestCase(TestCase, LoaderModuleMockMixin):
'release': '33.el7',
'arch': 'x86_64',
'install_date_time_t': 1487838484,
+ 'epoch': None
},
'openssh': {
'version': '6.6.1p1',
'release': '33.el7_3',
'arch': 'x86_64',
'install_date_time_t': 1487838485,
+ 'epoch': None
},
'virt-what': {
'version': '1.13',
'release': '8.el7',
'install_date_time_t': 1487838486,
'arch': 'x86_64',
+ 'epoch': None
}}.items():
+
self.assertTrue(pkgs.get(pkg_name))
self.assertEqual(pkgs[pkg_name], [pkg_attr])
+ def test_list_pkgs_with_attr_multiple_versions(self):
+ '''
+ Test packages listing with the attr parameter reporting multiple versions installed
+
+ :return:
+ '''
+ def _add_data(data, key, value):
+ data.setdefault(key, []).append(value)
+
+ rpm_out = [
+ 'glibc_|-(none)_|-2.12_|-1.212.el6_|-i686_|-(none)_|-1542394210',
+ 'glibc_|-(none)_|-2.12_|-1.212.el6_|-x86_64_|-(none)_|-1542394204',
+ 'virt-what_|-(none)_|-1.13_|-8.el7_|-x86_64_|-(none)_|-1487838486',
+ 'virt-what_|-(none)_|-1.10_|-2.el7_|-x86_64_|-(none)_|-1387838486',
+ ]
+ with patch.dict(yumpkg.__grains__, {'osarch': 'x86_64'}), \
+ patch.dict(yumpkg.__salt__, {'cmd.run': MagicMock(return_value=os.linesep.join(rpm_out))}), \
+ patch.dict(yumpkg.__salt__, {'pkg_resource.add_pkg': _add_data}), \
+ patch.dict(yumpkg.__salt__, {'pkg_resource.format_pkg_list': pkg_resource.format_pkg_list}), \
+ patch.dict(yumpkg.__salt__, {'pkg_resource.stringify': MagicMock()}), \
+ patch.dict(pkg_resource.__salt__, {'pkg.parse_arch_from_name': yumpkg.parse_arch_from_name}):
+ pkgs = yumpkg.list_pkgs(attr=['epoch', 'release', 'arch', 'install_date_time_t'])
+ expected_pkg_list = {
+ 'glibc': [
+ {
+ 'version': '2.12',
+ 'release': '1.212.el6',
+ 'install_date_time_t': 1542394210,
+ 'arch': 'i686',
+ 'epoch': None
+ },
+ {
+ 'version': '2.12',
+ 'release': '1.212.el6',
+ 'install_date_time_t': 1542394204,
+ 'arch': 'x86_64',
+ 'epoch': None
+ }
+ ],
+ 'virt-what': [
+ {
+ 'version': '1.10',
+ 'release': '2.el7',
+ 'install_date_time_t': 1387838486,
+ 'arch': 'x86_64',
+ 'epoch': None
+ },
+ {
+ 'version': '1.13',
+ 'release': '8.el7',
+ 'install_date_time_t': 1487838486,
+ 'arch': 'x86_64',
+ 'epoch': None
+ }
+ ]
+ }
+ for pkgname, pkginfo in pkgs.items():
+ if six.PY3:
+ self.assertCountEqual(pkginfo, expected_pkg_list[pkgname])
+ else:
+ self.assertItemsEqual(pkginfo, expected_pkg_list[pkgname])
+
def test_latest_version_with_options(self):
with patch.object(yumpkg, 'list_pkgs', MagicMock(return_value={})):
diff --git a/tests/unit/modules/test_zypper.py b/tests/unit/modules/test_zypper.py
index 424438c8bf..a60e209b2c 100644
--- a/tests/unit/modules/test_zypper.py
+++ b/tests/unit/modules/test_zypper.py
@@ -61,7 +61,7 @@ class ZypperTestCase(TestCase, LoaderModuleMockMixin):
'''
def setup_loader_modules(self):
- return {zypper: {'rpm': None}}
+ return {zypper: {'rpm': None}, pkg_resource: {}}
def setUp(self):
self.new_repo_config = dict(
@@ -603,7 +603,8 @@ Repository 'DUMMY' not found by its alias, number, or URI.
patch.dict(zypper.__grains__, {'osarch': 'x86_64'}), \
patch.dict(zypper.__salt__, {'pkg_resource.add_pkg': _add_data}), \
patch.dict(zypper.__salt__, {'pkg_resource.format_pkg_list': pkg_resource.format_pkg_list}), \
- patch.dict(zypper.__salt__, {'pkg_resource.stringify': MagicMock()}):
+ patch.dict(zypper.__salt__, {'pkg_resource.stringify': MagicMock()}), \
+ patch.dict(pkg_resource.__salt__, {'pkg.parse_arch_from_name': zypper.parse_arch_from_name}):
pkgs = zypper.list_pkgs(attr=['epoch', 'release', 'arch', 'install_date_time_t'])
self.assertFalse(pkgs.get('gpg-pubkey', False))
for pkg_name, pkg_attr in {
@@ -612,58 +613,130 @@ Repository 'DUMMY' not found by its alias, number, or URI.
'release': '129.686',
'arch': 'noarch',
'install_date_time_t': 1498636511,
+ 'epoch': None,
}],
'yast2-ftp-server': [{
'version': '3.1.8',
'release': '8.1',
'arch': 'x86_64',
'install_date_time_t': 1499257798,
+ 'epoch': None,
}],
'protobuf-java': [{
'version': '2.6.1',
'release': '3.1.develHead',
'install_date_time_t': 1499257756,
'arch': 'noarch',
+ 'epoch': None,
}],
'susemanager-build-keys-web': [{
'version': '12.0',
'release': '5.1.develHead',
'arch': 'noarch',
'install_date_time_t': 1498636510,
+ 'epoch': None,
}],
'apache-commons-cli': [{
'version': '1.2',
'release': '1.233',
'arch': 'noarch',
'install_date_time_t': 1498636510,
+ 'epoch': None,
}],
'kernel-default': [{
'version': '4.4.138',
'release': '94.39.1',
'arch': 'x86_64',
- 'install_date_time_t': 1529936067
+ 'install_date_time_t': 1529936067,
+ 'epoch': None,
},
{
'version': '4.4.73',
'release': '5.1',
'arch': 'x86_64',
'install_date_time_t': 1503572639,
+ 'epoch': None,
}],
- 'perseus-dummy.i586': [{
+ 'perseus-dummy': [{
'version': '1.1',
'release': '1.1',
'arch': 'i586',
'install_date_time_t': 1529936062,
+ 'epoch': None,
}],
'jose4j': [{
'arch': 'noarch',
'version': '0.4.4',
'release': '2.1.develHead',
'install_date_time_t': 1499257756,
+ 'epoch': None,
}]}.items():
self.assertTrue(pkgs.get(pkg_name))
self.assertEqual(pkgs[pkg_name], pkg_attr)
+ def test_list_pkgs_with_attr_multiple_versions(self):
+ '''
+ Test packages listing with the attr parameter reporting multiple versions installed
+
+ :return:
+ '''
+ def _add_data(data, key, value):
+ data.setdefault(key, []).append(value)
+
+ rpm_out = [
+ 'glibc_|-2.12_|-1.212.el6_|-i686_|-_|-1542394210',
+ 'glibc_|-2.12_|-1.212.el6_|-x86_64_|-_|-1542394204',
+ 'virt-what_|-1.13_|-8.el7_|-x86_64_|-_|-1487838486',
+ 'virt-what_|-1.10_|-2.el7_|-x86_64_|-_|-1387838486',
+ ]
+
+ with patch.dict(zypper.__salt__, {'cmd.run': MagicMock(return_value=os.linesep.join(rpm_out))}), \
+ patch.dict(zypper.__grains__, {'osarch': 'x86_64'}), \
+ patch.dict(zypper.__salt__, {'pkg_resource.add_pkg': _add_data}), \
+ patch.dict(zypper.__salt__, {'pkg_resource.format_pkg_list': pkg_resource.format_pkg_list}), \
+ patch.dict(zypper.__salt__, {'pkg_resource.stringify': MagicMock()}), \
+ patch.dict(pkg_resource.__salt__, {'pkg.parse_arch_from_name': zypper.parse_arch_from_name}):
+ pkgs = zypper.list_pkgs(attr=['epoch', 'release', 'arch', 'install_date_time_t'])
+ expected_pkg_list = {
+ 'glibc': [
+ {
+ 'version': '2.12',
+ 'release': '1.212.el6',
+ 'install_date_time_t': 1542394210,
+ 'arch': 'i686',
+ 'epoch': None
+ },
+ {
+ 'version': '2.12',
+ 'release': '1.212.el6',
+ 'install_date_time_t': 1542394204,
+ 'arch': 'x86_64',
+ 'epoch': None
+ }
+ ],
+ 'virt-what': [
+ {
+ 'version': '1.10',
+ 'release': '2.el7',
+ 'install_date_time_t': 1387838486,
+ 'arch': 'x86_64',
+ 'epoch': None
+ },
+ {
+ 'version': '1.13',
+ 'release': '8.el7',
+ 'install_date_time_t': 1487838486,
+ 'arch': 'x86_64',
+ 'epoch': None
+ }
+ ]
+ }
+ for pkgname, pkginfo in pkgs.items():
+ if six.PY3:
+ self.assertCountEqual(pkginfo, expected_pkg_list[pkgname])
+ else:
+ self.assertItemsEqual(pkginfo, expected_pkg_list[pkgname])
+
def test_list_patches(self):
'''
Test advisory patches listing.
--
2.17.1

View File

@ -0,0 +1,43 @@
From 0908344fae3edda3372ee03820ea30ebcfe8980e Mon Sep 17 00:00:00 2001
From: Bo Maryniuk <bo@suse.de>
Date: Thu, 13 Sep 2018 12:00:55 +0200
Subject: [PATCH] Retire MD5 checksum for pkg mgmt plugins
Use SHA256 algorithm for zyppnotify plugin
Remove an empty line
---
scripts/suse/yum/plugins/yumnotify.py | 2 +-
scripts/suse/zypper/plugins/commit/zyppnotify | 2 +-
2 files changed, 2 insertions(+), 2 deletions(-)
diff --git a/scripts/suse/yum/plugins/yumnotify.py b/scripts/suse/yum/plugins/yumnotify.py
index 268e1e9531..dd2485c886 100644
--- a/scripts/suse/yum/plugins/yumnotify.py
+++ b/scripts/suse/yum/plugins/yumnotify.py
@@ -32,7 +32,7 @@ def _get_checksum():
Returns:
hexdigest
"""
- digest = hashlib.md5()
+ digest = hashlib.sha256()
with open(RPM_PATH, "rb") as rpm_db_fh:
while True:
buff = rpm_db_fh.read(0x1000)
diff --git a/scripts/suse/zypper/plugins/commit/zyppnotify b/scripts/suse/zypper/plugins/commit/zyppnotify
index 268298b108..b64badb119 100755
--- a/scripts/suse/zypper/plugins/commit/zyppnotify
+++ b/scripts/suse/zypper/plugins/commit/zyppnotify
@@ -35,7 +35,7 @@ class DriftDetector(Plugin):
Returns:
hexdigest
'''
- digest = hashlib.md5()
+ digest = hashlib.sha256()
with open(self.rpm_path, "rb") as rpm_db_fh:
while True:
buff = rpm_db_fh.read(0x1000)
--
2.20.1

View File

@ -0,0 +1,31 @@
From 2cbc403b422a699cd948ed6218fce28fa901f5fa Mon Sep 17 00:00:00 2001
From: Mihai Dinca <mdinca@suse.de>
Date: Thu, 13 Dec 2018 12:17:35 +0100
Subject: [PATCH] Return the expected powerpc os arch (bsc#1117995)
---
salt/utils/pkg/rpm.py | 7 +++++--
1 file changed, 5 insertions(+), 2 deletions(-)
diff --git a/salt/utils/pkg/rpm.py b/salt/utils/pkg/rpm.py
index bb8c3fb589..828b0cecda 100644
--- a/salt/utils/pkg/rpm.py
+++ b/salt/utils/pkg/rpm.py
@@ -53,8 +53,11 @@ def get_osarch():
stderr=subprocess.PIPE).communicate()[0]
else:
ret = ''.join(list(filter(None, platform.uname()[-2:]))[-1:])
-
- return salt.utils.stringutils.to_str(ret).strip() or 'unknown'
+ ret = salt.utils.stringutils.to_str(ret).strip() or 'unknown'
+ ARCH_FIXES_MAPPING = {
+ "powerpc64le": "ppc64le"
+ }
+ return ARCH_FIXES_MAPPING.get(ret, ret)
def check_32(arch, osarch=None):
--
2.20.1

View File

@ -1,3 +1,253 @@
-------------------------------------------------------------------
Wed Jan 16 16:28:09 UTC 2019 - psuarezhernandez@suse.com
- Do not restrict the Python version to < 3.7
-------------------------------------------------------------------
Tue Jan 15 09:47:12 UTC 2019 - bo@suse.de
- Fix integration tests in state compiler (U#2068)
- Added:
* fix-issue-2068-test.patch
-------------------------------------------------------------------
Fri Jan 11 13:23:13 UTC 2019 - psuarezhernandez@suse.com
- Fix "pkg.list_pkgs" output when using "attr" to take the arch into account (bsc#1114029)
- Added:
* remove-arch-from-name-when-pkg.list_pkgs-is-called-w.patch
-------------------------------------------------------------------
Thu Jan 10 12:52:09 UTC 2019 - mdinca <mdinca@suse.de>
- Fix powerpc null server_id_arch (bsc#1117995)
- Added:
* return-the-expected-powerpc-os-arch-bsc-1117995.patch
-------------------------------------------------------------------
Thu Jan 10 09:53:33 UTC 2019 - bo@suse.de
- Fix module 'azure.storage' has no attribute '__version__'
(bsc#1121091)
- Added:
* azurefs-gracefully-handle-attributeerror.patch
-------------------------------------------------------------------
Fri Jan 4 13:29:50 UTC 2019 - bo@suse.de
- Add supportconfig module and states for minions and SaltSSH
- Added:
* add-supportconfig-module-for-remote-calls-and-saltss.patch
-------------------------------------------------------------------
Thu Jan 3 16:35:30 UTC 2019 - bo@suse.de
- Fix FIPS enabled RES clients (bsc#1099887)
- Added:
* retire-md5-checksum-for-pkg-mgmt-plugins.patch
-------------------------------------------------------------------
Thu Jan 3 15:48:20 UTC 2019 - bo@suse.de
- Add hold/unhold functions. Fix Debian repo "signed-by".
- Added:
* decide-if-the-source-should-be-actually-skipped.patch
* add-hold-unhold-functions.patch
-------------------------------------------------------------------
Tue Dec 4 16:28:21 UTC 2018 - psuarezhernandez@suse.com
- Fix latin1 encoding problems on file module (bsc#1116837)
- Added:
* fix-latin1-encoding-problems-on-file-module-bsc-1116.patch
-------------------------------------------------------------------
Fri Nov 30 13:14:19 UTC 2018 - bo@suse.de
- Don't error on retcode 0 in libcrypto.OPENSSL_init_crypto
- Added:
* don-t-error-on-retcode-0-in-libcrypto.openssl_init_c.patch
-------------------------------------------------------------------
Tue Nov 20 15:33:39 UTC 2018 - bo@suse.de
- Debian info_installed compatibility (U#50453)
- Added:
* debian-info_installed-compatibility-50453.patch
-------------------------------------------------------------------
Fri Nov 16 14:17:45 UTC 2018 - bo@suse.de
- Add compatibility with other package modules for "list_repos" function
- Bugfix: unable to detect os arch when RPM is not installed (bsc#1114197)
- Added:
* make-aptpkg.list_repos-compatible-on-enabled-disable.patch
* get-os_arch-also-without-rpm-package-installed.patch
-------------------------------------------------------------------
Thu Nov 8 09:32:49 UTC 2018 - psuarezhernandez@suse.com
- Fix git_pillar merging across multiple __env__ repositories (bsc#1112874)
- Added:
* fix-git_pillar-merging-across-multiple-__env__-repos.patch
-------------------------------------------------------------------
Wed Oct 31 14:52:31 UTC 2018 - bo@suse.de
- Fix LDAP authentication issue when a valid token is generated
by the salt-api even when invalid user credentials are passed.
(U#48901)
- Added:
* fixing-issue-when-a-valid-token-is-generated-even-wh.patch
-------------------------------------------------------------------
Tue Oct 30 10:48:23 UTC 2018 - Jochen Breuer <jbreuer@suse.de>
- Improved handling of LDAP group id. gid is no longer treated as a
string, which could have led to faulty group creations. (bsc#1113784)
- Added:
* improved-handling-of-ldap-group-id.patch
-------------------------------------------------------------------
Thu Oct 25 13:04:42 UTC 2018 - psuarezhernandez@suse.com
- Fix remote command execution and incorrect access control
when using salt-api. (bsc#1113699) (CVE-2018-15751)
- Fix Directory traversal vulnerability when using salt-api.
Allows an attacker to determine what files exist on
a server when querying /run or /events. (bsc#1113698) (CVE-2018-15750)
- Added:
* fixes-cve-2018-15750-cve-2018-15751.patch
-------------------------------------------------------------------
Thu Oct 18 13:17:33 UTC 2018 - bo@suse.de
- Add multi-file support and globbing to the filetree (U#50018)
- Added:
* add-multi-file-support-and-globbing-to-the-filetree-.patch
-------------------------------------------------------------------
Wed Oct 17 15:21:17 UTC 2018 - bo@suse.de
- Bugfix: supportconfig non-root permission issues (U#50095)
- Added:
* support-config-non-root-permission-issues-fixes-u-50.patch
-------------------------------------------------------------------
Wed Oct 17 14:18:09 UTC 2018 - bo@suse.de
- Open profiles permissions to everyone for read-only
-------------------------------------------------------------------
Tue Oct 16 15:26:16 UTC 2018 - bo@suse.de
- Preserving signature in "module.run" state (U#50049)
- Added:
* preserving-signature-in-module.run-state-u-50049.patch
-------------------------------------------------------------------
Fri Oct 12 11:48:40 UTC 2018 - bo@suse.de
- Install default salt-support profiles
-------------------------------------------------------------------
Thu Oct 11 15:04:30 UTC 2018 - bo@suse.de
- Fix unit tests due to merger failure
- Add CPE_NAME for osversion* grain parsing
- Get os_family for RPM distros from the RPM macros
- Install support profiles
- Added:
* get-os_family-for-rpm-distros-from-the-rpm-macros.-u.patch
* add-cpe_name-for-osversion-grain-parsing-u-49946.patch
* make-profiles-a-package.patch
* fix-unit-test-for-grains-core.patch
-------------------------------------------------------------------
Tue Oct 9 14:50:25 UTC 2018 - psuarezhernandez@suse.com
- Bugfix: any unicode string of length 16 will raise TypeError
- Added:
* bugfix-any-unicode-string-of-length-16-will-raise-ty.patch
-------------------------------------------------------------------
Mon Oct 8 08:52:23 UTC 2018 - psuarezhernandez@suse.com
- Fix async call to process manager (bsc#1110938)
- Early feature: Salt support-config (salt-support)
- Added:
* fix-async-call-to-process-manager.patch
* early-feature-support-config.patch
-------------------------------------------------------------------
Mon Oct 1 16:03:27 UTC 2018 - bo@suse.de
- Fix IPv6 scope (bsc#1108557)
- Added:
* fix-ipv6-scope-bsc-1108557.patch
-------------------------------------------------------------------
Fri Sep 28 12:37:02 UTC 2018 - bo@suse.de
- Handle zypper ZYPPER_EXIT_NO_REPOS exit code (bsc#1108834, bsc#1109893)
- Added:
* update-error-list-for-zypper.patch
-------------------------------------------------------------------
Mon Sep 24 15:49:47 UTC 2018 - bo@suse.de
- Bugfix for pkg_resources crash (bsc#1104491)
- Added:
* do-not-load-pip-state-if-there-is-no-3rd-party-depen.patch
-------------------------------------------------------------------
Fri Sep 21 15:39:49 UTC 2018 - psuarezhernandez@suse.com
- Fix loosen azure sdk dependencies in azurearm cloud driver (bsc#1107333)
- Added:
* loosen-azure-sdk-dependencies-in-azurearm-cloud-driv.patch
-------------------------------------------------------------------
Thu Sep 20 11:25:57 UTC 2018 - psuarezhernandez@suse.com
- Fix broken "resolve_capabilities" on Python 3 (bsc#1108995)
- Added:
* fix-index-error-when-running-on-python-3.patch
-------------------------------------------------------------------
Wed Sep 19 13:06:21 UTC 2018 - psuarezhernandez@suse.com
- Allow empty service_account_private_key in GCE driver (bsc#1108969)
- Added:
* support-use-of-gce-instance-credentials-109.patch
-------------------------------------------------------------------
Tue Sep 18 14:28:13 UTC 2018 - mihai.dinca@suse.com

128
salt.spec
View File

@ -15,7 +15,6 @@
# Please submit bugfixes or comments via http://bugs.opensuse.org/
#
%if 0%{?suse_version} >= 1320
# SLE15
%global build_py3 1
@ -150,6 +149,76 @@ Patch41: change-stringio-import-in-python2-to-import-the-clas.patch
Patch42: use-adler32-algorithm-to-compute-string-checksums.patch
# PATCH-FIX_UPSTREAM https://github.com/saltstack/salt/pull/49497
Patch43: x509-fixes-111.patch
# PATCH-FIX_UPSTREAM https://github.com/saltstack/salt/pull/49605
Patch44: support-use-of-gce-instance-credentials-109.patch
# PATCH-FIX_UPSTREAM https://github.com/saltstack/salt/pull/49722
Patch45: fix-index-error-when-running-on-python-3.patch
# PATCH-FIX_UPSTREAM https://github.com/saltstack/salt/pull/49696
Patch46: loosen-azure-sdk-dependencies-in-azurearm-cloud-driv.patch
# PATCH-FIX_UPSTREAM https://github.com/saltstack/salt/pull/49737
Patch47: do-not-load-pip-state-if-there-is-no-3rd-party-depen.patch
# PATCH-FIX_UPSTREAM https://github.com/saltstack/salt/pull/49806
Patch48: update-error-list-for-zypper.patch
# PATCH-FIX_UPSTREAM https://github.com/saltstack/salt/pull/49815
Patch49: fix-ipv6-scope-bsc-1108557.patch
# PATCH-FIX_UPSTREAM https://github.com/saltstack/salt/pull/49480
Patch50: early-feature-support-config.patch
# PATCH-FIX_UPSTREAM https://github.com/saltstack/salt/pull/49085
Patch51: fix-async-call-to-process-manager.patch
# PATCH-FIX_UPSTREAM https://github.com/saltstack/salt/pull/49908
Patch52: bugfix-any-unicode-string-of-length-16-will-raise-ty.patch
# PATCH-FIX_UPSTREAM https://github.com/saltstack/salt/pull/49936
Patch53: make-profiles-a-package.patch
# PATCH-FIX_UPSTREAM https://github.com/saltstack/salt/pull/49930
Patch54: get-os_family-for-rpm-distros-from-the-rpm-macros.-u.patch
# PATCH-FIX_UPSTREAM https://github.com/saltstack/salt/pull/49946
Patch55: add-cpe_name-for-osversion-grain-parsing-u-49946.patch
# PATCH-FIX_OPENSUSE: Fix unit test for grains core
Patch56: fix-unit-test-for-grains-core.patch
# PATCH-FIX_UPSTREAM https://github.com/saltstack/salt/pull/50049
# PATCH-FIX_UPSTREAM https://github.com/saltstack/salt/pull/50072
Patch57: preserving-signature-in-module.run-state-u-50049.patch
# PATCH-FIX_UPSTREAM https://github.com/saltstack/salt/pull/50095
Patch58: support-config-non-root-permission-issues-fixes-u-50.patch
# PATCH-FIX_UPSTREAM https://github.com/saltstack/salt/pull/50018
Patch59: add-multi-file-support-and-globbing-to-the-filetree-.patch
# PATCH-FIX_UPSTREAM https://github.com/saltstack/salt/pull/49761
# PATCH-FIX_UPSTREAM https://github.com/saltstack/salt/pull/50201
Patch60: fixes-cve-2018-15750-cve-2018-15751.patch
# PATCH-FIX_UPSTREAM https://github.com/saltstack/salt/pull/48491
Patch61: improved-handling-of-ldap-group-id.patch
# PATCH-FIX_UPSTREAM https://github.com/saltstack/salt/pull/48901
Patch62: fixing-issue-when-a-valid-token-is-generated-even-wh.patch
# PATCH-FIX_UPSTREAM https://github.com/saltstack/salt/pull/50417
Patch63: fix-git_pillar-merging-across-multiple-__env__-repos.patch
# PATCH-FIX_UPSTREAM https://github.com/saltstack/salt/pull/50523
Patch64: get-os_arch-also-without-rpm-package-installed.patch
# PATCH-FIX_UPSTREAM https://github.com/saltstack/salt/pull/50392
Patch65: make-aptpkg.list_repos-compatible-on-enabled-disable.patch
# PATCH-FIX_UPSTREAM https://github.com/saltstack/salt/pull/50453
Patch66: debian-info_installed-compatibility-50453.patch
# PATCH-FIX_UPSTREAM https://github.com/saltstack/salt/pull/48580
Patch67: don-t-error-on-retcode-0-in-libcrypto.openssl_init_c.patch
# PATCH-FIX_UPSTREAM https://github.com/saltstack/salt/pull/48503
# PATCH-FIX_UPSTREAM https://github.com/saltstack/salt/pull/48934
Patch68: fix-latin1-encoding-problems-on-file-module-bsc-1116.patch
# PATCH-FIX_UPSTREAM https://github.com/saltstack/salt/pull/50742
Patch69: decide-if-the-source-should-be-actually-skipped.patch
# PATCH-FIX_UPSTREAM https://github.com/saltstack/salt/pull/50773
Patch70: add-hold-unhold-functions.patch
# PATCH-FIX_UPSTREAM https://github.com/saltstack/salt/pull/49639
Patch71: retire-md5-checksum-for-pkg-mgmt-plugins.patch
# PATCH-FIX_UPSTREAM https://github.com/saltstack/salt/pull/50401
# NOTE: This is a techpreview as well as in Fluorine! Release only in Neon.
Patch72: add-supportconfig-module-for-remote-calls-and-saltss.patch
# PATCH-FIX_UPSTREAM https://github.com/saltstack/salt/pull/50567
Patch73: azurefs-gracefully-handle-attributeerror.patch
# PATCH_FIX_OPENSUSE: https://github.com/openSUSE/salt/pull/116
Patch74: return-the-expected-powerpc-os-arch-bsc-1117995.patch
# PATCH-FIX_UPSTREAM https://github.com/saltstack/salt/pull/51108
Patch75: remove-arch-from-name-when-pkg.list_pkgs-is-called-w.patch
# PATCH-FIX_UPSTREAM https://github.com/saltstack/salt/pull/51119
Patch76: fix-issue-2068-test.patch
# BuildRoot: %{_tmppath}/%{name}-%{version}-build
BuildRoot: %{_tmppath}/%{name}-%{version}-build
@ -235,12 +304,12 @@ BuildRequires: python-devel >= 2.7
# requirements/base.txt
%if 0%{?rhel}
BuildRequires: python-jinja2
BuildRequires: python-markupsafe
BuildRequires: python-yaml
BuildRequires: python-markupsafe
%else
BuildRequires: python-Jinja2
BuildRequires: python-MarkupSafe
BuildRequires: python-PyYAML
BuildRequires: python-MarkupSafe
%endif
BuildRequires: python-futures >= 2.0
@ -273,16 +342,16 @@ Requires: python-certifi
# requirements/base.txt
%if 0%{?rhel}
Requires: python-jinja2
Requires: python-markupsafe
Requires: python-yaml
Requires: python-markupsafe
Requires: yum
%if 0%{?rhel} == 6
Requires: yum-plugin-security
%endif
%else
Requires: python-Jinja2
Requires: python-MarkupSafe
Requires: python-PyYAML
Requires: python-MarkupSafe
%endif
Requires: python-futures >= 2.0
@ -322,7 +391,7 @@ Summary: python3 library for salt
Group: System/Management
Requires: %{name} = %{version}-%{release}
BuildRequires: python-rpm-macros
BuildRequires: python3 < 3.7
BuildRequires: python3
BuildRequires: python3-devel
# requirements/base.txt
%if 0%{?rhel}
@ -353,7 +422,7 @@ BuildRequires: python3-xml
%if %{with builddocs}
BuildRequires: python3-sphinx
%endif
Requires: python3 < 3.7
Requires: python3
#
%if ! 0%{?suse_version} > 1110
Requires: python3-certifi
@ -651,6 +720,39 @@ cp %{S:5} ./.travis.yml
%patch41 -p1
%patch42 -p1
%patch43 -p1
%patch44 -p1
%patch45 -p1
%patch46 -p1
%patch47 -p1
%patch48 -p1
%patch49 -p1
%patch50 -p1
%patch51 -p1
%patch52 -p1
%patch53 -p1
%patch54 -p1
%patch55 -p1
%patch56 -p1
%patch57 -p1
%patch58 -p1
%patch59 -p1
%patch60 -p1
%patch61 -p1
%patch62 -p1
%patch63 -p1
%patch64 -p1
%patch65 -p1
%patch66 -p1
%patch67 -p1
%patch68 -p1
%patch69 -p1
%patch70 -p1
%patch71 -p1
%patch72 -p1
%patch73 -p1
%patch74 -p1
%patch75 -p1
%patch76 -p1
%build
%if 0%{?build_py2}
@ -739,6 +841,15 @@ install -Dd -m 0750 %{buildroot}%{_sysconfdir}/salt/pki/master/minions_pre
install -Dd -m 0750 %{buildroot}%{_sysconfdir}/salt/pki/master/minions_rejected
install -Dd -m 0750 %{buildroot}%{_sysconfdir}/salt/pki/minion
# Install salt-support profiles
%if 0%{?build_py2}
install -Dpm 0644 salt/cli/support/profiles/* %{buildroot}%{python_sitelib}/salt/cli/support/profiles
%endif
%if 0%{?build_py3}
install -Dpm 0644 salt/cli/support/profiles/* %{buildroot}%{python3_sitelib}/salt/cli/support/profiles
%endif
## Install Zypper plugins only on SUSE machines
%if 0%{?suse_version}
install -Dd -m 0750 %{buildroot}%{_prefix}/lib/zypp/plugins/commit
@ -1287,6 +1398,7 @@ rm -f %{_localstatedir}/cache/salt/minion/thin/version
%defattr(-,root,root,-)
%{_bindir}/spm
%{_bindir}/salt-call
%{_bindir}/salt-support
%{_bindir}/salt-unity
%{_mandir}/man1/salt-unity.1.gz
%{_mandir}/man1/salt-call.1.gz
@ -1353,3 +1465,5 @@ rm -f %{_localstatedir}/cache/salt/minion/thin/version
%endif
%changelog

View File

@ -0,0 +1,106 @@
From 1113909fe9ab0509ebe439051238d6a4f95d3c54 Mon Sep 17 00:00:00 2001
From: Bo Maryniuk <bo@suse.de>
Date: Wed, 17 Oct 2018 14:10:47 +0200
Subject: [PATCH] Support-config non-root permission issues fixes
(U#50095)
Do not crash if there is no configuration available at all
Handle CLI and log errors
Catch overwriting existing archive error by other users
Suppress excessive tracebacks on error log level
---
salt/cli/support/collector.py | 39 ++++++++++++++++++++++++++++++++---
salt/utils/parsers.py | 2 +-
2 files changed, 37 insertions(+), 4 deletions(-)
diff --git a/salt/cli/support/collector.py b/salt/cli/support/collector.py
index 478d07e13b..a4343297b6 100644
--- a/salt/cli/support/collector.py
+++ b/salt/cli/support/collector.py
@@ -125,6 +125,31 @@ class SupportDataCollector(object):
self.__current_section = []
self.__current_section_name = name
+ def _printout(self, data, output):
+ '''
+ Use salt outputter to printout content.
+
+ :return:
+ '''
+ opts = {'extension_modules': '', 'color': False}
+ try:
+ printout = salt.output.get_printout(output, opts)(data)
+ if printout is not None:
+ return printout.rstrip()
+ except (KeyError, AttributeError, TypeError) as err:
+ log.debug(err, exc_info=True)
+ try:
+ printout = salt.output.get_printout('nested', opts)(data)
+ if printout is not None:
+ return printout.rstrip()
+ except (KeyError, AttributeError, TypeError) as err:
+ log.debug(err, exc_info=True)
+ printout = salt.output.get_printout('raw', opts)(data)
+ if printout is not None:
+ return printout.rstrip()
+
+ return salt.output.try_printout(data, output, opts)
+
def write(self, title, data, output=None):
'''
Add a data to the current opened section.
@@ -138,7 +163,7 @@ class SupportDataCollector(object):
try:
if isinstance(data, dict) and 'return' in data:
data = data['return']
- content = salt.output.try_printout(data, output, {'extension_modules': '', 'color': False})
+ content = self._printout(data, output)
except Exception: # Fall-back to just raw YAML
content = None
else:
@@ -406,7 +431,11 @@ class SaltSupport(salt.utils.parsers.SaltSupportOptionParser):
and self.config.get('support_archive')
and os.path.exists(self.config['support_archive'])):
self.out.warning('Terminated earlier, cleaning up')
- os.unlink(self.config['support_archive'])
+ try:
+ os.unlink(self.config['support_archive'])
+ except Exception as err:
+ log.debug(err)
+ self.out.error('{} while cleaning up.'.format(err))
def _check_existing_archive(self):
'''
@@ -418,7 +447,11 @@ class SaltSupport(salt.utils.parsers.SaltSupportOptionParser):
if os.path.exists(self.config['support_archive']):
if self.config['support_archive_force_overwrite']:
self.out.warning('Overwriting existing archive: {}'.format(self.config['support_archive']))
- os.unlink(self.config['support_archive'])
+ try:
+ os.unlink(self.config['support_archive'])
+ except Exception as err:
+ log.debug(err)
+ self.out.error('{} while trying to overwrite existing archive.'.format(err))
ret = True
else:
self.out.warning('File {} already exists.'.format(self.config['support_archive']))
diff --git a/salt/utils/parsers.py b/salt/utils/parsers.py
index 56a8961c3a..058346a9f4 100644
--- a/salt/utils/parsers.py
+++ b/salt/utils/parsers.py
@@ -1922,7 +1922,7 @@ class SaltSupportOptionParser(six.with_metaclass(OptionParserMeta, OptionParser,
'''
_opts, _args = optparse.OptionParser.parse_args(self)
configs = self.find_existing_configs(_opts.support_unit)
- if cfg not in configs:
+ if configs and cfg not in configs:
cfg = configs[0]
return config.master_config(self.get_config_file_path(cfg))
--
2.19.0

View File

@ -0,0 +1,33 @@
From 4571116a54ff51683cb695ce795f04f8b318b440 Mon Sep 17 00:00:00 2001
From: jgleissner <jgleissner@suse.com>
Date: Wed, 19 Sep 2018 14:37:12 +0200
Subject: [PATCH] Support use of GCE instance credentials (#109)
* Integration of MSI authentication with azurearm cloud driver (#105)
* allow empty service_account_private_key in GCE driver
Passing an empty service_account_private_key to libcloud will enable
authentication using instance credentials, which is used by CaaSP in GCE.
---
salt/cloud/clouds/gce.py | 3 ++-
1 file changed, 2 insertions(+), 1 deletion(-)
diff --git a/salt/cloud/clouds/gce.py b/salt/cloud/clouds/gce.py
index 75109491be..1018e36ed5 100644
--- a/salt/cloud/clouds/gce.py
+++ b/salt/cloud/clouds/gce.py
@@ -134,7 +134,8 @@ def __virtual__():
parameters = details['gce']
pathname = os.path.expanduser(parameters['service_account_private_key'])
- if salt.utils.cloud.check_key_path_and_mode(
+ # empty pathname will tell libcloud to use instance credentials
+ if pathname and salt.utils.cloud.check_key_path_and_mode(
provider, pathname
) is False:
return False
--
2.17.1

View File

@ -0,0 +1,62 @@
From 71e7ecfbb07cf14680a2a39de48a6e60cd20cb07 Mon Sep 17 00:00:00 2001
From: Bo Maryniuk <bo@suse.de>
Date: Wed, 26 Sep 2018 17:54:53 +0200
Subject: [PATCH] Update error list for zypper
Add error logging
---
salt/modules/zypper.py | 30 ++++++++++++++++++++++++++++--
1 file changed, 28 insertions(+), 2 deletions(-)
diff --git a/salt/modules/zypper.py b/salt/modules/zypper.py
index e4423cf1fc..6845e44ab6 100644
--- a/salt/modules/zypper.py
+++ b/salt/modules/zypper.py
@@ -75,7 +75,25 @@ class _Zypper(object):
Allows serial zypper calls (first came, first won).
'''
- SUCCESS_EXIT_CODES = [0, 100, 101, 102, 103]
+ SUCCESS_EXIT_CODES = {
+ 0: 'Successful run of zypper with no special info.',
+ 100: 'Patches are available for installation.',
+ 101: 'Security patches are available for installation.',
+ 102: 'Installation successful, reboot required.',
+ 103: 'Installation succesful, restart of the package manager itself required.',
+ }
+
+ WARNING_EXIT_CODES = {
+ 6: 'No repositories are defined.',
+ 7: 'The ZYPP library is locked.',
+ 106: 'Some repository had to be disabled temporarily because it failed to refresh. '
+ 'You should check your repository configuration (e.g. zypper ref -f).',
+ 107: 'Installation basically succeeded, but some of the packages %post install scripts returned an error. '
+ 'These packages were successfully unpacked to disk and are registered in the rpm database, '
+ 'but due to the failed install script they may not work as expected. The failed scripts output might '
+ 'reveal what actually went wrong. Any scripts output is also logged to /var/log/zypp/history.'
+ }
+
LOCK_EXIT_CODE = 7
XML_DIRECTIVES = ['-x', '--xmlout']
ZYPPER_LOCK = '/var/run/zypp.pid'
@@ -188,7 +206,15 @@ class _Zypper(object):
:return:
'''
- return self.exit_code not in self.SUCCESS_EXIT_CODES
+ if self.exit_code:
+ msg = self.SUCCESS_EXIT_CODES.get(self.exit_code)
+ if msg:
+ log.info(msg)
+ msg = self.WARNING_EXIT_CODES.get(self.exit_code)
+ if msg:
+ log.warning(msg)
+
+ return self.exit_code not in self.SUCCESS_EXIT_CODES and self.exit_code not in self.WARNING_EXIT_CODES
def _is_lock(self):
'''
--
2.19.0