Accepting request 581637 from systemsmanagement:saltstack

OBS-URL: https://build.opensuse.org/request/show/581637
OBS-URL: https://build.opensuse.org/package/show/openSUSE:Factory/salt?expand=0&rev=72

commit d136a9c449

2018.3.0rc1.tar.gz (new file, 3 lines)
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:7b81d27aa8c93aee69dc27990a76aee60dfaadda118b8303e717c5168b041b42
size 13576729

_service (new file, 18 lines)
@@ -0,0 +1,18 @@
<services>
  <service name="tar_scm" mode="localonly">
    <param name="url">https://github.com/openSUSE/salt-packaging.git</param>
    <param name="subdir">salt</param>
    <param name="filename">package</param>
    <param name="revision">oxygen-rc1</param>
    <param name="scm">git</param>
  </service>
  <service name="extract_file" mode="localonly">
    <param name="archive">*package*.tar</param>
    <param name="files">*/*</param>
  </service>
  <service name="download_url" mode="localonly">
    <param name="host">codeload.github.com</param>
    <param name="path">saltstack/salt/tar.gz/2018.3.0rc1</param>
    <param name="filename">2018.3.0rc1.tar.gz</param>
  </service>
</services>

_servicedata (new file, 4 lines)
@@ -0,0 +1,4 @@
<servicedata>
  <service name="tar_scm">
    <param name="url">https://github.com/openSUSE/salt-packaging.git</param>
    <param name="changesrevision">b8859cfa3f978b24c1dba6aefa5c37547e09a1b0</param></service></servicedata>
@@ -1,4 +1,4 @@
From 2a88378d88c2f56b152ef048214728995476244a Mon Sep 17 00:00:00 2001
From 9ba410b90abccad0359ceb96dbc17a357d141e94 Mon Sep 17 00:00:00 2001
From: Bo Maryniuk <bo@suse.de>
Date: Tue, 17 Oct 2017 16:52:33 +0200
Subject: [PATCH] Activate all beacons sources: config/pillar/grains
@@ -8,10 +8,10 @@ Subject: [PATCH] Activate all beacons sources: config/pillar/grains
1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/salt/minion.py b/salt/minion.py
index 33cbb8fa0a..10b608cc7a 100644
index df69d3c7bd..4a30e70be5 100644
--- a/salt/minion.py
+++ b/salt/minion.py
@@ -404,7 +404,7 @@ class MinionBase(object):
@@ -439,7 +439,7 @@ class MinionBase(object):
the pillar or grains changed
'''
if 'config.merge' in functions:
@@ -21,6 +21,6 @@ index 33cbb8fa0a..10b608cc7a 100644
return self.beacons.process(b_conf, self.opts['grains']) # pylint: disable=no-member
return []
--
2.13.6
2.16.1
@@ -1,4 +1,4 @@
From 35984b8055beccc94b3bd92b637c5435db822cc9 Mon Sep 17 00:00:00 2001
From d1150dafa305a15813f7d08756d086814fa1a61f Mon Sep 17 00:00:00 2001
From: Hubert Mantel <mantel@suse.de>
Date: Mon, 27 Nov 2017 13:55:13 +0100
Subject: [PATCH] avoid excessive syslogging by watchdog cronjob (#58)
@@ -21,6 +21,6 @@ index 2e418094ed..73a91ebd62 100755
/usr/bin/salt-daemon-watcher --with-init & disown
fi
--
2.13.6
2.16.1
@@ -1,69 +0,0 @@
From 49a4e807fb1cb844cec7b7c11b37f6c276f899e4 Mon Sep 17 00:00:00 2001
From: Bo Maryniuk <bo@suse.de>
Date: Mon, 9 Oct 2017 17:57:48 +0200
Subject: [PATCH] Bugfix: always return a string "list" on unknown job
target type.

---
salt/returners/couchbase_return.py | 2 +-
salt/returners/postgres_local_cache.py | 2 +-
salt/runners/jobs.py | 2 +-
salt/utils/jid.py | 2 +-
4 files changed, 4 insertions(+), 4 deletions(-)

diff --git a/salt/returners/couchbase_return.py b/salt/returners/couchbase_return.py
index 24c3a9105a..f5adecc2e7 100644
--- a/salt/returners/couchbase_return.py
+++ b/salt/returners/couchbase_return.py
@@ -309,7 +309,7 @@ def _format_job_instance(job):
'Arguments': list(job.get('arg', [])),
# unlikely but safeguard from invalid returns
'Target': job.get('tgt', 'unknown-target'),
- 'Target-type': job.get('tgt_type', []),
+ 'Target-type': job.get('tgt_type', 'list'),
'User': job.get('user', 'root')}

if 'metadata' in job:
diff --git a/salt/returners/postgres_local_cache.py b/salt/returners/postgres_local_cache.py
index 422f8c77c7..28dc2f565c 100644
--- a/salt/returners/postgres_local_cache.py
+++ b/salt/returners/postgres_local_cache.py
@@ -180,7 +180,7 @@ def _format_job_instance(job):
'Arguments': json.loads(job.get('arg', '[]')),
# unlikely but safeguard from invalid returns
'Target': job.get('tgt', 'unknown-target'),
- 'Target-type': job.get('tgt_type', []),
+ 'Target-type': job.get('tgt_type', 'list'),
'User': job.get('user', 'root')}
# TODO: Add Metadata support when it is merged from develop
return ret
diff --git a/salt/runners/jobs.py b/salt/runners/jobs.py
index 82abd56eae..fae7942e38 100644
--- a/salt/runners/jobs.py
+++ b/salt/runners/jobs.py
@@ -542,7 +542,7 @@ def _format_job_instance(job):
'Arguments': list(job.get('arg', [])),
# unlikely but safeguard from invalid returns
'Target': job.get('tgt', 'unknown-target'),
- 'Target-type': job.get('tgt_type', []),
+ 'Target-type': job.get('tgt_type', 'list'),
'User': job.get('user', 'root')}

if 'metadata' in job:
diff --git a/salt/utils/jid.py b/salt/utils/jid.py
index 3f4ef296a2..4dbf0d2c6f 100644
--- a/salt/utils/jid.py
+++ b/salt/utils/jid.py
@@ -65,7 +65,7 @@ def format_job_instance(job):
'Arguments': list(job.get('arg', [])),
# unlikely but safeguard from invalid returns
'Target': job.get('tgt', 'unknown-target'),
- 'Target-type': job.get('tgt_type', []),
+ 'Target-type': job.get('tgt_type', 'list'),
'User': job.get('user', 'root')}

if 'metadata' in job:
--
2.13.6
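Note: the dropped patch above only changes the fallback value used when a stored job record carries no target type. A minimal standalone sketch of the corrected formatting logic (not the actual Salt helper, just an illustration of the default) behaves like this:

    # Sketch: 'tgt_type' now falls back to the string 'list' instead of an empty list,
    # so consumers of the formatted job always receive a string.
    def format_job_instance(job):
        return {
            'Arguments': list(job.get('arg', [])),
            'Target': job.get('tgt', 'unknown-target'),
            'Target-type': job.get('tgt_type', 'list'),  # was [] before the fix
            'User': job.get('user', 'root'),
        }

    print(format_job_instance({'tgt': '*'})['Target-type'])  # -> 'list'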
@@ -1,316 +0,0 @@
From 2388f136acc805f1ec605206d38db650a877be1c Mon Sep 17 00:00:00 2001
From: Bo Maryniuk <bo@suse.de>
Date: Tue, 21 Nov 2017 12:53:11 +0100
Subject: [PATCH] Bugfix the logic according to the exact described
purpose of the function.

Rename function from ambiguous name

Fix and clarify docstring.

Remove unused variable (no exception, within the try/finally block)

Bugfix: do not pull '_errors' from unchecked objects

Bugfix: unit test mistakenly expects pillar errors as a string, while it is a list

Fix unit test: wrong error types in side effect

Add unit test for _get_pillar_errors when external and internal pillars are clean

Add unit test for _get_pillar_errors when external pillar has errors and internal is clean

Add unit test for _get_pillar_errors when both external and internal pillars contains errors

Add unit test for _get_pillar_errors when external pillar is clean and internal contains errors

Use variable, instead of direct value
---
salt/modules/state.py | 82 +++++++++++++++++++---------------
tests/unit/modules/test_state.py | 75 +++++++++++++++++++++++++++++-----
2 files changed, 103 insertions(+), 54 deletions(-)

diff --git a/salt/modules/state.py b/salt/modules/state.py
|
||||
index fa5b997ef7..31ffc25dfe 100644
|
||||
--- a/salt/modules/state.py
|
||||
+++ b/salt/modules/state.py
|
||||
@@ -99,17 +99,16 @@ def _set_retcode(ret, highstate=None):
|
||||
__context__['retcode'] = 2
|
||||
|
||||
|
||||
-def _check_pillar(kwargs, pillar=None):
|
||||
+def _get_pillar_errors(kwargs, pillar=None):
|
||||
'''
|
||||
- Check the pillar for errors, refuse to run the state if there are errors
|
||||
- in the pillar and return the pillar errors
|
||||
+ Checks all pillars (external and internal) for errors.
|
||||
+ Return an error message, if anywhere or None.
|
||||
+
|
||||
+ :param kwargs: dictionary of options
|
||||
+ :param pillar: external pillar
|
||||
+ :return: None or an error message
|
||||
'''
|
||||
- if kwargs.get('force'):
|
||||
- return True
|
||||
- pillar_dict = pillar if pillar is not None else __pillar__
|
||||
- if '_errors' in pillar_dict:
|
||||
- return False
|
||||
- return True
|
||||
+ return None if kwargs.get('force') else (pillar or {}).get('_errors', __pillar__.get('_errors')) or None
|
||||
|
||||
|
||||
def _wait(jid):
|
||||
@@ -411,10 +410,10 @@ def template(tem, queue=False, **kwargs):
|
||||
context=__context__,
|
||||
initial_pillar=_get_initial_pillar(opts))
|
||||
|
||||
- if not _check_pillar(kwargs, st_.opts['pillar']):
|
||||
+ errors = _get_pillar_errors(kwargs, pillar=st_.opts['pillar'])
|
||||
+ if errors:
|
||||
__context__['retcode'] = 5
|
||||
- raise CommandExecutionError('Pillar failed to render',
|
||||
- info=st_.opts['pillar']['_errors'])
|
||||
+ raise CommandExecutionError('Pillar failed to render', info=errors)
|
||||
|
||||
if not tem.endswith('.sls'):
|
||||
tem = '{sls}.sls'.format(sls=tem)
|
||||
@@ -872,11 +871,10 @@ def highstate(test=None, queue=False, **kwargs):
|
||||
mocked=kwargs.get('mock', False),
|
||||
initial_pillar=_get_initial_pillar(opts))
|
||||
|
||||
- if not _check_pillar(kwargs, st_.opts['pillar']):
|
||||
+ errors = _get_pillar_errors(kwargs, st_.opts['pillar'])
|
||||
+ if errors:
|
||||
__context__['retcode'] = 5
|
||||
- err = ['Pillar failed to render with the following messages:']
|
||||
- err += __pillar__['_errors']
|
||||
- return err
|
||||
+ return ['Pillar failed to render with the following messages:'] + errors
|
||||
|
||||
st_.push_active()
|
||||
ret = {}
|
||||
@@ -1071,11 +1069,10 @@ def sls(mods, test=None, exclude=None, queue=False, **kwargs):
|
||||
mocked=kwargs.get('mock', False),
|
||||
initial_pillar=_get_initial_pillar(opts))
|
||||
|
||||
- if not _check_pillar(kwargs, st_.opts['pillar']):
|
||||
+ errors = _get_pillar_errors(kwargs, pillar=st_.opts['pillar'])
|
||||
+ if errors:
|
||||
__context__['retcode'] = 5
|
||||
- err = ['Pillar failed to render with the following messages:']
|
||||
- err += __pillar__['_errors']
|
||||
- return err
|
||||
+ return ['Pillar failed to render with the following messages:'] + errors
|
||||
|
||||
orchestration_jid = kwargs.get('orchestration_jid')
|
||||
umask = os.umask(0o77)
|
||||
@@ -1090,7 +1087,6 @@ def sls(mods, test=None, exclude=None, queue=False, **kwargs):
|
||||
mods = mods.split(',')
|
||||
|
||||
st_.push_active()
|
||||
- ret = {}
|
||||
try:
|
||||
high_, errors = st_.render_highstate({opts['environment']: mods})
|
||||
|
||||
@@ -1197,11 +1193,10 @@ def top(topfn, test=None, queue=False, **kwargs):
|
||||
pillar_enc=pillar_enc,
|
||||
context=__context__,
|
||||
initial_pillar=_get_initial_pillar(opts))
|
||||
- if not _check_pillar(kwargs, st_.opts['pillar']):
|
||||
+ errors = _get_pillar_errors(kwargs, pillar=st_.opts['pillar'])
|
||||
+ if errors:
|
||||
__context__['retcode'] = 5
|
||||
- err = ['Pillar failed to render with the following messages:']
|
||||
- err += __pillar__['_errors']
|
||||
- return err
|
||||
+ return ['Pillar failed to render with the following messages:'] + errors
|
||||
|
||||
st_.push_active()
|
||||
st_.opts['state_top'] = salt.utils.url.create(topfn)
|
||||
@@ -1259,10 +1254,10 @@ def show_highstate(queue=False, **kwargs):
|
||||
pillar_enc=pillar_enc,
|
||||
initial_pillar=_get_initial_pillar(opts))
|
||||
|
||||
- if not _check_pillar(kwargs, st_.opts['pillar']):
|
||||
+ errors = _get_pillar_errors(kwargs, pillar=st_.opts['pillar'])
|
||||
+ if errors:
|
||||
__context__['retcode'] = 5
|
||||
- raise CommandExecutionError('Pillar failed to render',
|
||||
- info=st_.opts['pillar']['_errors'])
|
||||
+ raise CommandExecutionError('Pillar failed to render', info=errors)
|
||||
|
||||
st_.push_active()
|
||||
try:
|
||||
@@ -1293,10 +1288,10 @@ def show_lowstate(queue=False, **kwargs):
|
||||
st_ = salt.state.HighState(opts,
|
||||
initial_pillar=_get_initial_pillar(opts))
|
||||
|
||||
- if not _check_pillar(kwargs, st_.opts['pillar']):
|
||||
+ errors = _get_pillar_errors(kwargs, pillar=st_.opts['pillar'])
|
||||
+ if errors:
|
||||
__context__['retcode'] = 5
|
||||
- raise CommandExecutionError('Pillar failed to render',
|
||||
- info=st_.opts['pillar']['_errors'])
|
||||
+ raise CommandExecutionError('Pillar failed to render', info=errors)
|
||||
|
||||
st_.push_active()
|
||||
try:
|
||||
@@ -1394,11 +1389,10 @@ def sls_id(id_, mods, test=None, queue=False, **kwargs):
|
||||
st_ = salt.state.HighState(opts,
|
||||
initial_pillar=_get_initial_pillar(opts))
|
||||
|
||||
- if not _check_pillar(kwargs, st_.opts['pillar']):
|
||||
+ errors = _get_pillar_errors(kwargs, pillar=st_.opts['pillar'])
|
||||
+ if errors:
|
||||
__context__['retcode'] = 5
|
||||
- err = ['Pillar failed to render with the following messages:']
|
||||
- err += __pillar__['_errors']
|
||||
- return err
|
||||
+ return ['Pillar failed to render with the following messages:'] + errors
|
||||
|
||||
if isinstance(mods, six.string_types):
|
||||
split_mods = mods.split(',')
|
||||
@@ -1474,10 +1468,10 @@ def show_low_sls(mods, test=None, queue=False, **kwargs):
|
||||
|
||||
st_ = salt.state.HighState(opts, initial_pillar=_get_initial_pillar(opts))
|
||||
|
||||
- if not _check_pillar(kwargs, st_.opts['pillar']):
|
||||
+ errors = _get_pillar_errors(kwargs, pillar=st_.opts['pillar'])
|
||||
+ if errors:
|
||||
__context__['retcode'] = 5
|
||||
- raise CommandExecutionError('Pillar failed to render',
|
||||
- info=st_.opts['pillar']['_errors'])
|
||||
+ raise CommandExecutionError('Pillar failed to render', info=errors)
|
||||
|
||||
if isinstance(mods, six.string_types):
|
||||
mods = mods.split(',')
|
||||
@@ -1561,10 +1555,10 @@ def show_sls(mods, test=None, queue=False, **kwargs):
|
||||
pillar_enc=pillar_enc,
|
||||
initial_pillar=_get_initial_pillar(opts))
|
||||
|
||||
- if not _check_pillar(kwargs, st_.opts['pillar']):
|
||||
+ errors = _get_pillar_errors(kwargs, pillar=st_.opts['pillar'])
|
||||
+ if errors:
|
||||
__context__['retcode'] = 5
|
||||
- raise CommandExecutionError('Pillar failed to render',
|
||||
- info=st_.opts['pillar']['_errors'])
|
||||
+ raise CommandExecutionError('Pillar failed to render', info=errors)
|
||||
|
||||
if isinstance(mods, six.string_types):
|
||||
mods = mods.split(',')
|
||||
@@ -1610,10 +1604,10 @@ def show_top(queue=False, **kwargs):
|
||||
|
||||
st_ = salt.state.HighState(opts, initial_pillar=_get_initial_pillar(opts))
|
||||
|
||||
- if not _check_pillar(kwargs, st_.opts['pillar']):
|
||||
+ errors = _get_pillar_errors(kwargs, pillar=st_.opts['pillar'])
|
||||
+ if errors:
|
||||
__context__['retcode'] = 5
|
||||
- raise CommandExecutionError('Pillar failed to render',
|
||||
- info=st_.opts['pillar']['_errors'])
|
||||
+ raise CommandExecutionError('Pillar failed to render', info=errors)
|
||||
|
||||
errors = []
|
||||
top_ = st_.get_top()
|
||||
diff --git a/tests/unit/modules/test_state.py b/tests/unit/modules/test_state.py
|
||||
index 7f4f361c26..e5d10493da 100644
|
||||
--- a/tests/unit/modules/test_state.py
|
||||
+++ b/tests/unit/modules/test_state.py
|
||||
@@ -695,9 +695,9 @@ class StateTestCase(TestCase, LoaderModuleMockMixin):
|
||||
with patch.object(state, '_check_queue', mock):
|
||||
self.assertEqual(state.top("reverse_top.sls"), "A")
|
||||
|
||||
- mock = MagicMock(side_effect=[False, True, True])
|
||||
- with patch.object(state, '_check_pillar', mock):
|
||||
- with patch.dict(state.__pillar__, {"_errors": "E"}):
|
||||
+ mock = MagicMock(side_effect=[['E'], None, None])
|
||||
+ with patch.object(state, '_get_pillar_errors', mock):
|
||||
+ with patch.dict(state.__pillar__, {"_errors": ['E']}):
|
||||
self.assertListEqual(state.top("reverse_top.sls"), ret)
|
||||
|
||||
with patch.dict(state.__opts__, {"test": "A"}):
|
||||
@@ -854,14 +854,10 @@ class StateTestCase(TestCase, LoaderModuleMockMixin):
|
||||
True),
|
||||
["A"])
|
||||
|
||||
- mock = MagicMock(side_effect=[False,
|
||||
- True,
|
||||
- True,
|
||||
- True,
|
||||
- True])
|
||||
- with patch.object(state, '_check_pillar', mock):
|
||||
+ mock = MagicMock(side_effect=[['E', '1'], None, None, None, None])
|
||||
+ with patch.object(state, '_get_pillar_errors', mock):
|
||||
with patch.dict(state.__context__, {"retcode": 5}):
|
||||
- with patch.dict(state.__pillar__, {"_errors": "E1"}):
|
||||
+ with patch.dict(state.__pillar__, {"_errors": ['E', '1']}):
|
||||
self.assertListEqual(state.sls("core,edit.vim dev",
|
||||
None,
|
||||
None,
|
||||
@@ -979,3 +975,62 @@ class StateTestCase(TestCase, LoaderModuleMockMixin):
|
||||
with patch('salt.utils.fopen', mock_open()):
|
||||
self.assertTrue(state.pkg("/tmp/state_pkg.tgz",
|
||||
0, "md5"))
|
||||
+
|
||||
+ def test_get_pillar_errors_CC(self):
|
||||
+ '''
|
||||
+ Test _get_pillar_errors function.
|
||||
+ CC: External clean, Internal clean
|
||||
+ :return:
|
||||
+ '''
|
||||
+ for int_pillar, ext_pillar in [({'foo': 'bar'}, {'fred': 'baz'}),
|
||||
+ ({'foo': 'bar'}, None),
|
||||
+ ({}, {'fred': 'baz'})]:
|
||||
+ with patch('salt.modules.state.__pillar__', int_pillar):
|
||||
+ for opts, res in [({'force': True}, None),
|
||||
+ ({'force': False}, None),
|
||||
+ ({}, None)]:
|
||||
+ assert res == state._get_pillar_errors(kwargs=opts, pillar=ext_pillar)
|
||||
+
|
||||
+ def test_get_pillar_errors_EC(self):
|
||||
+ '''
|
||||
+ Test _get_pillar_errors function.
|
||||
+ EC: External erroneous, Internal clean
|
||||
+ :return:
|
||||
+ '''
|
||||
+ errors = ['failure', 'everywhere']
|
||||
+ for int_pillar, ext_pillar in [({'foo': 'bar'}, {'fred': 'baz', '_errors': errors}),
|
||||
+ ({}, {'fred': 'baz', '_errors': errors})]:
|
||||
+ with patch('salt.modules.state.__pillar__', int_pillar):
|
||||
+ for opts, res in [({'force': True}, None),
|
||||
+ ({'force': False}, errors),
|
||||
+ ({}, errors)]:
|
||||
+ assert res == state._get_pillar_errors(kwargs=opts, pillar=ext_pillar)
|
||||
+
|
||||
+ def test_get_pillar_errors_EE(self):
|
||||
+ '''
|
||||
+ Test _get_pillar_errors function.
|
||||
+ CC: External erroneous, Internal erroneous
|
||||
+ :return:
|
||||
+ '''
|
||||
+ errors = ['failure', 'everywhere']
|
||||
+ for int_pillar, ext_pillar in [({'foo': 'bar', '_errors': errors}, {'fred': 'baz', '_errors': errors})]:
|
||||
+ with patch('salt.modules.state.__pillar__', int_pillar):
|
||||
+ for opts, res in [({'force': True}, None),
|
||||
+ ({'force': False}, errors),
|
||||
+ ({}, errors)]:
|
||||
+ assert res == state._get_pillar_errors(kwargs=opts, pillar=ext_pillar)
|
||||
+
|
||||
+ def test_get_pillar_errors_CE(self):
|
||||
+ '''
|
||||
+ Test _get_pillar_errors function.
|
||||
+ CC: External clean, Internal erroneous
|
||||
+ :return:
|
||||
+ '''
|
||||
+ errors = ['failure', 'everywhere']
|
||||
+ for int_pillar, ext_pillar in [({'foo': 'bar', '_errors': errors}, {'fred': 'baz'}),
|
||||
+ ({'foo': 'bar', '_errors': errors}, None)]:
|
||||
+ with patch('salt.modules.state.__pillar__', int_pillar):
|
||||
+ for opts, res in [({'force': True}, None),
|
||||
+ ({'force': False}, errors),
|
||||
+ ({}, errors)]:
|
||||
+ assert res == state._get_pillar_errors(kwargs=opts, pillar=ext_pillar)
|
||||
--
|
||||
2.13.6
|
||||
|
||||
|
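Note: the removed patch above replaces the old boolean _check_pillar() with _get_pillar_errors(), which returns the pillar error list (or None). A self-contained sketch of that one-line expression, with hypothetical sample data, shows the precedence: 'force' wins, then external pillar errors, then internal ones:

    # Sketch of the new logic: None when forced or clean, otherwise the error list.
    def get_pillar_errors(kwargs, internal_pillar, external_pillar=None):
        if kwargs.get('force'):
            return None
        return (external_pillar or {}).get('_errors', internal_pillar.get('_errors')) or None

    internal = {'foo': 'bar', '_errors': ['rendering failed']}
    print(get_pillar_errors({}, internal))                         # ['rendering failed']
    print(get_pillar_errors({'force': True}, internal))            # None
    print(get_pillar_errors({}, {'foo': 'bar'}, {'fred': 'baz'}))  # None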
@@ -1,115 +0,0 @@
From 16dc0e5ef2c86a315a0d09fb186dd1616df1444e Mon Sep 17 00:00:00 2001
From: Jochen Breuer <jbreuer@suse.de>
Date: Wed, 6 Sep 2017 10:16:51 +0200
Subject: [PATCH] Catching error when PIDfile cannot be deleted

Usually the PIDfile is locate in /run. If Salt is not started with root
permissions, it is not able to delete the PIDfile in /run. It should
be safe to just ignore this error, since Salt overwrites the PIDfile on
the next start.
---
salt/utils/parsers.py | 8 +++++-
tests/unit/utils/test_parsers.py | 54 ++++++++++++++++++++++++++++++++++++++++
2 files changed, 61 insertions(+), 1 deletion(-)

diff --git a/salt/utils/parsers.py b/salt/utils/parsers.py
|
||||
index 925ec9f4e4..d8c5068874 100644
|
||||
--- a/salt/utils/parsers.py
|
||||
+++ b/salt/utils/parsers.py
|
||||
@@ -966,7 +966,13 @@ class DaemonMixIn(six.with_metaclass(MixInMeta, object)):
|
||||
# We've loaded and merged options into the configuration, it's safe
|
||||
# to query about the pidfile
|
||||
if self.check_pidfile():
|
||||
- os.unlink(self.config['pidfile'])
|
||||
+ try:
|
||||
+ os.unlink(self.config['pidfile'])
|
||||
+ except OSError as err:
|
||||
+ # This happens when running salt-master as a non-root user
|
||||
+ # and can be ignored, since salt-master is able to
|
||||
+ # overwrite the PIDfile on the next start.
|
||||
+ pass
|
||||
|
||||
def set_pidfile(self):
|
||||
from salt.utils.process import set_pidfile
|
||||
diff --git a/tests/unit/utils/test_parsers.py b/tests/unit/utils/test_parsers.py
|
||||
index 43488e894c..8168b3e6e3 100644
|
||||
--- a/tests/unit/utils/test_parsers.py
|
||||
+++ b/tests/unit/utils/test_parsers.py
|
||||
@@ -5,6 +5,7 @@
|
||||
|
||||
# Import python libs
|
||||
from __future__ import absolute_import
|
||||
+import logging
|
||||
|
||||
# Import Salt Testing Libs
|
||||
from tests.support.unit import skipIf, TestCase
|
||||
@@ -21,6 +22,7 @@ import salt.utils.parsers
|
||||
import salt.log.setup as log
|
||||
import salt.config
|
||||
import salt.syspaths
|
||||
+from salt.utils.parsers import DaemonMixIn
|
||||
|
||||
|
||||
class ErrorMock(object): # pylint: disable=too-few-public-methods
|
||||
@@ -958,5 +960,57 @@ class SaltAPIParserTestCase(LogSettingsParserTests):
|
||||
self.addCleanup(delattr, self, 'parser')
|
||||
|
||||
|
||||
+@skipIf(NO_MOCK, NO_MOCK_REASON)
|
||||
+class DaemonMixInTestCase(TestCase):
|
||||
+ '''
|
||||
+ Tests the PIDfile deletion in the DaemonMixIn.
|
||||
+ '''
|
||||
+
|
||||
+ def setUp(self):
|
||||
+ '''
|
||||
+ Setting up
|
||||
+ '''
|
||||
+ # Set PID
|
||||
+ self.pid = '/some/fake.pid'
|
||||
+
|
||||
+ # Setup mixin
|
||||
+ self.mixin = salt.utils.parsers.DaemonMixIn()
|
||||
+ self.mixin.config = {}
|
||||
+ self.mixin.config['pidfile'] = self.pid
|
||||
+
|
||||
+ # logger
|
||||
+ self.logger = logging.getLogger('salt.utils.parsers')
|
||||
+
|
||||
+ def test_pid_file_deletion(self):
|
||||
+ '''
|
||||
+ PIDfile deletion without exception.
|
||||
+ '''
|
||||
+ with patch('os.unlink', MagicMock()) as os_unlink:
|
||||
+ with patch('os.path.isfile', MagicMock(return_value=True)):
|
||||
+ with patch.object(self.logger, 'info') as mock_logger:
|
||||
+ self.mixin._mixin_before_exit()
|
||||
+ assert mock_logger.call_count == 0
|
||||
+ assert os_unlink.call_count == 1
|
||||
+
|
||||
+
|
||||
# Hide the class from unittest framework when it searches for TestCase classes in the module
|
||||
del LogSettingsParserTests
|
||||
+
|
||||
+
|
||||
+if __name__ == '__main__':
|
||||
+ from integration import run_tests # pylint: disable=import-error,wrong-import-position
|
||||
+ run_tests(MasterOptionParserTestCase,
|
||||
+ MinionOptionParserTestCase,
|
||||
+ ProxyMinionOptionParserTestCase,
|
||||
+ SyndicOptionParserTestCase,
|
||||
+ SaltCMDOptionParserTestCase,
|
||||
+ SaltCPOptionParserTestCase,
|
||||
+ SaltKeyOptionParserTestCase,
|
||||
+ SaltCallOptionParserTestCase,
|
||||
+ SaltRunOptionParserTestCase,
|
||||
+ SaltSSHOptionParserTestCase,
|
||||
+ SaltCloudParserTestCase,
|
||||
+ SPMParserTestCase,
|
||||
+ SaltAPIParserTestCase,
|
||||
+ DaemonMixInTestCase,
|
||||
+ needs_daemon=False)
|
||||
--
|
||||
2.13.6
|
||||
|
||||
|
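Note: the essence of the removed PIDfile patch above is simply tolerating a failed unlink of the PID file. A minimal standalone sketch of that pattern, with a hypothetical path:

    import os

    def remove_pidfile(pidfile):
        # Best-effort cleanup: a non-root salt-master may not be allowed to delete a
        # PID file under /run, and the file is overwritten on the next start anyway.
        try:
            os.unlink(pidfile)
        except OSError:
            pass

    remove_pidfile('/run/salt-master.pid')  # hypothetical path; never raises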
@@ -1,117 +0,0 @@
From c586654ed4c20a69f164b208458163611408c54a Mon Sep 17 00:00:00 2001
From: Michael Calmer <mc@suse.de>
Date: Fri, 15 Dec 2017 09:53:10 +0100
Subject: [PATCH] cherrypy read() reads bytes from the wire and write
them into contents var

adapt tests to reflect reality

When CherryPy run with python3 it reads "bytes" from the wire.
In case of python2 BytesIO == StringIO, so nothing should change.

This change will do the same to make unit tests reflect reality.
---
salt/netapi/rest_cherrypy/app.py | 25 +++++++------------------
tests/support/cptestcase.py | 6 ++++--
2 files changed, 11 insertions(+), 20 deletions(-)

diff --git a/salt/netapi/rest_cherrypy/app.py b/salt/netapi/rest_cherrypy/app.py
|
||||
index 4099416a28..67e0bad07a 100644
|
||||
--- a/salt/netapi/rest_cherrypy/app.py
|
||||
+++ b/salt/netapi/rest_cherrypy/app.py
|
||||
@@ -505,6 +505,7 @@ import salt
|
||||
import salt.auth
|
||||
import salt.utils
|
||||
import salt.utils.event
|
||||
+from salt.ext.six import BytesIO
|
||||
|
||||
# Import salt-api libs
|
||||
import salt.netapi
|
||||
@@ -830,18 +831,6 @@ def urlencoded_processor(entity):
|
||||
|
||||
:param entity: raw POST data
|
||||
'''
|
||||
- if six.PY3:
|
||||
- # https://github.com/cherrypy/cherrypy/pull/1572
|
||||
- contents = six.StringIO()
|
||||
- entity.fp.read(fp_out=contents)
|
||||
- contents.seek(0)
|
||||
- body_str = contents.read()
|
||||
- body_bytes = salt.utils.to_bytes(body_str)
|
||||
- body_bytes = six.BytesIO(body_bytes)
|
||||
- body_bytes.seek(0)
|
||||
- # Patch fp
|
||||
- entity.fp = body_bytes
|
||||
- del contents
|
||||
# First call out to CherryPy's default processor
|
||||
cherrypy._cpreqbody.process_urlencoded(entity)
|
||||
cherrypy._cpreqbody.process_urlencoded(entity)
|
||||
@@ -860,10 +849,10 @@ def json_processor(entity):
|
||||
body = entity.fp.read()
|
||||
else:
|
||||
# https://github.com/cherrypy/cherrypy/pull/1572
|
||||
- contents = six.StringIO()
|
||||
+ contents = BytesIO()
|
||||
body = entity.fp.read(fp_out=contents)
|
||||
contents.seek(0)
|
||||
- body = contents.read()
|
||||
+ body = salt.utils.to_unicode(contents.read())
|
||||
del contents
|
||||
try:
|
||||
cherrypy.serving.request.unserialized_data = json.loads(body)
|
||||
@@ -884,10 +873,10 @@ def yaml_processor(entity):
|
||||
body = entity.fp.read()
|
||||
else:
|
||||
# https://github.com/cherrypy/cherrypy/pull/1572
|
||||
- contents = six.StringIO()
|
||||
+ contents = BytesIO()
|
||||
body = entity.fp.read(fp_out=contents)
|
||||
contents.seek(0)
|
||||
- body = contents.read()
|
||||
+ body = salt.utils.to_unicode(contents.read())
|
||||
try:
|
||||
cherrypy.serving.request.unserialized_data = yaml.safe_load(body)
|
||||
except ValueError:
|
||||
@@ -910,10 +899,10 @@ def text_processor(entity):
|
||||
body = entity.fp.read()
|
||||
else:
|
||||
# https://github.com/cherrypy/cherrypy/pull/1572
|
||||
- contents = six.StringIO()
|
||||
+ contents = BytesIO()
|
||||
body = entity.fp.read(fp_out=contents)
|
||||
contents.seek(0)
|
||||
- body = contents.read()
|
||||
+ body = salt.utils.to_unicode(contents.read())
|
||||
try:
|
||||
cherrypy.serving.request.unserialized_data = json.loads(body)
|
||||
except ValueError:
|
||||
diff --git a/tests/support/cptestcase.py b/tests/support/cptestcase.py
|
||||
index ea2845f46d..75785b8eb1 100644
|
||||
--- a/tests/support/cptestcase.py
|
||||
+++ b/tests/support/cptestcase.py
|
||||
@@ -38,9 +38,11 @@ from tests.support.case import TestCase
|
||||
# pylint: disable=import-error
|
||||
import cherrypy # pylint: disable=3rd-party-module-not-gated
|
||||
import salt.ext.six as six
|
||||
-from salt.ext.six.moves import StringIO
|
||||
+from salt.ext.six import BytesIO
|
||||
# pylint: enable=import-error
|
||||
|
||||
+import salt.utils
|
||||
+
|
||||
# Not strictly speaking mandatory but just makes sense
|
||||
cherrypy.config.update({'environment': "test_suite"})
|
||||
|
||||
@@ -92,7 +94,7 @@ class BaseCherryPyTestCase(TestCase):
|
||||
fd = None
|
||||
if body is not None:
|
||||
h['content-length'] = '{0}'.format(len(body))
|
||||
- fd = StringIO(body)
|
||||
+ fd = BytesIO(salt.utils.to_bytes(body))
|
||||
|
||||
if headers is not None:
|
||||
h.update(headers)
|
||||
--
|
||||
2.13.6
|
||||
|
||||
|
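Note: the removed patch above switches the CherryPy body processors from StringIO to BytesIO, because under Python 3 the request entity is read as bytes. A rough standalone illustration of the buffer-then-decode step (simplified, not the actual rest_cherrypy code, which uses entity.fp.read(fp_out=...) and salt.utils.to_unicode):

    import io
    import json

    def read_json_body(fp):
        # The wire gives us bytes under Python 3, so buffer into BytesIO and
        # decode to text before handing the payload to json.loads().
        contents = io.BytesIO()
        contents.write(fp.read())
        contents.seek(0)
        body = contents.read().decode('utf-8')
        return json.loads(body)

    print(read_json_body(io.BytesIO(b'{"fun": "test.ping"}')))  # {'fun': 'test.ping'}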
@@ -1,76 +0,0 @@
From 1949261a504fd01e057b41126d78f142f4977204 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Pablo=20Su=C3=A1rez=20Hern=C3=A1ndez?=
<psuarezhernandez@suse.com>
Date: Fri, 6 Oct 2017 17:12:15 +0100
Subject: [PATCH] Enable '--with-salt-version' parameter for setup.py
script

---
setup.py | 20 ++++++++++++++++++--
1 file changed, 18 insertions(+), 2 deletions(-)

diff --git a/setup.py b/setup.py
index effdc2f230..519f753401 100755
--- a/setup.py
+++ b/setup.py
@@ -183,17 +183,22 @@ class WriteSaltVersion(Command):
'''

def run(self):
- if not os.path.exists(SALT_VERSION_HARDCODED):
+ if not os.path.exists(SALT_VERSION_HARDCODED) or self.distribution.with_salt_version:
# Write the version file
if getattr(self.distribution, 'salt_version_hardcoded_path', None) is None:
print('This command is not meant to be called on it\'s own')
exit(1)

+ if not self.distribution.with_salt_version:
+ salt_version = __saltstack_version__
+ else:
+ salt_version = SaltStackVersion.parse(self.distribution.with_salt_version)
+
# pylint: disable=E0602
open(self.distribution.salt_version_hardcoded_path, 'w').write(
INSTALL_VERSION_TEMPLATE.format(
date=DATE,
- full_version_info=__saltstack_version__.full_info
+ full_version_info=salt_version.full_info
)
)
# pylint: enable=E0602
@@ -731,6 +736,13 @@ class Build(build):
def run(self):
# Run build.run function
build.run(self)
+ if getattr(self.distribution, 'with_salt_version', False):
+ # Write the hardcoded salt version module salt/_version.py
+ self.distribution.salt_version_hardcoded_path = os.path.join(
+ self.build_lib, 'salt', '_version.py'
+ )
+ self.run_command('write_salt_version')
+
if getattr(self.distribution, 'running_salt_install', False):
# If our install attribute is present and set to True, we'll go
# ahead and write our install time python modules.
@@ -839,6 +851,7 @@ class SaltDistribution(distutils.dist.Distribution):
('ssh-packaging', None, 'Run in SSH packaging mode'),
('salt-transport=', None, 'The transport to prepare salt for. Choices are \'zeromq\' '
'\'raet\' or \'both\'. Defaults to \'zeromq\'', 'zeromq')] + [
+ ('with-salt-version=', None, 'Set a fixed version for Salt instead calculating it'),
# Salt's Paths Configuration Settings
('salt-root-dir=', None,
'Salt\'s pre-configured root directory'),
@@ -893,6 +906,9 @@ class SaltDistribution(distutils.dist.Distribution):
self.salt_spm_pillar_dir = None
self.salt_spm_reactor_dir = None

+ # Salt version
+ self.with_salt_version = None
+
self.name = 'salt-ssh' if PACKAGED_FOR_SALT_SSH else 'salt'
self.salt_version = __version__ # pylint: disable=undefined-variable
self.description = 'Portable, distributed, remote execution and configuration management system'
--
2.13.6
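Note: judging from the distribution option added above, the intent is to let packagers pin the reported Salt version at build time instead of deriving it from git, presumably via an invocation along the lines of "python setup.py --with-salt-version=2018.3.0 build" (an assumption, not stated in the patch). A simplified, hypothetical sketch of what the build step does, writing a hardcoded version module into the build tree (the template and paths below are illustrative, not Salt's actual ones):

    import os

    VERSION_TEMPLATE = "# Auto-generated at build time\nversion_info = {info}\n"

    def write_hardcoded_version(build_lib, version_string):
        # Parse the requested version and write salt/_version.py under build_lib.
        info = tuple(int(part) for part in version_string.split('.'))
        path = os.path.join(build_lib, 'salt', '_version.py')
        os.makedirs(os.path.dirname(path), exist_ok=True)
        with open(path, 'w') as handle:
            handle.write(VERSION_TEMPLATE.format(info=info))
        return path

    print(write_hardcoded_version('/tmp/build-lib', '2018.3.0'))  # hypothetical values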
@@ -1,4 +1,4 @@
From a1aec4e0d740c179657765ab30475d11f9950174 Mon Sep 17 00:00:00 2001
From 73727acbfc0bd6a263b0a1464c58f692950b43f7 Mon Sep 17 00:00:00 2001
From: Michele Bologna <michele.bologna@suse.com>
Date: Thu, 14 Dec 2017 18:20:02 +0100
Subject: [PATCH] Feat: add grain for all FQDNs
@@ -17,14 +17,14 @@ https://github.com/saltstack/salt/pull/45060
---
salt/grains/core.py | 27 +++++++++++++++++++++++++++
tests/integration/modules/test_grains.py | 1 +
tests/unit/grains/test_core.py | 28 ++++++++++++++++++++++++++++
3 files changed, 56 insertions(+)
tests/unit/grains/test_core.py | 1 +
3 files changed, 29 insertions(+)

diff --git a/salt/grains/core.py b/salt/grains/core.py
index a7e1a22d2a..04b73f9120 100644
index 7d628bccb9..af25097117 100644
--- a/salt/grains/core.py
+++ b/salt/grains/core.py
@@ -1756,6 +1756,33 @@ def append_domain():
@@ -1890,6 +1890,33 @@ def append_domain():
return grain


@@ -59,7 +59,7 @@ index a7e1a22d2a..04b73f9120 100644
'''
Return ip address and FQDN grains
diff --git a/tests/integration/modules/test_grains.py b/tests/integration/modules/test_grains.py
index 7d46315e42..1db90ab532 100644
index 709f882b45..aa7bd44202 100644
--- a/tests/integration/modules/test_grains.py
+++ b/tests/integration/modules/test_grains.py
@@ -51,6 +51,7 @@ class TestModulesGrains(ModuleCase):
@@ -71,49 +71,18 @@ index 7d46315e42..1db90ab532 100644
'groupname',
'host',
diff --git a/tests/unit/grains/test_core.py b/tests/unit/grains/test_core.py
index 6ee4257863..4f1412de76 100644
index e781fadefe..dba8d082c5 100644
--- a/tests/unit/grains/test_core.py
+++ b/tests/unit/grains/test_core.py
@@ -6,6 +6,7 @@
# Import Python libs
from __future__ import absolute_import
@@ -7,6 +7,7 @@
from __future__ import absolute_import, print_function, unicode_literals
import logging
import os
+import socket

# Import Salt Testing Libs
from tests.support.mixins import LoaderModuleMockMixin
@@ -462,3 +463,30 @@ PATCHLEVEL = 3
self.assertEqual(os_grains.get('osrelease'), os_release_map['osrelease'])
self.assertListEqual(list(os_grains.get('osrelease_info')), os_release_map['osrelease_info'])
self.assertEqual(os_grains.get('osmajorrelease'), os_release_map['osmajorrelease'])
+
+ @skipIf(not salt.utils.is_linux(), 'System is not Linux')
+ def test_fqdns_return(self):
+ '''
+ test the return for a dns grain. test for issue:
+ https://github.com/saltstack/salt/issues/41230
+ '''
+ reverse_resolv_mock = [('foo.bar.baz', [], ['1.2.3.4']),
+ ('rinzler.evil-corp.com', [], ['5.6.7.8']),
+ ('foo.bar.baz', [], ['fe80::a8b2:93ff:fe00:0']),
+ ('bluesniff.foo.bar', [], ['fe80::a8b2:93ff:dead:beef'])]
+ ret = {'fqdns': ['rinzler.evil-corp.com', 'foo.bar.baz', 'bluesniff.foo.bar']}
+ self._run_fqdns_test(reverse_resolv_mock, ret)
+
+ def _run_fqdns_test(self, reverse_resolv_mock, ret):
+ with patch.object(salt.utils, 'is_windows', MagicMock(return_value=False)):
+ with patch('salt.utils.network.ip_addrs',
+ MagicMock(return_value=['1.2.3.4', '5.6.7.8'])),\
+ patch('salt.utils.network.ip_addrs6',
+ MagicMock(return_value=['fe80::a8b2:93ff:fe00:0', 'fe80::a8b2:93ff:dead:beef'])):
+ with patch.object(socket, 'gethostbyaddr', side_effect=reverse_resolv_mock):
+ fqdns = core.fqdns()
+ self.assertEqual(fqdns, ret)
+
+if __name__ == '__main__':
+ from integration import run_tests
+ run_tests(CoreGrainsTestCase, needs_daemon=False)
try:
--
2.13.6
2.16.1
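Note: the fqdns grain introduced by the patch above reverse-resolves the minion's IPv4/IPv6 addresses. A rough standard-library-only equivalent of that lookup (Salt's real implementation goes through its own network utilities and mocks them in the test shown above):

    import socket

    def fqdns(addresses):
        # Reverse-resolve each address and collect the unique primary hostnames,
        # skipping addresses that do not resolve.
        names = set()
        for addr in addresses:
            try:
                name, _aliases, _ips = socket.gethostbyaddr(addr)
                names.add(name)
            except (socket.herror, socket.gaierror, OSError):
                continue
        return {'fqdns': sorted(names)}

    print(fqdns(['127.0.0.1']))  # e.g. {'fqdns': ['localhost']}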
@@ -1,4 +1,4 @@
From 7e118cb36bacdc50606512adc562438fcc3257e2 Mon Sep 17 00:00:00 2001
From 341c175135c555ee1a02f1ce952c95d14fd834fa Mon Sep 17 00:00:00 2001
From: Bo Maryniuk <bo@suse.de>
Date: Thu, 14 Dec 2017 16:21:40 +0100
Subject: [PATCH] Fix bsc#1065792
@@ -8,7 +8,7 @@ Subject: [PATCH] Fix bsc#1065792
1 file changed, 1 insertion(+)

diff --git a/salt/states/service.py b/salt/states/service.py
index ff5300df6a..eaa9474f78 100644
index c5bf3f2d54..a5ec426ec4 100644
--- a/salt/states/service.py
+++ b/salt/states/service.py
@@ -80,6 +80,7 @@ def __virtual__():
@@ -20,6 +20,6 @@ index ff5300df6a..eaa9474f78 100644
return __virtualname__
else:
--
2.13.6
2.16.1
@@ -1,253 +0,0 @@
From 01c4d8875a8be8b0707b0088ccf186c4cd137448 Mon Sep 17 00:00:00 2001
From: Jochen Breuer <jbreuer@suse.de>
Date: Wed, 23 Aug 2017 21:31:28 +0200
Subject: [PATCH] Fix for delete_deployment in Kubernetes module

The Kubernetes module function delete_deployment() uses
api_instance.delete_namespaced_deployment() from the Kubernetes lib. This
method from the Kubernetes lib returns immediately without giving a success
or failure indication, which lets Salt mark the job as failed even though we
don't know if it failed or not.

To actually get a result I've implemented a polling via show_deployment() to
check if the deployment got removed.

If a time limit is hit, we are returning with an error, otherwise it is a
success.

Since Windows has no signal.alarm implementation, we are here falling back to
loop counting.
---
salt/exceptions.py | 6 ++
salt/modules/kubernetes.py | 44 +++++++++++-
tests/unit/modules/test_kubernetes.py | 126 ++++++++++++++++++++++++++++++++++
3 files changed, 175 insertions(+), 1 deletion(-)
create mode 100644 tests/unit/modules/test_kubernetes.py

diff --git a/salt/exceptions.py b/salt/exceptions.py
|
||||
index 256537dd77..00111df104 100644
|
||||
--- a/salt/exceptions.py
|
||||
+++ b/salt/exceptions.py
|
||||
@@ -265,6 +265,12 @@ class SaltCacheError(SaltException):
|
||||
'''
|
||||
|
||||
|
||||
+class TimeoutError(SaltException):
|
||||
+ '''
|
||||
+ Thrown when an opration cannot be completet within a given time limit.
|
||||
+ '''
|
||||
+
|
||||
+
|
||||
class SaltReqTimeoutError(SaltException):
|
||||
'''
|
||||
Thrown when a salt master request call fails to return within the timeout
|
||||
diff --git a/salt/modules/kubernetes.py b/salt/modules/kubernetes.py
|
||||
index 2e17b11444..890659c1c8 100644
|
||||
--- a/salt/modules/kubernetes.py
|
||||
+++ b/salt/modules/kubernetes.py
|
||||
@@ -40,11 +40,15 @@ import base64
|
||||
import logging
|
||||
import yaml
|
||||
import tempfile
|
||||
+import signal
|
||||
+from time import sleep
|
||||
+from contextlib import contextmanager
|
||||
|
||||
from salt.exceptions import CommandExecutionError
|
||||
from salt.ext.six import iteritems
|
||||
import salt.utils
|
||||
import salt.utils.templates
|
||||
+from salt.ext.six.moves import range # pylint: disable=import-error
|
||||
|
||||
try:
|
||||
import kubernetes # pylint: disable=import-self
|
||||
@@ -78,6 +82,21 @@ def __virtual__():
|
||||
return False, 'python kubernetes library not found'
|
||||
|
||||
|
||||
+if not salt.utils.is_windows():
|
||||
+ @contextmanager
|
||||
+ def _time_limit(seconds):
|
||||
+ def signal_handler(signum, frame):
|
||||
+ raise TimeoutException
|
||||
+ signal.signal(signal.SIGALRM, signal_handler)
|
||||
+ signal.alarm(seconds)
|
||||
+ try:
|
||||
+ yield
|
||||
+ finally:
|
||||
+ signal.alarm(0)
|
||||
+
|
||||
+ POLLING_TIME_LIMIT = 30
|
||||
+
|
||||
+
|
||||
# pylint: disable=no-member
|
||||
def _setup_conn(**kwargs):
|
||||
'''
|
||||
@@ -692,7 +711,30 @@ def delete_deployment(name, namespace='default', **kwargs):
|
||||
name=name,
|
||||
namespace=namespace,
|
||||
body=body)
|
||||
- return api_response.to_dict()
|
||||
+ mutable_api_response = api_response.to_dict()
|
||||
+ if not salt.utils.is_windows():
|
||||
+ try:
|
||||
+ with _time_limit(POLLING_TIME_LIMIT):
|
||||
+ while show_deployment(name, namespace) is not None:
|
||||
+ sleep(1)
|
||||
+ else: # pylint: disable=useless-else-on-loop
|
||||
+ mutable_api_response['code'] = 200
|
||||
+ except TimeoutException:
|
||||
+ pass
|
||||
+ else:
|
||||
+ # Windows has not signal.alarm implementation, so we are just falling
|
||||
+ # back to loop-counting.
|
||||
+ for i in range(60):
|
||||
+ if show_deployment(name, namespace) is None:
|
||||
+ mutable_api_response['code'] = 200
|
||||
+ break
|
||||
+ else:
|
||||
+ sleep(1)
|
||||
+ if mutable_api_response['code'] != 200:
|
||||
+ log.warning('Reached polling time limit. Deployment is not yet '
|
||||
+ 'deleted, but we are backing off. Sorry, but you\'ll '
|
||||
+ 'have to check manually.')
|
||||
+ return mutable_api_response
|
||||
except (ApiException, HTTPError) as exc:
|
||||
if isinstance(exc, ApiException) and exc.status == 404:
|
||||
return None
|
||||
diff --git a/tests/unit/modules/test_kubernetes.py b/tests/unit/modules/test_kubernetes.py
|
||||
new file mode 100644
|
||||
index 0000000000..493822a93c
|
||||
--- /dev/null
|
||||
+++ b/tests/unit/modules/test_kubernetes.py
|
||||
@@ -0,0 +1,126 @@
|
||||
+# -*- coding: utf-8 -*-
|
||||
+'''
|
||||
+ :codeauthor: :email:`Jochen Breuer <jbreuer@suse.de>`
|
||||
+'''
|
||||
+
|
||||
+# Import Python Libs
|
||||
+from __future__ import absolute_import
|
||||
+
|
||||
+# Import Salt Testing Libs
|
||||
+from salttesting import TestCase, skipIf
|
||||
+from salttesting.mock import (
|
||||
+ Mock,
|
||||
+ patch,
|
||||
+ NO_MOCK,
|
||||
+ NO_MOCK_REASON
|
||||
+)
|
||||
+
|
||||
+try:
|
||||
+ from salt.modules import kubernetes
|
||||
+except ImportError:
|
||||
+ kubernetes = False
|
||||
+
|
||||
+# Globals
|
||||
+kubernetes.__salt__ = dict()
|
||||
+kubernetes.__grains__ = dict()
|
||||
+kubernetes.__context__ = dict()
|
||||
+
|
||||
+
|
||||
+@skipIf(NO_MOCK, NO_MOCK_REASON)
|
||||
+@skipIf(kubernetes is False, "Probably Kubernetes client lib is not installed. \
|
||||
+ Skipping test_kubernetes.py")
|
||||
+class KubernetesTestCase(TestCase):
|
||||
+ '''
|
||||
+ Test cases for salt.modules.kubernetes
|
||||
+ '''
|
||||
+
|
||||
+ def test_nodes(self):
|
||||
+ '''
|
||||
+ Test node listing.
|
||||
+ :return:
|
||||
+ '''
|
||||
+ with patch('salt.modules.kubernetes.kubernetes') as mock_kubernetes_lib:
|
||||
+ with patch.dict(kubernetes.__salt__, {'config.option': Mock(return_value="")}):
|
||||
+ mock_kubernetes_lib.client.CoreV1Api.return_value = Mock(
|
||||
+ **{"list_node.return_value.to_dict.return_value":
|
||||
+ {'items': [{'metadata': {'name': 'mock_node_name'}}]}}
|
||||
+ )
|
||||
+ self.assertEqual(kubernetes.nodes(), ['mock_node_name'])
|
||||
+ self.assertTrue(kubernetes.kubernetes.client.CoreV1Api().list_node().to_dict.called)
|
||||
+
|
||||
+ def test_deployments(self):
|
||||
+ '''
|
||||
+ Tests deployment listing.
|
||||
+ :return:
|
||||
+ '''
|
||||
+ with patch('salt.modules.kubernetes.kubernetes') as mock_kubernetes_lib:
|
||||
+ with patch.dict(kubernetes.__salt__, {'config.option': Mock(return_value="")}):
|
||||
+ mock_kubernetes_lib.client.ExtensionsV1beta1Api.return_value = Mock(
|
||||
+ **{"list_namespaced_deployment.return_value.to_dict.return_value":
|
||||
+ {'items': [{'metadata': {'name': 'mock_deployment_name'}}]}}
|
||||
+ )
|
||||
+ self.assertEqual(kubernetes.deployments(), ['mock_deployment_name'])
|
||||
+ self.assertTrue(
|
||||
+ kubernetes.kubernetes.client.ExtensionsV1beta1Api().list_namespaced_deployment().to_dict.called)
|
||||
+
|
||||
+ def test_services(self):
|
||||
+ '''
|
||||
+ Tests services listing.
|
||||
+ :return:
|
||||
+ '''
|
||||
+ with patch('salt.modules.kubernetes.kubernetes') as mock_kubernetes_lib:
|
||||
+ with patch.dict(kubernetes.__salt__, {'config.option': Mock(return_value="")}):
|
||||
+ mock_kubernetes_lib.client.CoreV1Api.return_value = Mock(
|
||||
+ **{"list_namespaced_service.return_value.to_dict.return_value":
|
||||
+ {'items': [{'metadata': {'name': 'mock_service_name'}}]}}
|
||||
+ )
|
||||
+ self.assertEqual(kubernetes.services(), ['mock_service_name'])
|
||||
+ self.assertTrue(kubernetes.kubernetes.client.CoreV1Api().list_namespaced_service().to_dict.called)
|
||||
+
|
||||
+ def test_pods(self):
|
||||
+ '''
|
||||
+ Tests pods listing.
|
||||
+ :return:
|
||||
+ '''
|
||||
+ with patch('salt.modules.kubernetes.kubernetes') as mock_kubernetes_lib:
|
||||
+ with patch.dict(kubernetes.__salt__, {'config.option': Mock(return_value="")}):
|
||||
+ mock_kubernetes_lib.client.CoreV1Api.return_value = Mock(
|
||||
+ **{"list_namespaced_pod.return_value.to_dict.return_value":
|
||||
+ {'items': [{'metadata': {'name': 'mock_pod_name'}}]}}
|
||||
+ )
|
||||
+ self.assertEqual(kubernetes.pods(), ['mock_pod_name'])
|
||||
+ self.assertTrue(kubernetes.kubernetes.client.CoreV1Api().
|
||||
+ list_namespaced_pod().to_dict.called)
|
||||
+
|
||||
+ def test_delete_deployments(self):
|
||||
+ '''
|
||||
+ Tests deployment deletion
|
||||
+ :return:
|
||||
+ '''
|
||||
+ with patch('salt.modules.kubernetes.kubernetes') as mock_kubernetes_lib:
|
||||
+ with patch('salt.modules.kubernetes.show_deployment', Mock(return_value=None)):
|
||||
+ with patch.dict(kubernetes.__salt__, {'config.option': Mock(return_value="")}):
|
||||
+ mock_kubernetes_lib.client.V1DeleteOptions = Mock(return_value="")
|
||||
+ mock_kubernetes_lib.client.ExtensionsV1beta1Api.return_value = Mock(
|
||||
+ **{"delete_namespaced_deployment.return_value.to_dict.return_value": {'code': ''}}
|
||||
+ )
|
||||
+ self.assertEqual(kubernetes.delete_deployment("test"), {'code': 200})
|
||||
+ self.assertTrue(
|
||||
+ kubernetes.kubernetes.client.ExtensionsV1beta1Api().
|
||||
+ delete_namespaced_deployment().to_dict.called)
|
||||
+
|
||||
+ def test_create_deployments(self):
|
||||
+ '''
|
||||
+ Tests deployment creation.
|
||||
+ :return:
|
||||
+ '''
|
||||
+ with patch('salt.modules.kubernetes.kubernetes') as mock_kubernetes_lib:
|
||||
+ with patch.dict(kubernetes.__salt__, {'config.option': Mock(return_value="")}):
|
||||
+ mock_kubernetes_lib.client.ExtensionsV1beta1Api.return_value = Mock(
|
||||
+ **{"create_namespaced_deployment.return_value.to_dict.return_value": {}}
|
||||
+ )
|
||||
+ self.assertEqual(kubernetes.create_deployment("test", "default", {}, {},
|
||||
+ None, None, None), {})
|
||||
+ self.assertTrue(
|
||||
+ kubernetes.kubernetes.client.ExtensionsV1beta1Api().
|
||||
+ create_namespaced_deployment().to_dict.called)
|
||||
--
|
||||
2.13.6
|
||||
|
||||
|
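Note: the removed Kubernetes patch above polls show_deployment() until the deployment disappears, guarded on POSIX by a SIGALRM-based time limit. A condensed standalone sketch of that guard (hypothetical poll callback, not the actual Salt module code; POSIX-only because of signal.alarm):

    import signal
    from contextlib import contextmanager
    from time import sleep

    class TimeLimitReached(Exception):
        """Raised by the SIGALRM handler when the polling window expires."""

    @contextmanager
    def time_limit(seconds):
        def handler(signum, frame):
            raise TimeLimitReached()
        signal.signal(signal.SIGALRM, handler)
        signal.alarm(seconds)
        try:
            yield
        finally:
            signal.alarm(0)  # always cancel the pending alarm

    def wait_for_deletion(still_exists, limit=30):
        # Poll until the resource is gone or the time limit fires; True on success.
        try:
            with time_limit(limit):
                while still_exists():
                    sleep(1)
            return True
        except TimeLimitReached:
            return False

    print(wait_for_deletion(lambda: False))  # resource already gone -> True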
fix-grains-with-n.patch (new file, 26 lines)
@@ -0,0 +1,26 @@
From 53a6d53b50695baa8004bce5f31a64f7455351f2 Mon Sep 17 00:00:00 2001
From: Mihai Dinca <mdinca@suse.de>
Date: Thu, 1 Mar 2018 11:06:15 +0100
Subject: [PATCH] Fix grains with '\n'

---
salt/grains/core.py | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/salt/grains/core.py b/salt/grains/core.py
index af25097117..f8e36a895e 100644
--- a/salt/grains/core.py
+++ b/salt/grains/core.py
@@ -2196,7 +2196,7 @@ def _hw_data(osdata):
if os.path.exists(contents_file):
try:
with salt.utils.files.fopen(contents_file, 'r') as ifile:
- grains[key] = ifile.read()
+ grains[key] = ifile.read().strip()
if key == 'uuid':
grains['uuid'] = grains['uuid'].lower()
except (IOError, OSError) as err:
--
2.16.1
@@ -1,26 +0,0 @@
From 1828df2c90f82db3b1c8a1cb968245f38e2380fe Mon Sep 17 00:00:00 2001
From: Mihai Dinca <mdinca@suse.de>
Date: Fri, 8 Dec 2017 13:34:06 +0100
Subject: [PATCH] Fix salt-master for old psutil

---
salt/utils/psutil_compat.py | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/salt/utils/psutil_compat.py b/salt/utils/psutil_compat.py
index a32712860a..dee563455b 100644
--- a/salt/utils/psutil_compat.py
+++ b/salt/utils/psutil_compat.py
@@ -26,7 +26,7 @@ else:
# Psuedo "from psutil import *"
_globals = globals()
for attr in psutil.__all__:
- _temp = __import__('psutil', globals(), locals(), [attr], -1)
+ _temp = __import__('psutil', globals(), locals(), [attr], -1 if six.PY2 else 0)
try:
_globals[attr] = getattr(_temp, attr)
except AttributeError:
--
2.13.6
@@ -1,211 +0,0 @@
From cb472e1f0fc18a554e0de9e3fe6bbe16557957ee Mon Sep 17 00:00:00 2001
From: Silvio Moioli <smoioli@suse.de>
Date: Wed, 20 Sep 2017 14:33:33 +0200
Subject: [PATCH] Introduce process_count_max minion configuration
parameter

This allows users to limit the number of processes or threads a minion
will start in response to published messages, prevents resource
exhaustion in case a high number of concurrent jobs is scheduled in a
short time.

process_count_max: add defaults and documentation

process_count_max: adapt existing unit tests

process_count_max: add unit test

process_count_max: disable by default
---
conf/minion | 6 +++++
doc/ref/configuration/minion.rst | 17 +++++++++++++
salt/config/__init__.py | 4 +++
salt/minion.py | 10 ++++++++
tests/unit/test_minion.py | 53 +++++++++++++++++++++++++++++++++++++---
5 files changed, 87 insertions(+), 3 deletions(-)

diff --git a/conf/minion b/conf/minion
|
||||
index 6cae043295..4a3cddcbd1 100644
|
||||
--- a/conf/minion
|
||||
+++ b/conf/minion
|
||||
@@ -689,6 +689,12 @@
|
||||
# for a full explanation.
|
||||
#multiprocessing: True
|
||||
|
||||
+# Limit the maximum amount of processes or threads created by salt-minion.
|
||||
+# This is useful to avoid resource exhaustion in case the minion receives more
|
||||
+# publications than it is able to handle, as it limits the number of spawned
|
||||
+# processes or threads. -1 is the default and disables the limit.
|
||||
+#process_count_max: -1
|
||||
+
|
||||
|
||||
##### Logging settings #####
|
||||
##########################################
|
||||
diff --git a/doc/ref/configuration/minion.rst b/doc/ref/configuration/minion.rst
|
||||
index 5c92b932ab..19e9026dd8 100644
|
||||
--- a/doc/ref/configuration/minion.rst
|
||||
+++ b/doc/ref/configuration/minion.rst
|
||||
@@ -2352,6 +2352,23 @@ executed in a thread.
|
||||
|
||||
multiprocessing: True
|
||||
|
||||
+.. conf_minion:: process_count_max
|
||||
+
|
||||
+``process_count_max``
|
||||
+-------
|
||||
+
|
||||
+.. versionadded:: Oxygen
|
||||
+
|
||||
+Default: ``-1``
|
||||
+
|
||||
+Limit the maximum amount of processes or threads created by ``salt-minion``.
|
||||
+This is useful to avoid resource exhaustion in case the minion receives more
|
||||
+publications than it is able to handle, as it limits the number of spawned
|
||||
+processes or threads. ``-1`` is the default and disables the limit.
|
||||
+
|
||||
+.. code-block:: yaml
|
||||
+
|
||||
+ process_count_max: -1
|
||||
|
||||
.. _minion-logging-settings:
|
||||
|
||||
diff --git a/salt/config/__init__.py b/salt/config/__init__.py
|
||||
index 0f06f9ccca..668051a789 100644
|
||||
--- a/salt/config/__init__.py
|
||||
+++ b/salt/config/__init__.py
|
||||
@@ -328,6 +328,9 @@ VALID_OPTS = {
|
||||
# Whether or not processes should be forked when needed. The alternative is to use threading.
|
||||
'multiprocessing': bool,
|
||||
|
||||
+ # Maximum number of concurrently active processes at any given point in time
|
||||
+ 'process_count_max': int,
|
||||
+
|
||||
# Whether or not the salt minion should run scheduled mine updates
|
||||
'mine_enabled': bool,
|
||||
|
||||
@@ -1193,6 +1196,7 @@ DEFAULT_MINION_OPTS = {
|
||||
'auto_accept': True,
|
||||
'autosign_timeout': 120,
|
||||
'multiprocessing': True,
|
||||
+ 'process_count_max': -1,
|
||||
'mine_enabled': True,
|
||||
'mine_return_job': False,
|
||||
'mine_interval': 60,
|
||||
diff --git a/salt/minion.py b/salt/minion.py
|
||||
index 394b11a2e8..33cbb8fa0a 100644
|
||||
--- a/salt/minion.py
|
||||
+++ b/salt/minion.py
|
||||
@@ -1290,6 +1290,7 @@ class Minion(MinionBase):
|
||||
self._send_req_async(load, timeout, callback=lambda f: None) # pylint: disable=unexpected-keyword-arg
|
||||
return True
|
||||
|
||||
+ @tornado.gen.coroutine
|
||||
def _handle_decoded_payload(self, data):
|
||||
'''
|
||||
Override this method if you wish to handle the decoded data
|
||||
@@ -1321,6 +1322,15 @@ class Minion(MinionBase):
|
||||
self.functions, self.returners, self.function_errors, self.executors = self._load_modules()
|
||||
self.schedule.functions = self.functions
|
||||
self.schedule.returners = self.returners
|
||||
+
|
||||
+ process_count_max = self.opts.get('process_count_max')
|
||||
+ if process_count_max > 0:
|
||||
+ process_count = len(salt.utils.minion.running(self.opts))
|
||||
+ while process_count >= process_count_max:
|
||||
+ log.warn("Maximum number of processes reached while executing jid {0}, waiting...".format(data['jid']))
|
||||
+ yield tornado.gen.sleep(10)
|
||||
+ process_count = len(salt.utils.minion.running(self.opts))
|
||||
+
|
||||
# We stash an instance references to allow for the socket
|
||||
# communication in Windows. You can't pickle functions, and thus
|
||||
# python needs to be able to reconstruct the reference on the other
|
||||
diff --git a/tests/unit/test_minion.py b/tests/unit/test_minion.py
|
||||
index 535dfeedfc..6c9dca13cd 100644
|
||||
--- a/tests/unit/test_minion.py
|
||||
+++ b/tests/unit/test_minion.py
|
||||
@@ -18,6 +18,7 @@ from salt.utils import event
|
||||
from salt.exceptions import SaltSystemExit
|
||||
import salt.syspaths
|
||||
import tornado
|
||||
+from salt.ext.six.moves import range
|
||||
|
||||
__opts__ = {}
|
||||
|
||||
@@ -69,7 +70,7 @@ class MinionTestCase(TestCase):
|
||||
mock_jid_queue = [123]
|
||||
try:
|
||||
minion = salt.minion.Minion(mock_opts, jid_queue=copy.copy(mock_jid_queue), io_loop=tornado.ioloop.IOLoop())
|
||||
- ret = minion._handle_decoded_payload(mock_data)
|
||||
+ ret = minion._handle_decoded_payload(mock_data).result()
|
||||
self.assertEqual(minion.jid_queue, mock_jid_queue)
|
||||
self.assertIsNone(ret)
|
||||
finally:
|
||||
@@ -98,7 +99,7 @@ class MinionTestCase(TestCase):
|
||||
# Call the _handle_decoded_payload function and update the mock_jid_queue to include the new
|
||||
# mock_jid. The mock_jid should have been added to the jid_queue since the mock_jid wasn't
|
||||
# previously included. The minion's jid_queue attribute and the mock_jid_queue should be equal.
|
||||
- minion._handle_decoded_payload(mock_data)
|
||||
+ minion._handle_decoded_payload(mock_data).result()
|
||||
mock_jid_queue.append(mock_jid)
|
||||
self.assertEqual(minion.jid_queue, mock_jid_queue)
|
||||
finally:
|
||||
@@ -126,8 +127,54 @@ class MinionTestCase(TestCase):
|
||||
|
||||
# Call the _handle_decoded_payload function and check that the queue is smaller by one item
|
||||
# and contains the new jid
|
||||
- minion._handle_decoded_payload(mock_data)
|
||||
+ minion._handle_decoded_payload(mock_data).result()
|
||||
self.assertEqual(len(minion.jid_queue), 2)
|
||||
self.assertEqual(minion.jid_queue, [456, 789])
|
||||
finally:
|
||||
minion.destroy()
|
||||
+
|
||||
+ def test_process_count_max(self):
|
||||
+ '''
|
||||
+ Tests that the _handle_decoded_payload function does not spawn more than the configured amount of processes,
|
||||
+ as per process_count_max.
|
||||
+ '''
|
||||
+ with patch('salt.minion.Minion.ctx', MagicMock(return_value={})), \
|
||||
+ patch('salt.utils.process.SignalHandlingMultiprocessingProcess.start', MagicMock(return_value=True)), \
|
||||
+ patch('salt.utils.process.SignalHandlingMultiprocessingProcess.join', MagicMock(return_value=True)), \
|
||||
+ patch('salt.utils.minion.running', MagicMock(return_value=[])), \
|
||||
+ patch('tornado.gen.sleep', MagicMock(return_value=tornado.concurrent.Future())):
|
||||
+ process_count_max = 10
|
||||
+ mock_opts = salt.config.DEFAULT_MINION_OPTS
|
||||
+ mock_opts['minion_jid_queue_hwm'] = 100
|
||||
+ mock_opts["process_count_max"] = process_count_max
|
||||
+
|
||||
+ try:
|
||||
+ io_loop = tornado.ioloop.IOLoop()
|
||||
+ minion = salt.minion.Minion(mock_opts, jid_queue=[], io_loop=io_loop)
|
||||
+
|
||||
+ # mock gen.sleep to throw a special Exception when called, so that we detect it
|
||||
+ class SleepCalledEception(Exception):
|
||||
+ """Thrown when sleep is called"""
|
||||
+ pass
|
||||
+ tornado.gen.sleep.return_value.set_exception(SleepCalledEception())
|
||||
+
|
||||
+ # up until process_count_max: gen.sleep does not get called, processes are started normally
|
||||
+ for i in range(process_count_max):
|
||||
+ mock_data = {'fun': 'foo.bar',
|
||||
+ 'jid': i}
|
||||
+ io_loop.run_sync(lambda data=mock_data: minion._handle_decoded_payload(data))
|
||||
+ self.assertEqual(salt.utils.process.SignalHandlingMultiprocessingProcess.start.call_count, i + 1)
|
||||
+ self.assertEqual(len(minion.jid_queue), i + 1)
|
||||
+ salt.utils.minion.running.return_value += [i]
|
||||
+
|
||||
+ # above process_count_max: gen.sleep does get called, JIDs are created but no new processes are started
|
||||
+ mock_data = {'fun': 'foo.bar',
|
||||
+ 'jid': process_count_max + 1}
|
||||
+
|
||||
+ self.assertRaises(SleepCalledEception,
|
||||
+ lambda: io_loop.run_sync(lambda: minion._handle_decoded_payload(mock_data)))
|
||||
+ self.assertEqual(salt.utils.process.SignalHandlingMultiprocessingProcess.start.call_count,
|
||||
+ process_count_max)
|
||||
+ self.assertEqual(len(minion.jid_queue), process_count_max + 1)
|
||||
+ finally:
|
||||
+ minion.destroy()
|
||||
--
|
||||
2.13.6
|
||||
|
||||
|
@ -1,792 +0,0 @@
|
||||
From ca7031e5223bb3bd35c31211d29177f05ed5e304 Mon Sep 17 00:00:00 2001
|
||||
From: Silvio Moioli <smoioli@suse.de>
|
||||
Date: Thu, 13 Jul 2017 15:59:01 +0200
|
||||
Subject: [PATCH] list_pkgs: add parameter for returned attribute
|
||||
selection (bsc#1052264)
|
||||
|
||||
zypper.list_pkgs:
|
||||
* It adds a new optional parameter to list_pkg in the zypper module to return more data than the version (original reason is that for SUSE Manager integration we also need arch and install_date). Format is the same of existing method info_installed.
|
||||
|
||||
yumpkg.list_pkgs:
|
||||
* It adds a new optional parameter to list_pkg, originally added to the the zypper module via PR #42310, to yumpkg providing the same functionality and interface to the yum package manager.
|
||||
---
|
||||
salt/modules/pkg_resource.py | 42 +++++++++
|
||||
salt/modules/yumpkg.py | 102 +++++++++++++++++-----
|
||||
salt/modules/zypper.py | 94 ++++++++++++++------
|
||||
salt/utils/pkg/rpm.py | 22 +++--
|
||||
tests/unit/modules/test_yumpkg.py | 174 ++++++++++++++++++++++++++++++++++++++
|
||||
tests/unit/modules/test_zypper.py | 105 ++++++++++++++++++-----
|
||||
6 files changed, 466 insertions(+), 73 deletions(-)
|
||||
create mode 100644 tests/unit/modules/test_yumpkg.py
|
||||
|
||||
diff --git a/salt/modules/pkg_resource.py b/salt/modules/pkg_resource.py
|
||||
index 928dccae7d..a9f396b212 100644
|
||||
--- a/salt/modules/pkg_resource.py
|
||||
+++ b/salt/modules/pkg_resource.py
|
||||
@@ -5,6 +5,7 @@ Resources needed by pkg providers
|
||||
|
||||
# Import python libs
|
||||
from __future__ import absolute_import
|
||||
+import copy
|
||||
import fnmatch
|
||||
import logging
|
||||
import os
|
||||
@@ -306,3 +307,44 @@ def check_extra_requirements(pkgname, pkgver):
|
||||
return __salt__['pkg.check_extra_requirements'](pkgname, pkgver)
|
||||
|
||||
return True
|
||||
+
|
||||
+
|
||||
+def format_pkg_list(packages, versions_as_list, attr):
|
||||
+ '''
|
||||
+ Formats packages according to parameters for list_pkgs.
|
||||
+ '''
|
||||
+ ret = copy.deepcopy(packages)
|
||||
+ if attr:
|
||||
+ requested_attr = set(['epoch', 'version', 'release', 'arch',
|
||||
+ 'install_date', 'install_date_time_t'])
|
||||
+
|
||||
+ if attr != 'all':
|
||||
+ requested_attr &= set(attr + ['version'])
|
||||
+
|
||||
+ for name in ret:
|
||||
+ versions = []
|
||||
+ for all_attr in ret[name]:
|
||||
+ filtered_attr = {}
|
||||
+ for key in requested_attr:
|
||||
+ if all_attr[key]:
|
||||
+ filtered_attr[key] = all_attr[key]
|
||||
+ versions.append(filtered_attr)
|
||||
+ ret[name] = versions
|
||||
+ return ret
|
||||
+
|
||||
+ for name in ret:
|
||||
+ ret[name] = [format_version(d['epoch'], d['version'], d['release'])
|
||||
+ for d in ret[name]]
|
||||
+ if not versions_as_list:
|
||||
+ stringify(ret)
|
||||
+ return ret
|
||||
+
|
||||
+
|
||||
+def format_version(epoch, version, release):
|
||||
+ '''
|
||||
+ Formats a version string for list_pkgs.
|
||||
+ '''
|
||||
+ full_version = '{0}:{1}'.format(epoch, version) if epoch else version
|
||||
+ if release:
|
||||
+ full_version += '-{0}'.format(release)
|
||||
+ return full_version
|
||||
diff --git a/salt/modules/yumpkg.py b/salt/modules/yumpkg.py
|
||||
index 0af6a811f4..8b63bff4a3 100644
|
||||
--- a/salt/modules/yumpkg.py
|
||||
+++ b/salt/modules/yumpkg.py
|
||||
@@ -17,7 +17,6 @@ Support for YUM/DNF
|
||||
# Import python libs
|
||||
from __future__ import absolute_import
|
||||
import contextlib
|
||||
-import copy
|
||||
import datetime
|
||||
import fnmatch
|
||||
import itertools
|
||||
@@ -595,15 +594,35 @@ def version_cmp(pkg1, pkg2, ignore_epoch=False):
|
||||
|
||||
def list_pkgs(versions_as_list=False, **kwargs):
|
||||
'''
|
||||
- List the packages currently installed in a dict::
|
||||
+ List the packages currently installed as a dict. By default, the dict
|
||||
+ contains versions as a comma separated string::
|
||||
|
||||
- {'<package_name>': '<version>'}
|
||||
+ {'<package_name>': '<version>[,<version>...]'}
|
||||
+
|
||||
+ versions_as_list:
|
||||
+ If set to true, the versions are provided as a list
|
||||
+
|
||||
+ {'<package_name>': ['<version>', '<version>']}
|
||||
+
|
||||
+ attr:
|
||||
+ If a list of package attributes is specified, returned value will
|
||||
+ contain them in addition to version, eg.::
|
||||
+
|
||||
+ {'<package_name>': [{'version' : 'version', 'arch' : 'arch'}]}
|
||||
+
|
||||
+ Valid attributes are: ``epoch``, ``version``, ``release``, ``arch``,
|
||||
+ ``install_date``, ``install_date_time_t``.
|
||||
+
|
||||
+ If ``all`` is specified, all valid attributes will be returned.
|
||||
+
|
||||
+ .. versionadded:: Oxygen
|
||||
|
||||
CLI Example:
|
||||
|
||||
.. code-block:: bash
|
||||
|
||||
salt '*' pkg.list_pkgs
|
||||
+ salt '*' pkg.list_pkgs attr='["version", "arch"]'
|
||||
'''
|
||||
versions_as_list = salt.utils.is_true(versions_as_list)
|
||||
# not yet implemented or not applicable
|
||||
@@ -611,17 +630,14 @@ def list_pkgs(versions_as_list=False, **kwargs):
|
||||
for x in ('removed', 'purge_desired')]):
|
||||
return {}
|
||||
|
||||
+ attr = kwargs.get("attr")
|
||||
if 'pkg.list_pkgs' in __context__:
|
||||
- if versions_as_list:
|
||||
- return __context__['pkg.list_pkgs']
|
||||
- else:
|
||||
- ret = copy.deepcopy(__context__['pkg.list_pkgs'])
|
||||
- __salt__['pkg_resource.stringify'](ret)
|
||||
- return ret
|
||||
+ cached = __context__['pkg.list_pkgs']
|
||||
+ return __salt__['pkg_resource.format_pkg_list'](cached, versions_as_list, attr)
|
||||
|
||||
ret = {}
|
||||
cmd = ['rpm', '-qa', '--queryformat',
|
||||
- salt.utils.pkg.rpm.QUERYFORMAT.replace('%{REPOID}', '(none)\n')]
|
||||
+ salt.utils.pkg.rpm.QUERYFORMAT.replace('%{REPOID}', '(none)') + '\n']
|
||||
output = __salt__['cmd.run'](cmd,
|
||||
python_shell=False,
|
||||
output_loglevel='trace')
|
||||
@@ -631,15 +647,25 @@ def list_pkgs(versions_as_list=False, **kwargs):
|
||||
osarch=__grains__['osarch']
|
||||
)
|
||||
if pkginfo is not None:
|
||||
- __salt__['pkg_resource.add_pkg'](ret,
|
||||
- pkginfo.name,
|
||||
- pkginfo.version)
|
||||
-
|
||||
- __salt__['pkg_resource.sort_pkglist'](ret)
|
||||
- __context__['pkg.list_pkgs'] = copy.deepcopy(ret)
|
||||
- if not versions_as_list:
|
||||
- __salt__['pkg_resource.stringify'](ret)
|
||||
- return ret
|
||||
+ # see rpm version string rules available at https://goo.gl/UGKPNd
|
||||
+ pkgver = pkginfo.version
|
||||
+ epoch = ''
|
||||
+ release = ''
|
||||
+ if ':' in pkgver:
|
||||
+ epoch, pkgver = pkgver.split(":", 1)
|
||||
+ if '-' in pkgver:
|
||||
+ pkgver, release = pkgver.split("-", 1)
|
||||
+ all_attr = {'epoch': epoch, 'version': pkgver, 'release': release,
|
||||
+ 'arch': pkginfo.arch, 'install_date': pkginfo.install_date,
|
||||
+ 'install_date_time_t': pkginfo.install_date_time_t}
|
||||
+ __salt__['pkg_resource.add_pkg'](ret, pkginfo.name, all_attr)
|
||||
+
|
||||
+ for pkgname in ret:
|
||||
+ ret[pkgname] = sorted(ret[pkgname], key=lambda d: d['version'])
|
||||
+
|
||||
+ __context__['pkg.list_pkgs'] = ret
|
||||
+
|
||||
+ return __salt__['pkg_resource.format_pkg_list'](ret, versions_as_list, attr)
|
||||
|
||||
|
||||
def list_repo_pkgs(*args, **kwargs):
|
||||
@@ -1221,11 +1247,42 @@ def install(name=None,
|
||||
|
||||
.. versionadded:: 2014.7.0
|
||||
|
||||
+ diff_attr:
|
||||
+ If a list of package attributes is specified, returned value will
|
||||
+ contain them, eg.::
|
||||
+
|
||||
+ {'<package>': {
|
||||
+ 'old': {
|
||||
+ 'version': '<old-version>',
|
||||
+ 'arch': '<old-arch>'},
|
||||
+
|
||||
+ 'new': {
|
||||
+ 'version': '<new-version>',
|
||||
+ 'arch': '<new-arch>'}}}
|
||||
+
|
||||
+ Valid attributes are: ``epoch``, ``version``, ``release``, ``arch``,
|
||||
+ ``install_date``, ``install_date_time_t``.
|
||||
+
|
||||
+ If ``all`` is specified, all valid attributes will be returned.
|
||||
+
|
||||
+ .. versionadded:: Oxygen
|
||||
|
||||
Returns a dict containing the new package names and versions::
|
||||
|
||||
{'<package>': {'old': '<old-version>',
|
||||
'new': '<new-version>'}}
|
||||
+
|
||||
+ If an attribute list in diff_attr is specified, the dict will also contain
|
||||
+ any specified attribute, eg.::
|
||||
+
|
||||
+ {'<package>': {
|
||||
+ 'old': {
|
||||
+ 'version': '<old-version>',
|
||||
+ 'arch': '<old-arch>'},
|
||||
+
|
||||
+ 'new': {
|
||||
+ 'version': '<new-version>',
|
||||
+ 'arch': '<new-arch>'}}}
|
||||
'''
|
||||
repo_arg = _get_repo_options(**kwargs)
|
||||
exclude_arg = _get_excludes_option(**kwargs)
|
||||
@@ -1254,10 +1311,11 @@ def install(name=None,
|
||||
log.warning('"version" parameter will be ignored for multiple '
|
||||
'package targets')
|
||||
|
||||
- old = list_pkgs(versions_as_list=False) if not downloadonly else list_downloaded()
|
||||
+ diff_attr = kwargs.get("diff_attr")
|
||||
+ old = list_pkgs(versions_as_list=False, attr=diff_attr) if not downloadonly else list_downloaded()
|
||||
# Use of __context__ means no duplicate work here, just accessing
|
||||
# information already in __context__ from the previous call to list_pkgs()
|
||||
- old_as_list = list_pkgs(versions_as_list=True) if not downloadonly else list_downloaded()
|
||||
+ old_as_list = list_pkgs(versions_as_list=True, attr=diff_attr) if not downloadonly else list_downloaded()
|
||||
|
||||
to_install = []
|
||||
to_downgrade = []
|
||||
@@ -1560,7 +1618,7 @@ def install(name=None,
|
||||
errors.append(out['stdout'])
|
||||
|
||||
__context__.pop('pkg.list_pkgs', None)
|
||||
- new = list_pkgs(versions_as_list=False) if not downloadonly else list_downloaded()
|
||||
+ new = list_pkgs(versions_as_list=False, attr=diff_attr) if not downloadonly else list_downloaded()
|
||||
|
||||
ret = salt.utils.compare_dicts(old, new)
|
||||
|
||||
diff --git a/salt/modules/zypper.py b/salt/modules/zypper.py
|
||||
index 4ede437c30..b440af08a4 100644
|
||||
--- a/salt/modules/zypper.py
|
||||
+++ b/salt/modules/zypper.py
|
||||
@@ -14,7 +14,6 @@ Package support for openSUSE via the zypper package manager
|
||||
|
||||
# Import python libs
|
||||
from __future__ import absolute_import
|
||||
-import copy
|
||||
import fnmatch
|
||||
import logging
|
||||
import re
|
||||
@@ -652,8 +651,8 @@ def version_cmp(ver1, ver2, ignore_epoch=False):
|
||||
|
||||
def list_pkgs(versions_as_list=False, **kwargs):
|
||||
'''
|
||||
- List the packages currently installed as a dict with versions
|
||||
- as a comma separated string::
|
||||
+ List the packages currently installed as a dict. By default, the dict
|
||||
+ contains versions as a comma separated string::
|
||||
|
||||
{'<package_name>': '<version>[,<version>...]'}
|
||||
|
||||
@@ -662,6 +661,19 @@ def list_pkgs(versions_as_list=False, **kwargs):
|
||||
|
||||
{'<package_name>': ['<version>', '<version>']}
|
||||
|
||||
+ attr:
|
||||
+ If a list of package attributes is specified, returned value will
|
||||
+ contain them in addition to version, eg.::
|
||||
+
|
||||
+ {'<package_name>': [{'version' : 'version', 'arch' : 'arch'}]}
|
||||
+
|
||||
+ Valid attributes are: ``epoch``, ``version``, ``release``, ``arch``,
|
||||
+ ``install_date``, ``install_date_time_t``.
|
||||
+
|
||||
+ If ``all`` is specified, all valid attributes will be returned.
|
||||
+
|
||||
+ .. versionadded:: Oxygen
|
||||
+
|
||||
removed:
|
||||
not supported
|
||||
|
||||
@@ -673,6 +685,7 @@ def list_pkgs(versions_as_list=False, **kwargs):
|
||||
.. code-block:: bash
|
||||
|
||||
salt '*' pkg.list_pkgs
|
||||
+ salt '*' pkg.list_pkgs attr='["version", "arch"]'
|
||||
'''
|
||||
versions_as_list = salt.utils.is_true(versions_as_list)
|
||||
# not yet implemented or not applicable
|
||||
@@ -680,30 +693,30 @@ def list_pkgs(versions_as_list=False, **kwargs):
|
||||
for x in ('removed', 'purge_desired')]):
|
||||
return {}
|
||||
|
||||
+ attr = kwargs.get("attr")
|
||||
if 'pkg.list_pkgs' in __context__:
|
||||
- if versions_as_list:
|
||||
- return __context__['pkg.list_pkgs']
|
||||
- else:
|
||||
- ret = copy.deepcopy(__context__['pkg.list_pkgs'])
|
||||
- __salt__['pkg_resource.stringify'](ret)
|
||||
- return ret
|
||||
+ cached = __context__['pkg.list_pkgs']
|
||||
+ return __salt__['pkg_resource.format_pkg_list'](cached, versions_as_list, attr)
|
||||
|
||||
- cmd = ['rpm', '-qa', '--queryformat', '%{NAME}_|-%{VERSION}_|-%{RELEASE}_|-%|EPOCH?{%{EPOCH}}:{}|\\n']
|
||||
+ cmd = ['rpm', '-qa', '--queryformat', (
|
||||
+ "%{NAME}_|-%{VERSION}_|-%{RELEASE}_|-%{ARCH}_|-"
|
||||
+ "%|EPOCH?{%{EPOCH}}:{}|_|-%{INSTALLTIME}\\n")]
|
||||
ret = {}
|
||||
for line in __salt__['cmd.run'](cmd, output_loglevel='trace', python_shell=False).splitlines():
|
||||
- name, pkgver, rel, epoch = line.split('_|-')
|
||||
- if epoch:
|
||||
- pkgver = '{0}:{1}'.format(epoch, pkgver)
|
||||
- if rel:
|
||||
- pkgver += '-{0}'.format(rel)
|
||||
- __salt__['pkg_resource.add_pkg'](ret, name, pkgver)
|
||||
-
|
||||
- __salt__['pkg_resource.sort_pkglist'](ret)
|
||||
- __context__['pkg.list_pkgs'] = copy.deepcopy(ret)
|
||||
- if not versions_as_list:
|
||||
- __salt__['pkg_resource.stringify'](ret)
|
||||
+ name, pkgver, rel, arch, epoch, install_time = line.split('_|-')
|
||||
+ install_date = datetime.datetime.utcfromtimestamp(int(install_time)).isoformat() + "Z"
|
||||
+ install_date_time_t = int(install_time)
|
||||
|
||||
- return ret
|
||||
+ all_attr = {'epoch': epoch, 'version': pkgver, 'release': rel, 'arch': arch,
|
||||
+ 'install_date': install_date, 'install_date_time_t': install_date_time_t}
|
||||
+ __salt__['pkg_resource.add_pkg'](ret, name, all_attr)
|
||||
+
|
||||
+ for pkgname in ret:
|
||||
+ ret[pkgname] = sorted(ret[pkgname], key=lambda d: d['version'])
|
||||
+
|
||||
+ __context__['pkg.list_pkgs'] = ret
|
||||
+
|
||||
+ return __salt__['pkg_resource.format_pkg_list'](ret, versions_as_list, attr)
|
||||
|
||||
|
||||
def _get_configured_repos():
|
||||
@@ -1069,11 +1082,43 @@ def install(name=None,
|
||||
Zypper returns error code 106 if one of the repositories are not available for various reasons.
|
||||
In case to set strict check, this parameter needs to be set to True. Default: False.
|
||||
|
||||
+ diff_attr:
|
||||
+ If a list of package attributes is specified, returned value will
|
||||
+ contain them, eg.::
|
||||
+
|
||||
+ {'<package>': {
|
||||
+ 'old': {
|
||||
+ 'version': '<old-version>',
|
||||
+ 'arch': '<old-arch>'},
|
||||
+
|
||||
+ 'new': {
|
||||
+ 'version': '<new-version>',
|
||||
+ 'arch': '<new-arch>'}}}
|
||||
+
|
||||
+ Valid attributes are: ``epoch``, ``version``, ``release``, ``arch``,
|
||||
+ ``install_date``, ``install_date_time_t``.
|
||||
+
|
||||
+ If ``all`` is specified, all valid attributes will be returned.
|
||||
+
|
||||
+ .. versionadded:: Oxygen
|
||||
+
|
||||
|
||||
Returns a dict containing the new package names and versions::
|
||||
|
||||
{'<package>': {'old': '<old-version>',
|
||||
'new': '<new-version>'}}
|
||||
+
|
||||
+ If an attribute list is specified in ``diff_attr``, the dict will also contain
|
||||
+ any specified attribute, eg.::
|
||||
+
|
||||
+ {'<package>': {
|
||||
+ 'old': {
|
||||
+ 'version': '<old-version>',
|
||||
+ 'arch': '<old-arch>'},
|
||||
+
|
||||
+ 'new': {
|
||||
+ 'version': '<new-version>',
|
||||
+ 'arch': '<new-arch>'}}}
|
||||
'''
|
||||
if refresh:
|
||||
refresh_db()
|
||||
@@ -1117,7 +1162,8 @@ def install(name=None,
|
||||
else:
|
||||
targets = pkg_params
|
||||
|
||||
- old = list_pkgs() if not downloadonly else list_downloaded()
|
||||
+ diff_attr = kwargs.get("diff_attr")
|
||||
+ old = list_pkgs(attr=diff_attr) if not downloadonly else list_downloaded()
|
||||
downgrades = []
|
||||
if fromrepo:
|
||||
fromrepoopt = ['--force', '--force-resolution', '--from', fromrepo]
|
||||
@@ -1155,7 +1201,7 @@ def install(name=None,
|
||||
__zypper__(no_repo_failure=ignore_repo_failure).call(*cmd)
|
||||
|
||||
__context__.pop('pkg.list_pkgs', None)
|
||||
- new = list_pkgs() if not downloadonly else list_downloaded()
|
||||
+ new = list_pkgs(attr=diff_attr) if not downloadonly else list_downloaded()
|
||||
|
||||
# Handle packages which report multiple new versions
|
||||
# (affects only kernel packages at this point)
|
||||
diff --git a/salt/utils/pkg/rpm.py b/salt/utils/pkg/rpm.py
|
||||
index 0d5c21a82f..7ac7db6316 100644
|
||||
--- a/salt/utils/pkg/rpm.py
|
||||
+++ b/salt/utils/pkg/rpm.py
|
||||
@@ -6,6 +6,7 @@ Common functions for working with RPM packages
|
||||
# Import python libs
|
||||
from __future__ import absolute_import
|
||||
import collections
|
||||
+import datetime
|
||||
import logging
|
||||
import subprocess
|
||||
|
||||
@@ -34,7 +35,7 @@ ARCHES = ARCHES_64 + ARCHES_32 + ARCHES_PPC + ARCHES_S390 + \
|
||||
ARCHES_ALPHA + ARCHES_ARM + ARCHES_SH
|
||||
|
||||
# EPOCHNUM can't be used until RHEL5 is EOL as it is not present
|
||||
-QUERYFORMAT = '%{NAME}_|-%{EPOCH}_|-%{VERSION}_|-%{RELEASE}_|-%{ARCH}_|-%{REPOID}'
|
||||
+QUERYFORMAT = '%{NAME}_|-%{EPOCH}_|-%{VERSION}_|-%{RELEASE}_|-%{ARCH}_|-%{REPOID}_|-%{INSTALLTIME}'
|
||||
|
||||
|
||||
def get_osarch():
|
||||
@@ -59,15 +60,17 @@ def check_32(arch, osarch=None):
|
||||
return all(x in ARCHES_32 for x in (osarch, arch))
|
||||
|
||||
|
||||
-def pkginfo(name, version, arch, repoid):
|
||||
+def pkginfo(name, version, arch, repoid, install_date=None, install_date_time_t=None):
|
||||
'''
|
||||
Build and return a pkginfo namedtuple
|
||||
'''
|
||||
pkginfo_tuple = collections.namedtuple(
|
||||
'PkgInfo',
|
||||
- ('name', 'version', 'arch', 'repoid')
|
||||
+ ('name', 'version', 'arch', 'repoid', 'install_date',
|
||||
+ 'install_date_time_t')
|
||||
)
|
||||
- return pkginfo_tuple(name, version, arch, repoid)
|
||||
+ return pkginfo_tuple(name, version, arch, repoid, install_date,
|
||||
+ install_date_time_t)
|
||||
|
||||
|
||||
def resolve_name(name, arch, osarch=None):
|
||||
@@ -89,7 +92,7 @@ def parse_pkginfo(line, osarch=None):
|
||||
pkginfo namedtuple.
|
||||
'''
|
||||
try:
|
||||
- name, epoch, version, release, arch, repoid = line.split('_|-')
|
||||
+ name, epoch, version, release, arch, repoid, install_time = line.split('_|-')
|
||||
# Handle unpack errors (should never happen with the queryformat we are
|
||||
# using, but can't hurt to be careful).
|
||||
except ValueError:
|
||||
@@ -101,7 +104,14 @@ def parse_pkginfo(line, osarch=None):
|
||||
if epoch not in ('(none)', '0'):
|
||||
version = ':'.join((epoch, version))
|
||||
|
||||
- return pkginfo(name, version, arch, repoid)
|
||||
+ if install_time not in ('(none)', '0'):
|
||||
+ install_date = datetime.datetime.utcfromtimestamp(int(install_time)).isoformat() + "Z"
|
||||
+ install_date_time_t = int(install_time)
|
||||
+ else:
|
||||
+ install_date = None
|
||||
+ install_date_time_t = None
|
||||
+
|
||||
+ return pkginfo(name, version, arch, repoid, install_date, install_date_time_t)
|
||||
|
||||
|
||||
def combine_comments(comments):
|
||||
diff --git a/tests/unit/modules/test_yumpkg.py b/tests/unit/modules/test_yumpkg.py
|
||||
new file mode 100644
|
||||
index 0000000000..cf754d6289
|
||||
--- /dev/null
|
||||
+++ b/tests/unit/modules/test_yumpkg.py
|
||||
@@ -0,0 +1,174 @@
|
||||
+# -*- coding: utf-8 -*-
|
||||
+
|
||||
+# Import Python Libs
|
||||
+from __future__ import absolute_import
|
||||
+import os
|
||||
+
|
||||
+# Import Salt Testing Libs
|
||||
+from tests.support.mixins import LoaderModuleMockMixin
|
||||
+from tests.support.unit import TestCase, skipIf
|
||||
+from tests.support.mock import (
|
||||
+ MagicMock,
|
||||
+ patch,
|
||||
+ NO_MOCK,
|
||||
+ NO_MOCK_REASON
|
||||
+)
|
||||
+
|
||||
+# Import Salt libs
|
||||
+import salt.modules.yumpkg as yumpkg
|
||||
+import salt.modules.pkg_resource as pkg_resource
|
||||
+
|
||||
+
|
||||
+@skipIf(NO_MOCK, NO_MOCK_REASON)
|
||||
+class YumTestCase(TestCase, LoaderModuleMockMixin):
|
||||
+ '''
|
||||
+ Test cases for salt.modules.yumpkg
|
||||
+ '''
|
||||
+ def setup_loader_modules(self):
|
||||
+ return {yumpkg: {'rpm': None}}
|
||||
+
|
||||
+ def test_list_pkgs(self):
|
||||
+ '''
|
||||
+ Test packages listing.
|
||||
+
|
||||
+ :return:
|
||||
+ '''
|
||||
+ def _add_data(data, key, value):
|
||||
+ data.setdefault(key, []).append(value)
|
||||
+
|
||||
+ rpm_out = [
|
||||
+ 'python-urlgrabber_|-(none)_|-3.10_|-8.el7_|-noarch_|-(none)_|-1487838471',
|
||||
+ 'alsa-lib_|-(none)_|-1.1.1_|-1.el7_|-x86_64_|-(none)_|-1487838475',
|
||||
+ 'gnupg2_|-(none)_|-2.0.22_|-4.el7_|-x86_64_|-(none)_|-1487838477',
|
||||
+ 'rpm-python_|-(none)_|-4.11.3_|-21.el7_|-x86_64_|-(none)_|-1487838477',
|
||||
+ 'pygpgme_|-(none)_|-0.3_|-9.el7_|-x86_64_|-(none)_|-1487838478',
|
||||
+ 'yum_|-(none)_|-3.4.3_|-150.el7.centos_|-noarch_|-(none)_|-1487838479',
|
||||
+ 'lzo_|-(none)_|-2.06_|-8.el7_|-x86_64_|-(none)_|-1487838479',
|
||||
+ 'qrencode-libs_|-(none)_|-3.4.1_|-3.el7_|-x86_64_|-(none)_|-1487838480',
|
||||
+ 'ustr_|-(none)_|-1.0.4_|-16.el7_|-x86_64_|-(none)_|-1487838480',
|
||||
+ 'shadow-utils_|-2_|-4.1.5.1_|-24.el7_|-x86_64_|-(none)_|-1487838481',
|
||||
+ 'util-linux_|-(none)_|-2.23.2_|-33.el7_|-x86_64_|-(none)_|-1487838484',
|
||||
+ 'openssh_|-(none)_|-6.6.1p1_|-33.el7_3_|-x86_64_|-(none)_|-1487838485',
|
||||
+ 'virt-what_|-(none)_|-1.13_|-8.el7_|-x86_64_|-(none)_|-1487838486',
|
||||
+ ]
|
||||
+ with patch.dict(yumpkg.__grains__, {'osarch': 'x86_64'}), \
|
||||
+ patch.dict(yumpkg.__salt__, {'cmd.run': MagicMock(return_value=os.linesep.join(rpm_out))}), \
|
||||
+ patch.dict(yumpkg.__salt__, {'pkg_resource.add_pkg': _add_data}), \
|
||||
+ patch.dict(yumpkg.__salt__, {'pkg_resource.format_pkg_list': pkg_resource.format_pkg_list}), \
|
||||
+ patch.dict(yumpkg.__salt__, {'pkg_resource.stringify': MagicMock()}):
|
||||
+ pkgs = yumpkg.list_pkgs(versions_as_list=True)
|
||||
+ for pkg_name, pkg_version in {
|
||||
+ 'python-urlgrabber': '3.10-8.el7',
|
||||
+ 'alsa-lib': '1.1.1-1.el7',
|
||||
+ 'gnupg2': '2.0.22-4.el7',
|
||||
+ 'rpm-python': '4.11.3-21.el7',
|
||||
+ 'pygpgme': '0.3-9.el7',
|
||||
+ 'yum': '3.4.3-150.el7.centos',
|
||||
+ 'lzo': '2.06-8.el7',
|
||||
+ 'qrencode-libs': '3.4.1-3.el7',
|
||||
+ 'ustr': '1.0.4-16.el7',
|
||||
+ 'shadow-utils': '2:4.1.5.1-24.el7',
|
||||
+ 'util-linux': '2.23.2-33.el7',
|
||||
+ 'openssh': '6.6.1p1-33.el7_3',
|
||||
+ 'virt-what': '1.13-8.el7'}.items():
|
||||
+ self.assertTrue(pkgs.get(pkg_name))
|
||||
+ self.assertEqual(pkgs[pkg_name], [pkg_version])
|
||||
+
|
||||
+ def test_list_pkgs_with_attr(self):
|
||||
+ '''
|
||||
+ Test packages listing with the attr parameter
|
||||
+
|
||||
+ :return:
|
||||
+ '''
|
||||
+ def _add_data(data, key, value):
|
||||
+ data.setdefault(key, []).append(value)
|
||||
+
|
||||
+ rpm_out = [
|
||||
+ 'python-urlgrabber_|-(none)_|-3.10_|-8.el7_|-noarch_|-(none)_|-1487838471',
|
||||
+ 'alsa-lib_|-(none)_|-1.1.1_|-1.el7_|-x86_64_|-(none)_|-1487838475',
|
||||
+ 'gnupg2_|-(none)_|-2.0.22_|-4.el7_|-x86_64_|-(none)_|-1487838477',
|
||||
+ 'rpm-python_|-(none)_|-4.11.3_|-21.el7_|-x86_64_|-(none)_|-1487838477',
|
||||
+ 'pygpgme_|-(none)_|-0.3_|-9.el7_|-x86_64_|-(none)_|-1487838478',
|
||||
+ 'yum_|-(none)_|-3.4.3_|-150.el7.centos_|-noarch_|-(none)_|-1487838479',
|
||||
+ 'lzo_|-(none)_|-2.06_|-8.el7_|-x86_64_|-(none)_|-1487838479',
|
||||
+ 'qrencode-libs_|-(none)_|-3.4.1_|-3.el7_|-x86_64_|-(none)_|-1487838480',
|
||||
+ 'ustr_|-(none)_|-1.0.4_|-16.el7_|-x86_64_|-(none)_|-1487838480',
|
||||
+ 'shadow-utils_|-2_|-4.1.5.1_|-24.el7_|-x86_64_|-(none)_|-1487838481',
|
||||
+ 'util-linux_|-(none)_|-2.23.2_|-33.el7_|-x86_64_|-(none)_|-1487838484',
|
||||
+ 'openssh_|-(none)_|-6.6.1p1_|-33.el7_3_|-x86_64_|-(none)_|-1487838485',
|
||||
+ 'virt-what_|-(none)_|-1.13_|-8.el7_|-x86_64_|-(none)_|-1487838486',
|
||||
+ ]
|
||||
+ with patch.dict(yumpkg.__grains__, {'osarch': 'x86_64'}), \
|
||||
+ patch.dict(yumpkg.__salt__, {'cmd.run': MagicMock(return_value=os.linesep.join(rpm_out))}), \
|
||||
+ patch.dict(yumpkg.__salt__, {'pkg_resource.add_pkg': _add_data}), \
|
||||
+ patch.dict(yumpkg.__salt__, {'pkg_resource.format_pkg_list': pkg_resource.format_pkg_list}), \
|
||||
+ patch.dict(yumpkg.__salt__, {'pkg_resource.stringify': MagicMock()}):
|
||||
+ pkgs = yumpkg.list_pkgs(attr=['arch', 'install_date_time_t'])
|
||||
+ for pkg_name, pkg_attr in {
|
||||
+ 'python-urlgrabber': {
|
||||
+ 'version': '3.10-8.el7',
|
||||
+ 'arch': 'noarch',
|
||||
+ 'install_date_time_t': 1487838471,
|
||||
+ },
|
||||
+ 'alsa-lib': {
|
||||
+ 'version': '1.1.1-1.el7',
|
||||
+ 'arch': 'x86_64',
|
||||
+ 'install_date_time_t': 1487838475,
|
||||
+ },
|
||||
+ 'gnupg2': {
|
||||
+ 'version': '2.0.22-4.el7',
|
||||
+ 'arch': 'x86_64',
|
||||
+ 'install_date_time_t': 1487838477,
|
||||
+ },
|
||||
+ 'rpm-python': {
|
||||
+ 'version': '4.11.3-21.el7',
|
||||
+ 'arch': 'x86_64',
|
||||
+ 'install_date_time_t': 1487838477,
|
||||
+ },
|
||||
+ 'pygpgme': {
|
||||
+ 'version': '0.3-9.el7',
|
||||
+ 'arch': 'x86_64',
|
||||
+ 'install_date_time_t': 1487838478,
|
||||
+ },
|
||||
+ 'yum': {
|
||||
+ 'version': '3.4.3-150.el7.centos',
|
||||
+ 'arch': 'noarch',
|
||||
+ 'install_date_time_t': 1487838479,
|
||||
+ },
|
||||
+ 'lzo': {
|
||||
+ 'version': '2.06-8.el7',
|
||||
+ 'arch': 'x86_64',
|
||||
+ 'install_date_time_t': 1487838479,
|
||||
+ },
|
||||
+ 'qrencode-libs': {
|
||||
+ 'version': '3.4.1-3.el7',
|
||||
+ 'arch': 'x86_64',
|
||||
+ 'install_date_time_t': 1487838480,
|
||||
+ },
|
||||
+ 'ustr': {
|
||||
+ 'version': '1.0.4-16.el7',
|
||||
+ 'arch': 'x86_64',
|
||||
+ 'install_date_time_t': 1487838480,
|
||||
+ },
|
||||
+ 'shadow-utils': {
|
||||
+ 'version': '2:4.1.5.1-24.el7',
|
||||
+ 'arch': 'x86_64',
|
||||
+ 'install_date_time_t': 1487838481,
|
||||
+ },
|
||||
+ 'util-linux': {
|
||||
+ 'version': '2.23.2-33.el7',
|
||||
+ 'arch': 'x86_64',
|
||||
+ 'install_date_time_t': 1487838484,
|
||||
+ },
|
||||
+ 'openssh': {
|
||||
+ 'version': '6.6.1p1-33.el7_3',
|
||||
+ 'arch': 'x86_64',
|
||||
+ 'install_date_time_t': 1487838485,
|
||||
+ },
|
||||
+ 'virt-what': {
|
||||
+ 'version': '1.13-8.el7',
|
||||
+ 'arch': 'x86_64',
|
||||
+ 'install_date_time_t': 1487838486,
|
||||
+ }}.items():
|
||||
+ self.assertTrue(pkgs.get(pkg_name))
|
||||
+ self.assertEqual(pkgs[pkg_name], [pkg_attr])
|
||||
diff --git a/tests/unit/modules/test_zypper.py b/tests/unit/modules/test_zypper.py
|
||||
index f3403e6e1c..41f3845646 100644
|
||||
--- a/tests/unit/modules/test_zypper.py
|
||||
+++ b/tests/unit/modules/test_zypper.py
|
||||
@@ -23,6 +23,7 @@ from tests.support.mock import (
|
||||
# Import Salt libs
|
||||
import salt.utils
|
||||
import salt.modules.zypper as zypper
|
||||
+import salt.modules.pkg_resource as pkg_resource
|
||||
from salt.exceptions import CommandExecutionError
|
||||
|
||||
# Import 3rd-party libs
|
||||
@@ -486,30 +487,92 @@ Repository 'DUMMY' not found by its alias, number, or URI.
|
||||
:return:
|
||||
'''
|
||||
def _add_data(data, key, value):
|
||||
- data[key] = value
|
||||
+ data.setdefault(key, []).append(value)
|
||||
|
||||
rpm_out = [
|
||||
- 'protobuf-java_|-2.6.1_|-3.1.develHead_|-',
|
||||
- 'yast2-ftp-server_|-3.1.8_|-8.1_|-',
|
||||
- 'jose4j_|-0.4.4_|-2.1.develHead_|-',
|
||||
- 'apache-commons-cli_|-1.2_|-1.233_|-',
|
||||
- 'jakarta-commons-discovery_|-0.4_|-129.686_|-',
|
||||
- 'susemanager-build-keys-web_|-12.0_|-5.1.develHead_|-',
|
||||
+ 'protobuf-java_|-2.6.1_|-3.1.develHead_|-noarch_|-_|-1499257756',
|
||||
+ 'yast2-ftp-server_|-3.1.8_|-8.1_|-x86_64_|-_|-1499257798',
|
||||
+ 'jose4j_|-0.4.4_|-2.1.develHead_|-noarch_|-_|-1499257756',
|
||||
+ 'apache-commons-cli_|-1.2_|-1.233_|-noarch_|-_|-1498636510',
|
||||
+ 'jakarta-commons-discovery_|-0.4_|-129.686_|-noarch_|-_|-1498636511',
|
||||
+ 'susemanager-build-keys-web_|-12.0_|-5.1.develHead_|-noarch_|-_|-1498636510',
|
||||
]
|
||||
- with patch.dict(zypper.__salt__, {'cmd.run': MagicMock(return_value=os.linesep.join(rpm_out))}):
|
||||
- with patch.dict(zypper.__salt__, {'pkg_resource.add_pkg': _add_data}):
|
||||
- with patch.dict(zypper.__salt__, {'pkg_resource.sort_pkglist': MagicMock()}):
|
||||
- with patch.dict(zypper.__salt__, {'pkg_resource.stringify': MagicMock()}):
|
||||
- pkgs = zypper.list_pkgs()
|
||||
- for pkg_name, pkg_version in {
|
||||
- 'jakarta-commons-discovery': '0.4-129.686',
|
||||
- 'yast2-ftp-server': '3.1.8-8.1',
|
||||
- 'protobuf-java': '2.6.1-3.1.develHead',
|
||||
- 'susemanager-build-keys-web': '12.0-5.1.develHead',
|
||||
- 'apache-commons-cli': '1.2-1.233',
|
||||
- 'jose4j': '0.4.4-2.1.develHead'}.items():
|
||||
- self.assertTrue(pkgs.get(pkg_name))
|
||||
- self.assertEqual(pkgs[pkg_name], pkg_version)
|
||||
+ with patch.dict(zypper.__salt__, {'cmd.run': MagicMock(return_value=os.linesep.join(rpm_out))}), \
|
||||
+ patch.dict(zypper.__salt__, {'pkg_resource.add_pkg': _add_data}), \
|
||||
+ patch.dict(zypper.__salt__, {'pkg_resource.format_pkg_list': pkg_resource.format_pkg_list}), \
|
||||
+ patch.dict(zypper.__salt__, {'pkg_resource.stringify': MagicMock()}):
|
||||
+ pkgs = zypper.list_pkgs(versions_as_list=True)
|
||||
+ for pkg_name, pkg_version in {
|
||||
+ 'jakarta-commons-discovery': '0.4-129.686',
|
||||
+ 'yast2-ftp-server': '3.1.8-8.1',
|
||||
+ 'protobuf-java': '2.6.1-3.1.develHead',
|
||||
+ 'susemanager-build-keys-web': '12.0-5.1.develHead',
|
||||
+ 'apache-commons-cli': '1.2-1.233',
|
||||
+ 'jose4j': '0.4.4-2.1.develHead'}.items():
|
||||
+ self.assertTrue(pkgs.get(pkg_name))
|
||||
+ self.assertEqual(pkgs[pkg_name], [pkg_version])
|
||||
+
|
||||
+ def test_list_pkgs_with_attr(self):
|
||||
+ '''
|
||||
+ Test packages listing with the attr parameter
|
||||
+
|
||||
+ :return:
|
||||
+ '''
|
||||
+ def _add_data(data, key, value):
|
||||
+ data.setdefault(key, []).append(value)
|
||||
+
|
||||
+ rpm_out = [
|
||||
+ 'protobuf-java_|-2.6.1_|-3.1.develHead_|-noarch_|-_|-1499257756',
|
||||
+ 'yast2-ftp-server_|-3.1.8_|-8.1_|-x86_64_|-_|-1499257798',
|
||||
+ 'jose4j_|-0.4.4_|-2.1.develHead_|-noarch_|-_|-1499257756',
|
||||
+ 'apache-commons-cli_|-1.2_|-1.233_|-noarch_|-_|-1498636510',
|
||||
+ 'jakarta-commons-discovery_|-0.4_|-129.686_|-noarch_|-_|-1498636511',
|
||||
+ 'susemanager-build-keys-web_|-12.0_|-5.1.develHead_|-noarch_|-_|-1498636510',
|
||||
+ ]
|
||||
+ with patch.dict(zypper.__salt__, {'cmd.run': MagicMock(return_value=os.linesep.join(rpm_out))}), \
|
||||
+ patch.dict(zypper.__salt__, {'pkg_resource.add_pkg': _add_data}), \
|
||||
+ patch.dict(zypper.__salt__, {'pkg_resource.format_pkg_list': pkg_resource.format_pkg_list}), \
|
||||
+ patch.dict(zypper.__salt__, {'pkg_resource.stringify': MagicMock()}):
|
||||
+ pkgs = zypper.list_pkgs(attr=['epoch', 'release', 'arch', 'install_date_time_t'])
|
||||
+ for pkg_name, pkg_attr in {
|
||||
+ 'jakarta-commons-discovery': {
|
||||
+ 'version': '0.4',
|
||||
+ 'release': '129.686',
|
||||
+ 'arch': 'noarch',
|
||||
+ 'install_date_time_t': 1498636511,
|
||||
+ },
|
||||
+ 'yast2-ftp-server': {
|
||||
+ 'version': '3.1.8',
|
||||
+ 'release': '8.1',
|
||||
+ 'arch': 'x86_64',
|
||||
+ 'install_date_time_t': 1499257798,
|
||||
+ },
|
||||
+ 'protobuf-java': {
|
||||
+ 'version': '2.6.1',
|
||||
+ 'release': '3.1.develHead',
|
||||
+ 'install_date_time_t': 1499257756,
|
||||
+ 'arch': 'noarch',
|
||||
+ },
|
||||
+ 'susemanager-build-keys-web': {
|
||||
+ 'version': '12.0',
|
||||
+ 'release': '5.1.develHead',
|
||||
+ 'arch': 'noarch',
|
||||
+ 'install_date_time_t': 1498636510,
|
||||
+ },
|
||||
+ 'apache-commons-cli': {
|
||||
+ 'version': '1.2',
|
||||
+ 'release': '1.233',
|
||||
+ 'arch': 'noarch',
|
||||
+ 'install_date_time_t': 1498636510,
|
||||
+ },
|
||||
+ 'jose4j': {
|
||||
+ 'arch': 'noarch',
|
||||
+ 'version': '0.4.4',
|
||||
+ 'release': '2.1.develHead',
|
||||
+ 'install_date_time_t': 1499257756,
|
||||
+ }}.items():
|
||||
+ self.assertTrue(pkgs.get(pkg_name))
|
||||
+ self.assertEqual(pkgs[pkg_name], [pkg_attr])
|
||||
|
||||
def test_list_patches(self):
|
||||
'''
|
||||
--
2.13.6
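Illustration (not part of the patch above): the attr parameter it introduces only changes the shape of the pkg.list_pkgs return value. Using the sample data from the patch's own zypper unit test, the two forms would look roughly like this:

    salt '*' pkg.list_pkgs
    {'yast2-ftp-server': '3.1.8-8.1', ...}

    salt '*' pkg.list_pkgs attr='["version", "release", "arch", "install_date_time_t"]'
    {'yast2-ftp-server': [{'version': '3.1.8', 'release': '8.1', 'arch': 'x86_64', 'install_date_time_t': 1499257798}], ...}

Callers that pass no attr keep getting the plain version string, which is why the patch routes both paths through pkg_resource.format_pkg_list.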
@ -1,47 +0,0 @@
From 461654496d36a264903057a8255aec4eb700e506 Mon Sep 17 00:00:00 2001
From: Silvio Moioli <smoioli@suse.de>
Date: Wed, 20 Sep 2017 14:32:47 +0200
Subject: [PATCH] multiprocessing minion option: documentation fixes

---
doc/man/salt.7 | 1 +
doc/ref/configuration/minion.rst | 7 +++++--
2 files changed, 6 insertions(+), 2 deletions(-)

diff --git a/doc/man/salt.7 b/doc/man/salt.7
index d6cfe937a1..86c463b771 100644
--- a/doc/man/salt.7
+++ b/doc/man/salt.7
@@ -10795,6 +10795,7 @@ cmd_whitelist_glob:
.UNINDENT
.UNINDENT
.SS Thread Settings
+.SS \fBmultiprocessing\fP
.sp
Default: \fBTrue\fP
.sp
diff --git a/doc/ref/configuration/minion.rst b/doc/ref/configuration/minion.rst
index e0f349931c..5c92b932ab 100644
--- a/doc/ref/configuration/minion.rst
+++ b/doc/ref/configuration/minion.rst
@@ -2337,11 +2337,14 @@ Thread Settings

.. conf_minion:: multiprocessing

+``multiprocessing``
+-------
+
Default: ``True``

-If `multiprocessing` is enabled when a minion receives a
+If ``multiprocessing`` is enabled when a minion receives a
publication a new process is spawned and the command is executed therein.
-Conversely, if `multiprocessing` is disabled the new publication will be run
+Conversely, if ``multiprocessing`` is disabled the new publication will be run
executed in a thread.

--
2.13.6
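Illustration (not part of the patch above): the multiprocessing option it documents is an ordinary minion setting, so the thread-versus-process behaviour described there is toggled from the minion configuration, e.g. a hypothetical /etc/salt/minion excerpt:

    # handle each published job in a thread instead of a forked process
    multiprocessing: False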
@ -1,25 +0,0 @@
From c0a3f1a73c5ca49c94ba9eae94193baf2d08c7eb Mon Sep 17 00:00:00 2001
From: Michael Calmer <mc@suse.de>
Date: Thu, 7 Dec 2017 17:30:31 +0100
Subject: [PATCH] older logrotate need su directive

---
pkg/suse/salt-common.logrotate | 1 +
1 file changed, 1 insertion(+)

diff --git a/pkg/suse/salt-common.logrotate b/pkg/suse/salt-common.logrotate
index 0d99d1b801..625670942c 100644
--- a/pkg/suse/salt-common.logrotate
+++ b/pkg/suse/salt-common.logrotate
@@ -8,6 +8,7 @@
}

/var/log/salt/minion {
+ su root root
weekly
missingok
rotate 7
--
2.13.6
@ -1,25 +0,0 @@
From 01467c793bddad471ba425918064dffb0ee21bdf Mon Sep 17 00:00:00 2001
From: Michael Calmer <mc@suse.de>
Date: Tue, 19 Dec 2017 17:07:19 +0100
Subject: [PATCH] python3 compatibility fix - got bytes instead of string

---
salt/netapi/rest_cherrypy/tools/websockets.py | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/salt/netapi/rest_cherrypy/tools/websockets.py b/salt/netapi/rest_cherrypy/tools/websockets.py
index fe4f9def85..44c5e2fa05 100644
--- a/salt/netapi/rest_cherrypy/tools/websockets.py
+++ b/salt/netapi/rest_cherrypy/tools/websockets.py
@@ -54,6 +54,6 @@ class SynchronizingWebsocket(WebSocket):
This ensures completion of the underlying websocket connection
and can be used to synchronize parallel senders.
'''
- if message.data == 'websocket client ready':
+ if message.data.decode('utf-8') == 'websocket client ready':
self.pipe.send(message)
self.send('server received message', False)
--
2.13.6
42
remove-obsolete-unicode-handling-in-pkg.info_install.patch
Normal file
42
remove-obsolete-unicode-handling-in-pkg.info_install.patch
Normal file
@ -0,0 +1,42 @@
From 79a840d183975d949d4c4cadf392e0f37dea16a9 Mon Sep 17 00:00:00 2001
From: Mihai Dinca <mdinca@suse.de>
Date: Tue, 13 Feb 2018 16:11:20 +0100
Subject: [PATCH] Remove obsolete unicode handling in pkg.info_installed

---
salt/modules/zypper.py | 11 +----------
1 file changed, 1 insertion(+), 10 deletions(-)

diff --git a/salt/modules/zypper.py b/salt/modules/zypper.py
index 51d01c3fc9..659d8858f0 100644
--- a/salt/modules/zypper.py
+++ b/salt/modules/zypper.py
@@ -309,7 +309,7 @@ class _Zypper(object):
if self.error_msg and not self.__no_raise and not self.__ignore_repo_failure:
raise CommandExecutionError('Zypper command failure: {0}'.format(self.error_msg))

- return self._is_xml_mode() and dom.parseString(self.__call_result['stdout']) or self.__call_result['stdout']
+ return self._is_xml_mode() and dom.parseString(self.__call_result['stdout'].encode('utf-8')) or self.__call_result['stdout']


__zypper__ = _Zypper()
@@ -482,15 +482,6 @@ def info_installed(*names, **kwargs):
t_nfo = dict()
# Translate dpkg-specific keys to a common structure
for key, value in six.iteritems(pkg_nfo):
- if isinstance(value, six.string_types):
- # Check, if string is encoded in a proper UTF-8
- if six.PY3:
- value_ = value.encode('UTF-8', 'ignore').decode('UTF-8', 'ignore')
- else:
- value_ = value.decode('UTF-8', 'ignore').encode('UTF-8', 'ignore')
- if value != value_:
- value = kwargs.get('errors', 'ignore') == 'ignore' and value_ or 'N/A (invalid UTF-8)'
- log.error('Package %s has bad UTF-8 code in %s: %s', pkg_name, key, value)
if key == 'source_rpm':
t_nfo['source'] = value
else:
--
2.16.1
@ -1,123 +0,0 @@
|
||||
From 85ddadf0815071c0000fddca3f0b4a62da69bceb Mon Sep 17 00:00:00 2001
|
||||
From: =?UTF-8?q?Pablo=20Su=C3=A1rez=20Hern=C3=A1ndez?=
|
||||
<psuarezhernandez@suse.com>
|
||||
Date: Wed, 10 Jan 2018 11:59:33 +0000
|
||||
Subject: [PATCH] Return error when gid_from_name and group does not
|
||||
exist.
|
||||
|
||||
Fixes #45345
|
||||
|
||||
Ensure empty string gid is set to None
|
||||
|
||||
Make pylint happy
|
||||
|
||||
Fix integration tests for 'user.present' state.
|
||||
|
||||
Update documentation for 'gid_from_name' parameter
|
||||
|
||||
Refactor to prevent logical bug when gid is 0
|
||||
---
|
||||
salt/states/user.py | 7 +++++-
|
||||
tests/integration/states/test_user.py | 42 +++++++++++++++++++++++------------
|
||||
2 files changed, 34 insertions(+), 15 deletions(-)
|
||||
|
||||
diff --git a/salt/states/user.py b/salt/states/user.py
|
||||
index 8a731cc2a1..737c39f4b4 100644
|
||||
--- a/salt/states/user.py
|
||||
+++ b/salt/states/user.py
|
||||
@@ -240,7 +240,8 @@ def present(name,
|
||||
|
||||
gid_from_name
|
||||
If True, the default group id will be set to the id of the group with
|
||||
- the same name as the user, Default is ``False``.
|
||||
+ the same name as the user. If the group does not exist the state will
|
||||
+ fail. Default is ``False``.
|
||||
|
||||
groups
|
||||
A list of groups to assign the user to, pass a list object. If a group
|
||||
@@ -455,6 +456,10 @@ def present(name,
|
||||
|
||||
if gid_from_name:
|
||||
gid = __salt__['file.group_to_gid'](name)
|
||||
+ if gid == '':
|
||||
+ ret['comment'] = 'Default group with name "{0}" is not present'.format(name)
|
||||
+ ret['result'] = False
|
||||
+ return ret
|
||||
|
||||
changes = _changes(name,
|
||||
uid,
|
||||
diff --git a/tests/integration/states/test_user.py b/tests/integration/states/test_user.py
|
||||
index 1317f12f97..ae9774a241 100644
|
||||
--- a/tests/integration/states/test_user.py
|
||||
+++ b/tests/integration/states/test_user.py
|
||||
@@ -97,15 +97,25 @@ class UserTest(ModuleCase, SaltReturnAssertsMixin):
|
||||
home=HOMEDIR)
|
||||
self.assertSaltTrueReturn(ret)
|
||||
|
||||
- def test_user_present_nondefault(self):
|
||||
+ @requires_system_grains
|
||||
+ def test_user_present_nondefault(self, grains=None):
|
||||
'''
|
||||
This is a DESTRUCTIVE TEST it creates a new user on the on the minion.
|
||||
'''
|
||||
ret = self.run_state('user.present', name=self.user_name,
|
||||
home=self.user_home)
|
||||
self.assertSaltTrueReturn(ret)
|
||||
+ ret = self.run_function('user.info', [self.user_name])
|
||||
+ self.assertReturnNonEmptySaltType(ret)
|
||||
+ group_name = grp.getgrgid(ret['gid']).gr_name
|
||||
if not salt.utils.is_darwin():
|
||||
self.assertTrue(os.path.isdir(self.user_home))
|
||||
+ if grains['os_family'] in ('Suse',):
|
||||
+ self.assertEqual(group_name, 'users')
|
||||
+ elif grains['os_family'] == 'MacOS':
|
||||
+ self.assertEqual(group_name, 'staff')
|
||||
+ else:
|
||||
+ self.assertEqual(group_name, self.user_name)
|
||||
|
||||
@requires_system_grains
|
||||
def test_user_present_gid_from_name_default(self, grains=None):
|
||||
@@ -120,22 +130,26 @@ class UserTest(ModuleCase, SaltReturnAssertsMixin):
|
||||
# user
|
||||
gid_from_name = False if grains['os_family'] == 'MacOS' else True
|
||||
|
||||
- ret = self.run_state('user.present', name=self.user_name,
|
||||
+ ret_user_present = self.run_state('user.present', name=self.user_name,
|
||||
gid_from_name=gid_from_name, home=self.user_home)
|
||||
- self.assertSaltTrueReturn(ret)
|
||||
|
||||
- ret = self.run_function('user.info', [self.user_name])
|
||||
- self.assertReturnNonEmptySaltType(ret)
|
||||
- group_name = grp.getgrgid(ret['gid']).gr_name
|
||||
-
|
||||
- if not salt.utils.is_darwin():
|
||||
- self.assertTrue(os.path.isdir(self.user_home))
|
||||
- if grains['os_family'] in ('Suse',):
|
||||
- self.assertEqual(group_name, 'users')
|
||||
- elif grains['os_family'] == 'MacOS':
|
||||
- self.assertEqual(group_name, 'staff')
|
||||
+ if gid_from_name:
|
||||
+ self.assertSaltFalseReturn(ret_user_present)
|
||||
+ ret_user_present = ret_user_present[next(iter(ret_user_present))]
|
||||
+ self.assertTrue('is not present' in ret_user_present['comment'])
|
||||
else:
|
||||
- self.assertEqual(group_name, self.user_name)
|
||||
+ self.assertSaltTrueReturn(ret_user_present)
|
||||
+ ret_user_info = self.run_function('user.info', [self.user_name])
|
||||
+ self.assertReturnNonEmptySaltType(ret_user_info)
|
||||
+ group_name = grp.getgrgid(ret_user_info['gid']).gr_name
|
||||
+ if not salt.utils.is_darwin():
|
||||
+ self.assertTrue(os.path.isdir(self.user_home))
|
||||
+ if grains['os_family'] in ('Suse',):
|
||||
+ self.assertEqual(group_name, 'users')
|
||||
+ elif grains['os_family'] == 'MacOS':
|
||||
+ self.assertEqual(group_name, 'staff')
|
||||
+ else:
|
||||
+ self.assertEqual(group_name, self.user_name)
|
||||
|
||||
def test_user_present_gid_from_name(self):
|
||||
'''
|
||||
--
2.13.6
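Illustration (not part of the patch above): with this change, a state such as the following hypothetical SLS now returns False with the comment 'Default group with name "johndoe" is not present' when gid_from_name is True and no group named after the user exists, instead of proceeding with an empty gid:

    johndoe:
      user.present:
        - gid_from_name: True
        - home: /home/johndoe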
@ -1,4 +1,4 @@
From 8f81bee8d8929cc4cd30dabc7cbc92d2cba9760e Mon Sep 17 00:00:00 2001
From d1dfed119ba7e90964451a0859eb9a72c5ff8db8 Mon Sep 17 00:00:00 2001
From: Christian Lanig <clanig@suse.com>
Date: Mon, 27 Nov 2017 13:10:26 +0100
Subject: [PATCH] Run salt-api as user salt (bsc#1064520)
@ -20,6 +20,6 @@ index 7ca582dfb4..bf513e4dbd 100644
ExecStart=/usr/bin/salt-api
TimeoutStopSec=3
--
2.13.6
2.16.1
@ -1,4 +1,4 @@
From 3902fe4183d169808b9d248b9b963926035ba954 Mon Sep 17 00:00:00 2001
From b69a8f76657ce9e1078a75dcbcb8c3ea0d4a842f Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Klaus=20K=C3=A4mpf?= <kkaempf@suse.de>
Date: Wed, 20 Jan 2016 11:01:06 +0100
Subject: [PATCH] Run salt master as dedicated salt user
@ -10,7 +10,7 @@ Subject: [PATCH] Run salt master as dedicated salt user
2 files changed, 4 insertions(+), 1 deletion(-)

diff --git a/conf/master b/conf/master
index abfc1fa808..bd28f6d406 100644
index 986898436a..8461101210 100644
--- a/conf/master
+++ b/conf/master
@@ -25,7 +25,8 @@
@ -42,6 +42,6 @@ index 3cd002308e..0d99d1b801 100644
missingok
rotate 7
--
2.13.6
2.16.1
70
salt.changes
70
salt.changes
@ -1,3 +1,73 @@
-------------------------------------------------------------------
Thu Mar 1 10:35:18 UTC 2018 - Mihai Dinca <mdinca@suse.de>

- Fix grains containing trailing "\n"

- Added:
* fix-grains-with-n.patch

-------------------------------------------------------------------
Tue Feb 20 10:47:34 UTC 2018 - Mihai Dinca <mdinca@suse.de>

- Remove salt-minion python2 requirement when python3 is default (bsc#1081592)

-------------------------------------------------------------------
Tue Feb 13 15:17:11 UTC 2018 - Mihai Dinca <mdinca@suse.de>

- Remove-obsolete-unicode-handling-in-pkg.info_installed

- Added:
* remove-obsolete-unicode-handling-in-pkg.info_install.patch

-------------------------------------------------------------------
Fri Feb 09 15:39:08 UTC 2018 - Mihai Dinca <mdinca@suse.de>

- Update to salt-2018.1.99

- Modified:
* activate-all-beacons-sources-config-pillar-grains.patch
* avoid-excessive-syslogging-by-watchdog-cronjob-58.patch
* feat-add-grain-for-all-fqdns.patch
* fix-bsc-1065792.patch
* list_pkgs-add-parameter-for-returned-attribute-selec.patch
* run-salt-api-as-user-salt-bsc-1064520.patch
* run-salt-master-as-dedicated-salt-user.patch

- Deleted:
* python3-compatibility-fix-got-bytes-instead-of-strin.patch
* enable-with-salt-version-parameter-for-setup.py-scri.patch
* catching-error-when-pidfile-cannot-be-deleted.patch
* bugfix-always-return-a-string-list-on-unknown-job-ta.patch
* bugfix-the-logic-according-to-the-exact-described-pu.patch
* cherrypy-read-reads-bytes-from-the-wire-and-write-th.patch
* fix-for-delete_deployment-in-kubernetes-module.patch
* fix-salt-master-for-old-psutil.patch
* introduce-process_count_max-minion-configuration-par.patch
* multiprocessing-minion-option-documentation-fixes.patch
* older-logrotate-need-su-directive.patch
* return-error-when-gid_from_name-and-group-does-not-e.patch
* set-shell-environment-variable-64.patch
* split-only-strings-if-they-are-such.patch
* use-home-to-get-the-user-home-directory-instead-usin.patch
* yumpkg-don-t-use-diff_attr-when-determining-install-.patch

-------------------------------------------------------------------
Fri Feb 09 15:39:08 UTC 2018 - Jochen Breuer <jbreuer@suse.de>

- Fix-epoch-handling-for-Rhel-6-and-7
- Modified:
* yumpkg-don-t-use-diff_attr-when-determining-install-.patch

- Removed:
* fix-for-wrong-version-processing.patch

-------------------------------------------------------------------
Fri Feb 09 15:39:08 UTC 2018 - Jochen Breuer <jbreuer@suse.de>

- Restoring-installation-of-packages-for-Rhel-6-7
Added:
* yumpkg-don-t-use-diff_attr-when-determining-install-.patch

-------------------------------------------------------------------
Wed Feb 07 13:23:51 UTC 2018 - Mihai Dinca <mdinca@suse.de>

62
salt.spec
62
salt.spec
@ -52,44 +52,33 @@
|
||||
%bcond_with builddocs
|
||||
|
||||
Name: salt
|
||||
Version: 2017.7.2
|
||||
Version: 2018.1.99
|
||||
Release: 0
|
||||
Summary: A parallel remote execution system
|
||||
License: Apache-2.0
|
||||
Group: System/Management
|
||||
Url: http://saltstack.org/
|
||||
Source: https://github.com/saltstack/salt/archive/v%{version}.tar.gz
|
||||
# Source: https://github.com/saltstack/salt/archive/v%{version}.tar.gz
|
||||
Source: https://github.com/saltstack/salt/archive/2018.3.0rc1.tar.gz
|
||||
Source1: README.SUSE
|
||||
Source2: salt-tmpfiles.d
|
||||
Source3: html.tar.bz2
|
||||
Source4: update-documentation.sh
|
||||
Source5: travis.yml
|
||||
|
||||
Patch1: list_pkgs-add-parameter-for-returned-attribute-selec.patch
|
||||
Patch2: use-home-to-get-the-user-home-directory-instead-usin.patch
|
||||
Patch3: multiprocessing-minion-option-documentation-fixes.patch
|
||||
Patch4: introduce-process_count_max-minion-configuration-par.patch
|
||||
Patch5: bugfix-always-return-a-string-list-on-unknown-job-ta.patch
|
||||
Patch6: enable-with-salt-version-parameter-for-setup.py-scri.patch
|
||||
Patch7: run-salt-master-as-dedicated-salt-user.patch
|
||||
Patch8: run-salt-api-as-user-salt-bsc-1064520.patch
|
||||
Patch9: activate-all-beacons-sources-config-pillar-grains.patch
|
||||
Patch10: fix-for-delete_deployment-in-kubernetes-module.patch
|
||||
Patch11: catching-error-when-pidfile-cannot-be-deleted.patch
|
||||
Patch12: avoid-excessive-syslogging-by-watchdog-cronjob-58.patch
|
||||
Patch13: older-logrotate-need-su-directive.patch
|
||||
Patch14: fix-salt-master-for-old-psutil.patch
|
||||
Patch15: split-only-strings-if-they-are-such.patch
|
||||
Patch16: cherrypy-read-reads-bytes-from-the-wire-and-write-th.patch
|
||||
Patch17: python3-compatibility-fix-got-bytes-instead-of-strin.patch
|
||||
Patch19: feat-add-grain-for-all-fqdns.patch
|
||||
Patch20: fix-bsc-1065792.patch
|
||||
Patch21: set-shell-environment-variable-64.patch
|
||||
Patch22: bugfix-the-logic-according-to-the-exact-described-pu.patch
|
||||
Patch23: return-error-when-gid_from_name-and-group-does-not-e.patch
|
||||
Patch24: yumpkg-don-t-use-diff_attr-when-determining-install-.patch
|
||||
Patch1: run-salt-master-as-dedicated-salt-user.patch
|
||||
Patch2: run-salt-api-as-user-salt-bsc-1064520.patch
|
||||
Patch3: activate-all-beacons-sources-config-pillar-grains.patch
|
||||
Patch4: avoid-excessive-syslogging-by-watchdog-cronjob-58.patch
|
||||
Patch5: feat-add-grain-for-all-fqdns.patch
|
||||
Patch6: fix-bsc-1065792.patch
|
||||
# PATCH-FIX_UPSTREAM https://github.com/saltstack/salt/pull/46006
|
||||
Patch7: remove-obsolete-unicode-handling-in-pkg.info_install.patch
|
||||
# PATCH-FIX_UPSTREAM https://github.com/saltstack/salt/pull/46278
|
||||
Patch8: fix-grains-with-n.patch
|
||||
|
||||
BuildRoot: %{_tmppath}/%{name}-%{version}-build
|
||||
# BuildRoot: %{_tmppath}/%{name}-%{version}-build
|
||||
BuildRoot: %{_tmppath}/%{name}-2018.3.0rc1-build
|
||||
BuildRequires: logrotate
|
||||
%if 0%{?suse_version} > 1020
|
||||
BuildRequires: fdupes
|
||||
@ -531,7 +520,8 @@ Zsh command line completion support for %{name}.
|
||||
%endif
|
||||
|
||||
%prep
|
||||
%setup -q -n salt-%{version}
|
||||
# %setup -q -n salt-%{version}
|
||||
%setup -q -n salt-2018.3.0rc1
|
||||
cp %{S:1} .
|
||||
cp %{S:5} ./.travis.yml
|
||||
%patch1 -p1
|
||||
@ -542,21 +532,6 @@ cp %{S:5} ./.travis.yml
|
||||
%patch6 -p1
|
||||
%patch7 -p1
|
||||
%patch8 -p1
|
||||
%patch9 -p1
|
||||
%patch10 -p1
|
||||
%patch11 -p1
|
||||
%patch12 -p1
|
||||
%patch13 -p1
|
||||
%patch14 -p1
|
||||
%patch15 -p1
|
||||
%patch16 -p1
|
||||
%patch17 -p1
|
||||
%patch19 -p1
|
||||
%patch20 -p1
|
||||
%patch21 -p1
|
||||
%patch22 -p1
|
||||
%patch23 -p1
|
||||
%patch24 -p1
|
||||
|
||||
%build
|
||||
%if 0%{?build_py2}
|
||||
@ -649,6 +624,9 @@ install -Dd -m 0750 %{buildroot}%{_sysconfdir}/salt/pki/minion
|
||||
%if 0%{?suse_version}
|
||||
install -Dd -m 0750 %{buildroot}%{_prefix}/lib/zypp/plugins/commit
|
||||
%{__install} scripts/suse/zypper/plugins/commit/zyppnotify %{buildroot}%{_prefix}/lib/zypp/plugins/commit/zyppnotify
|
||||
%if 0%{?default_py3}
|
||||
sed -i '1s=^#!/usr/bin/\(python\|env python\)[0-9.]*=#!/usr/bin/python3=' %{buildroot}%{_prefix}/lib/zypp/plugins/commit/zyppnotify
|
||||
%endif
|
||||
%endif
|
||||
|
||||
# Install Yum plugins only on RH machines
|
||||
|
@ -1,25 +0,0 @@
From 38f5221734b84d8d3188db82ff4873cd89a4a85c Mon Sep 17 00:00:00 2001
From: Johannes Renner <jrenner@suse.com>
Date: Thu, 11 Jan 2018 15:55:25 +0100
Subject: [PATCH] Set SHELL environment variable (#64)

---
pkg/suse/salt-api.service | 1 +
1 file changed, 1 insertion(+)

diff --git a/pkg/suse/salt-api.service b/pkg/suse/salt-api.service
index 6634b74a7d..5c73bb9022 100644
--- a/pkg/suse/salt-api.service
+++ b/pkg/suse/salt-api.service
@@ -6,6 +6,7 @@ After=network.target
[Service]
User=salt
Type=simple
+Environment=SHELL=/bin/bash
LimitNOFILE=8192
ExecStart=/usr/bin/salt-api
TimeoutStopSec=3
--
2.13.6
@ -1,50 +0,0 @@
|
||||
From 3fa025fc994d307e08ef271ca8463fe837891a63 Mon Sep 17 00:00:00 2001
|
||||
From: Bo Maryniuk <bo@suse.de>
|
||||
Date: Thu, 14 Dec 2017 14:43:52 +0100
|
||||
Subject: [PATCH] Split only strings, if they are such
|
||||
|
||||
* Use unicode literals
|
||||
* Lintfix: PEP8
|
||||
|
||||
See: https://bugzilla.suse.com/show_bug.cgi?id=1072218
|
||||
https://github.com/saltstack/salt/pull/44991
|
||||
---
|
||||
salt/pillar/sql_base.py | 7 +++----
|
||||
1 file changed, 3 insertions(+), 4 deletions(-)
|
||||
|
||||
diff --git a/salt/pillar/sql_base.py b/salt/pillar/sql_base.py
|
||||
index e7abceb134..a59c45331f 100644
|
||||
--- a/salt/pillar/sql_base.py
|
||||
+++ b/salt/pillar/sql_base.py
|
||||
@@ -168,7 +168,7 @@ More complete example for MySQL (to also show configuration)
|
||||
as_list: True
|
||||
with_lists: [1,3]
|
||||
'''
|
||||
-from __future__ import absolute_import
|
||||
+from __future__ import absolute_import, unicode_literals
|
||||
|
||||
# Please don't strip redundant parentheses from this file.
|
||||
# I have added some for clarity.
|
||||
@@ -275,7 +275,7 @@ class SqlBaseExtPillar(six.with_metaclass(abc.ABCMeta, object)):
|
||||
# May set 'as_list' from qb[1][2].
|
||||
else:
|
||||
defaults.update(qb[1])
|
||||
- if defaults['with_lists']:
|
||||
+ if defaults['with_lists'] and isinstance(defaults['with_lists'], six.string_types):
|
||||
defaults['with_lists'] = [
|
||||
int(i) for i in defaults['with_lists'].split(',')
|
||||
]
|
||||
@@ -437,8 +437,7 @@ class SqlBaseExtPillar(six.with_metaclass(abc.ABCMeta, object)):
|
||||
cursor.execute(details['query'], (minion_id,))
|
||||
|
||||
# Extract the field names the db has returned and process them
|
||||
- self.process_fields([row[0] for row in cursor.description],
|
||||
- details['depth'])
|
||||
+ self.process_fields([row[0] for row in cursor.description], details['depth'])
|
||||
self.enter_root(root)
|
||||
self.as_list = details['as_list']
|
||||
if details['with_lists']:
|
||||
--
|
||||
2.13.6
|
||||
|
||||
|
@ -1,29 +0,0 @@
From 418bcf80ca263c5523e701801e97bae60fdf9b35 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Pablo=20Su=C3=A1rez=20Hern=C3=A1ndez?=
<psuarezhernandez@suse.com>
Date: Mon, 11 Sep 2017 19:57:28 +0200
Subject: [PATCH] Use $HOME to get the user home directory instead using
'~' char

---
pkg/salt.bash | 3 ++-
1 file changed, 2 insertions(+), 1 deletion(-)

diff --git a/pkg/salt.bash b/pkg/salt.bash
index 480361fe23..00174c072f 100644
--- a/pkg/salt.bash
+++ b/pkg/salt.bash
@@ -35,7 +35,8 @@ _salt_get_keys(){
}

_salt(){
- local _salt_cache_functions=${SALT_COMP_CACHE_FUNCTIONS:='~/.cache/salt-comp-cache_functions'}
+ CACHE_DIR="$HOME/.cache/salt-comp-cache_functions"
+ local _salt_cache_functions=${SALT_COMP_CACHE_FUNCTIONS:=$CACHE_DIR}
local _salt_cache_timeout=${SALT_COMP_CACHE_TIMEOUT:='last hour'}

if [ ! -d "$(dirname ${_salt_cache_functions})" ]; then
--
2.13.6
@ -1,3 +0,0 @@
version https://git-lfs.github.com/spec/v1
oid sha256:b9f9dd9ddd129ddadadf963178383b50c32283aeb1c338d9c23cc01b11722db2
size 11483585
@ -1,203 +0,0 @@
From ea9c1f9a83b42b773579d5ef34dc29249bcd680a Mon Sep 17 00:00:00 2001
From: Erik Johnson <palehose@gmail.com>
Date: Thu, 4 Jan 2018 16:10:18 -0600
Subject: [PATCH] yumpkg: don't use diff_attr when determining
 install/downgrade targets

Doing so breaks epoch handling, and is unnecessary anyway since the
diff_attr is only used for the return data.

Additionally, this tweaks the "attr" argument in both yumpkg and
zypper's list_pkgs func so that it will accept a comma-separated list as
well as a Python list, and makes a DRY tweak so that we're only
returning and formatting the return data in one place in the function.
---
salt/modules/yumpkg.py | 83 ++++++++++++++++++++++++++++----------------------
salt/modules/zypper.py | 45 +++++++++++++++------------
2 files changed, 73 insertions(+), 55 deletions(-)

diff --git a/salt/modules/yumpkg.py b/salt/modules/yumpkg.py
index 8b63bff4a3..89aceb21cb 100644
--- a/salt/modules/yumpkg.py
+++ b/salt/modules/yumpkg.py
@@ -622,6 +622,7 @@ def list_pkgs(versions_as_list=False, **kwargs):
.. code-block:: bash

salt '*' pkg.list_pkgs
+ salt '*' pkg.list_pkgs attr=version,arch
salt '*' pkg.list_pkgs attr='["version", "arch"]'
'''
versions_as_list = salt.utils.is_true(versions_as_list)
@@ -630,42 +631,52 @@ def list_pkgs(versions_as_list=False, **kwargs):
for x in ('removed', 'purge_desired')]):
return {}

- attr = kwargs.get("attr")
- if 'pkg.list_pkgs' in __context__:
- cached = __context__['pkg.list_pkgs']
- return __salt__['pkg_resource.format_pkg_list'](cached, versions_as_list, attr)
+ attr = kwargs.get('attr')
+ if attr is not None:
+ attr = salt.utils.split_input(attr)

- ret = {}
- cmd = ['rpm', '-qa', '--queryformat',
- salt.utils.pkg.rpm.QUERYFORMAT.replace('%{REPOID}', '(none)') + '\n']
- output = __salt__['cmd.run'](cmd,
- python_shell=False,
- output_loglevel='trace')
- for line in output.splitlines():
- pkginfo = salt.utils.pkg.rpm.parse_pkginfo(
- line,
- osarch=__grains__['osarch']
- )
- if pkginfo is not None:
- # see rpm version string rules available at https://goo.gl/UGKPNd
- pkgver = pkginfo.version
- epoch = ''
- release = ''
- if ':' in pkgver:
- epoch, pkgver = pkgver.split(":", 1)
- if '-' in pkgver:
- pkgver, release = pkgver.split("-", 1)
- all_attr = {'epoch': epoch, 'version': pkgver, 'release': release,
- 'arch': pkginfo.arch, 'install_date': pkginfo.install_date,
- 'install_date_time_t': pkginfo.install_date_time_t}
- __salt__['pkg_resource.add_pkg'](ret, pkginfo.name, all_attr)
-
- for pkgname in ret:
- ret[pkgname] = sorted(ret[pkgname], key=lambda d: d['version'])
-
- __context__['pkg.list_pkgs'] = ret
+ contextkey = 'pkg.list_pkgs'

- return __salt__['pkg_resource.format_pkg_list'](ret, versions_as_list, attr)
+ if contextkey not in __context__:
+ ret = {}
+ cmd = ['rpm', '-qa', '--queryformat',
+ salt.utils.pkg.rpm.QUERYFORMAT.replace('%{REPOID}', '(none)') + '\n']
+ output = __salt__['cmd.run'](cmd,
+ python_shell=False,
+ output_loglevel='trace')
+ for line in output.splitlines():
+ pkginfo = salt.utils.pkg.rpm.parse_pkginfo(
+ line,
+ osarch=__grains__['osarch']
+ )
+ if pkginfo is not None:
+ # see rpm version string rules available at https://goo.gl/UGKPNd
+ pkgver = pkginfo.version
+ epoch = ''
+ release = ''
+ if ':' in pkgver:
+ epoch, pkgver = pkgver.split(":", 1)
+ if '-' in pkgver:
+ pkgver, release = pkgver.split("-", 1)
+ all_attr = {
+ 'epoch': epoch,
+ 'version': pkgver,
+ 'release': release,
+ 'arch': pkginfo.arch,
+ 'install_date': pkginfo.install_date,
+ 'install_date_time_t': pkginfo.install_date_time_t
+ }
+ __salt__['pkg_resource.add_pkg'](ret, pkginfo.name, all_attr)
+
+ for pkgname in ret:
+ ret[pkgname] = sorted(ret[pkgname], key=lambda d: d['version'])
+
+ __context__[contextkey] = ret
+
+ return __salt__['pkg_resource.format_pkg_list'](
+ __context__[contextkey],
+ versions_as_list,
+ attr)


def list_repo_pkgs(*args, **kwargs):
@@ -1311,11 +1322,11 @@ def install(name=None,
log.warning('"version" parameter will be ignored for multiple '
'package targets')

- diff_attr = kwargs.get("diff_attr")
+ diff_attr = kwargs.get('diff_attr')
old = list_pkgs(versions_as_list=False, attr=diff_attr) if not downloadonly else list_downloaded()
# Use of __context__ means no duplicate work here, just accessing
# information already in __context__ from the previous call to list_pkgs()
- old_as_list = list_pkgs(versions_as_list=True, attr=diff_attr) if not downloadonly else list_downloaded()
+ old_as_list = list_pkgs(versions_as_list=True) if not downloadonly else list_downloaded()

to_install = []
to_downgrade = []
diff --git a/salt/modules/zypper.py b/salt/modules/zypper.py
index b440af08a4..ffe38be17d 100644
--- a/salt/modules/zypper.py
+++ b/salt/modules/zypper.py
@@ -685,6 +685,7 @@ def list_pkgs(versions_as_list=False, **kwargs):
.. code-block:: bash

salt '*' pkg.list_pkgs
+ salt '*' pkg.list_pkgs attr=version,arch
salt '*' pkg.list_pkgs attr='["version", "arch"]'
'''
versions_as_list = salt.utils.is_true(versions_as_list)
@@ -693,30 +694,36 @@ def list_pkgs(versions_as_list=False, **kwargs):
for x in ('removed', 'purge_desired')]):
return {}

- attr = kwargs.get("attr")
- if 'pkg.list_pkgs' in __context__:
- cached = __context__['pkg.list_pkgs']
- return __salt__['pkg_resource.format_pkg_list'](cached, versions_as_list, attr)
+ attr = kwargs.get('attr')
+ if attr is not None:
+ attr = salt.utils.split_input(attr)

- cmd = ['rpm', '-qa', '--queryformat', (
- "%{NAME}_|-%{VERSION}_|-%{RELEASE}_|-%{ARCH}_|-"
- "%|EPOCH?{%{EPOCH}}:{}|_|-%{INSTALLTIME}\\n")]
- ret = {}
- for line in __salt__['cmd.run'](cmd, output_loglevel='trace', python_shell=False).splitlines():
- name, pkgver, rel, arch, epoch, install_time = line.split('_|-')
- install_date = datetime.datetime.utcfromtimestamp(int(install_time)).isoformat() + "Z"
- install_date_time_t = int(install_time)
+ contextkey = 'pkg.list_pkgs'
+
+ if contextkey not in __context__:
+
+ cmd = ['rpm', '-qa', '--queryformat', (
+ "%{NAME}_|-%{VERSION}_|-%{RELEASE}_|-%{ARCH}_|-"
+ "%|EPOCH?{%{EPOCH}}:{}|_|-%{INSTALLTIME}\\n")]
+ ret = {}
+ for line in __salt__['cmd.run'](cmd, output_loglevel='trace', python_shell=False).splitlines():
+ name, pkgver, rel, arch, epoch, install_time = line.split('_|-')
+ install_date = datetime.datetime.utcfromtimestamp(int(install_time)).isoformat() + "Z"
+ install_date_time_t = int(install_time)

- all_attr = {'epoch': epoch, 'version': pkgver, 'release': rel, 'arch': arch,
- 'install_date': install_date, 'install_date_time_t': install_date_time_t}
- __salt__['pkg_resource.add_pkg'](ret, name, all_attr)
+ all_attr = {'epoch': epoch, 'version': pkgver, 'release': rel, 'arch': arch,
+ 'install_date': install_date, 'install_date_time_t': install_date_time_t}
+ __salt__['pkg_resource.add_pkg'](ret, name, all_attr)

- for pkgname in ret:
- ret[pkgname] = sorted(ret[pkgname], key=lambda d: d['version'])
+ for pkgname in ret:
+ ret[pkgname] = sorted(ret[pkgname], key=lambda d: d['version'])

- __context__['pkg.list_pkgs'] = ret
+ __context__[contextkey] = ret

- return __salt__['pkg_resource.format_pkg_list'](ret, versions_as_list, attr)
+ return __salt__['pkg_resource.format_pkg_list'](
+ __context__[contextkey],
+ versions_as_list,
+ attr)


def _get_configured_repos():
--
2.13.6
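
The commit message of the patch above describes two behaviours: the "attr" argument may now be given either as a comma-separated string (attr=version,arch) or as a Python list, and the expensive rpm query is run only when its result is not already cached under the 'pkg.list_pkgs' key in __context__, with a single formatting/return point at the end. The sketch below shows that shape in isolation; _context, split_input, _query_installed and _format_pkg_list are simplified stand-ins, not the real Salt functions.

# Standalone sketch of the assumed shape -- not the real yumpkg/zypper module.
_context = {}  # stands in for Salt's per-run __context__ dictionary


def split_input(val):
    # Accept either "version,arch" or ["version", "arch"].
    if isinstance(val, str):
        return [v.strip() for v in val.split(',') if v.strip()]
    return list(val)


def _query_installed():
    # Placeholder for the expensive `rpm -qa --queryformat ...` call.
    return {'bash': [{'epoch': '', 'version': '4.4', 'release': '19.6.1',
                      'arch': 'x86_64'}]}


def _format_pkg_list(pkgs, attr):
    # Placeholder for pkg_resource.format_pkg_list: keep only requested attributes.
    if attr is None:
        return {name: [p['version'] for p in info] for name, info in pkgs.items()}
    return {name: [{k: p[k] for k in attr if k in p} for p in info]
            for name, info in pkgs.items()}


def list_pkgs(attr=None):
    if attr is not None:
        attr = split_input(attr)              # "version,arch" -> ['version', 'arch']

    contextkey = 'pkg.list_pkgs'
    if contextkey not in _context:            # query once, reuse on later calls
        _context[contextkey] = _query_installed()

    # single return point: always format from the cached data (the DRY tweak)
    return _format_pkg_list(_context[contextkey], attr)


print(list_pkgs(attr='version,arch'))         # same output as attr=['version', 'arch']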