SHA256
1
0
forked from pool/salt

Accepting request 514555 from systemsmanagement:saltstack:testing

- Update to 2017.7.0
  See https://docs.saltstack.com/en/develop/topics/releases/2017.7.0.html
  for full changelog
- fix ownership for whole master cache directory (bsc#1035914)
- fix setting the language on SUSE systems (bsc#1038855)
- wrong os_family grains on SUSE - fix unittests (bsc#1038855)
- speed-up cherrypy by removing sleep call
- Disable 3rd party runtime packages to be explicitly recommended.
  (bsc#1040886)
- fix format error (bsc#1043111)
- Add a salt-minion watchdog for RHEL6 and SLES11 systems (sysV)
  to restart salt-minion in case of crashes during upgrade.
- Add procps as dependency.
- Bugfix: jobs scheduled to run at a future time stay
  pending for Salt minions (bsc#1036125)
- All current patches have been removed as they were added upstream:
  * add-a-salt-minion-service-control-file.patch
  * add-options-for-dockerng.patch
  * add-ssh-option-to-salt-ssh.patch
  * add-unit-test-for-skip-false-values-from-preferred_i.patch
  * add-yum-plugin.patch
  * add-zypp-notify-plugin.patch
  * adding-support-for-installing-patches-in-yum-dnf-exe.patch
  * avoid-failures-on-sles-12-sp2-because-of-new-systemd.patch
  * bugfix-unable-to-use-127-as-hostname.patch
  * change-travis-configuration-file-to-use-salt-toaster.patch
  * check-if-byte-strings-are-properly-encoded-in-utf-8.patch
  * clean-up-change-attribute-from-interface-dict.patch
  * do-not-generate-a-date-in-a-comment-to-prevent-rebui.patch
  * fix-grain-for-os_family-on-suse-series.patch

OBS-URL: https://build.opensuse.org/request/show/514555
OBS-URL: https://build.opensuse.org/package/show/systemsmanagement:saltstack/salt?expand=0&rev=90
This commit is contained in:
Klaus Kämpf 2017-08-04 12:51:31 +00:00 committed by Git OBS Bridge
parent ae1540a455
commit 850dee1a06
35 changed files with 939 additions and 5110 deletions

View File

@ -1,35 +0,0 @@
From 69eeaf17252a2912ed33f2160c14282c2ff703bc Mon Sep 17 00:00:00 2001
From: Bo Maryniuk <bo@suse.de>
Date: Wed, 18 Jan 2017 15:38:53 +0100
Subject: [PATCH] Add a salt-minion service control file
---
pkg/suse/salt-minion.service | 15 +++++++++++++++
1 file changed, 15 insertions(+)
create mode 100644 pkg/suse/salt-minion.service
diff --git a/pkg/suse/salt-minion.service b/pkg/suse/salt-minion.service
new file mode 100644
index 0000000000..1dbaa77755
--- /dev/null
+++ b/pkg/suse/salt-minion.service
@@ -0,0 +1,15 @@
+[Unit]
+Description=The Salt Minion
+After=network.target
+
+[Service]
+Type=notify
+NotifyAccess=all
+LimitNOFILE=8192
+ExecStart=/usr/bin/salt-minion
+KillMode=process
+Restart=on-failure
+RestartSec=15
+
+[Install]
+WantedBy=multi-user.target
--
2.11.0

View File

@ -1,254 +0,0 @@
From c1a54f79fa0c35536e420eda1e429723c532c891 Mon Sep 17 00:00:00 2001
From: Michael Calmer <mc@suse.de>
Date: Thu, 19 Jan 2017 15:39:10 +0100
Subject: [PATCH] add options for dockerng
* add buildargs option to dockerng.build
* docker buildargs require minimal dockerpy 1.6.0 and docker version 1.9.0
* update version in the docs
* support dryrun for dockerng.sls_build
provide the possibility to put extra modules into the thin
added unit test for dockerng.sls_build dryrun option
---
salt/modules/dockerng.py | 51 ++++++++++++++++++-----
tests/unit/modules/dockerng_test.py | 82 +++++++++++++++++++++++++++++++++++++
2 files changed, 122 insertions(+), 11 deletions(-)
diff --git a/salt/modules/dockerng.py b/salt/modules/dockerng.py
index 09d80bdd3f..8366e5e8ba 100644
--- a/salt/modules/dockerng.py
+++ b/salt/modules/dockerng.py
@@ -28,13 +28,13 @@ to replace references to ``dockerng`` with ``docker``.
Installation Prerequisites
--------------------------
-This execution module requires at least version 1.4.0 of both docker-py_ and
-Docker_. docker-py can easily be installed using :py:func:`pip.install
-<salt.modules.pip.install>`:
+This execution module requires at least version 1.6.0 of docker-py_ and
+version 1.9.0 of Docker_. docker-py can easily be installed using
+:py:func:`pip.install <salt.modules.pip.install>`:
.. code-block:: bash
- salt myminion pip.install docker-py>=1.4.0
+ salt myminion pip.install docker-py>=1.6.0
.. _docker-py: https://pypi.python.org/pypi/docker-py
.. _Docker: https://www.docker.com/
@@ -268,8 +268,8 @@ __func_alias__ = {
}
# Minimum supported versions
-MIN_DOCKER = (1, 6, 0)
-MIN_DOCKER_PY = (1, 4, 0)
+MIN_DOCKER = (1, 9, 0)
+MIN_DOCKER_PY = (1, 6, 0)
VERSION_RE = r'([\d.]+)'
@@ -3479,7 +3479,8 @@ def build(path=None,
rm=True,
api_response=False,
fileobj=None,
- dockerfile=None):
+ dockerfile=None,
+ buildargs=None):
'''
Builds a docker image from a Dockerfile or a URL
@@ -3513,6 +3514,10 @@ def build(path=None,
.. versionadded:: develop
+ buildargs
+ A dictionary of build arguments provided to the docker build process.
+
+
**RETURN DATA**
A dictionary containing one or more of the following keys:
@@ -3559,7 +3564,8 @@ def build(path=None,
fileobj=fileobj,
rm=rm,
nocache=not cache,
- dockerfile=dockerfile)
+ dockerfile=dockerfile,
+ buildargs=buildargs)
ret = {'Time_Elapsed': time.time() - time_started}
_clear_context()
@@ -5657,7 +5663,9 @@ def call(name, function, *args, **kwargs):
raise CommandExecutionError('Missing function parameter')
# move salt into the container
- thin_path = salt.utils.thin.gen_thin(__opts__['cachedir'])
+ thin_path = salt.utils.thin.gen_thin(__opts__['cachedir'],
+ extra_mods=__salt__['config.option']("thin_extra_mods", ''),
+ so_mods=__salt__['config.option']("thin_so_mods", ''))
with io.open(thin_path, 'rb') as file:
_client_wrapper('put_archive', name, thin_dest_path, file)
try:
@@ -5774,7 +5782,7 @@ def sls(name, mods=None, saltenv='base', **kwargs):
def sls_build(name, base='opensuse/python', mods=None, saltenv='base',
- **kwargs):
+ dryrun=False, **kwargs):
'''
Build a Docker image using the specified SLS modules on top of base image
@@ -5796,6 +5804,24 @@ def sls_build(name, base='opensuse/python', mods=None, saltenv='base',
Specify the environment from which to retrieve the SLS indicated by the
`mods` parameter.
+ base
+ the base image
+
+ mods
+ the state modules to execute during build
+
+ saltenv
+ the salt environment to use
+
+ dryrun: False
+ when set to True the container will not be commited at the end of
+ the build. The dryrun succeed also when the state contains errors.
+
+ **RETURN DATA**
+
+ A dictionary with the ID of the new container. In case of a dryrun,
+ the state result is returned and the container gets removed.
+
CLI Example:
.. code-block:: bash
@@ -5822,11 +5848,14 @@ def sls_build(name, base='opensuse/python', mods=None, saltenv='base',
# Now execute the state into the container
ret = __salt__['dockerng.sls'](id_, mods, saltenv, **kwargs)
# fail if the state was not successful
- if not salt.utils.check_state_result(ret):
+ if not dryrun and not salt.utils.check_state_result(ret):
raise CommandExecutionError(ret)
finally:
__salt__['dockerng.stop'](id_)
+ if dryrun:
+ __salt__['dockerng.rm'](id_)
+ return ret
return __salt__['dockerng.commit'](id_, name)
diff --git a/tests/unit/modules/dockerng_test.py b/tests/unit/modules/dockerng_test.py
index f213ef0d1f..36478cc4c6 100644
--- a/tests/unit/modules/dockerng_test.py
+++ b/tests/unit/modules/dockerng_test.py
@@ -755,6 +755,84 @@ class DockerngTestCase(TestCase):
self.assertEqual(
{'Id': 'ID2', 'Image': 'foo', 'Time_Elapsed': 42}, ret)
+ def test_sls_build_dryrun(self, *args):
+ '''
+ test build sls image in dryrun mode.
+ '''
+ docker_start_mock = MagicMock(
+ return_value={})
+ docker_create_mock = MagicMock(
+ return_value={'Id': 'ID', 'Name': 'NAME'})
+ docker_stop_mock = MagicMock(
+ return_value={'state': {'old': 'running', 'new': 'stopped'},
+ 'result': True})
+ docker_rm_mock = MagicMock(
+ return_value={})
+
+ docker_sls_mock = MagicMock(
+ return_value={
+ "file_|-/etc/test.sh_|-/etc/test.sh_|-managed": {
+ "comment": "File /etc/test.sh is in the correct state",
+ "name": "/etc/test.sh",
+ "start_time": "07:04:26.834792",
+ "result": True,
+ "duration": 13.492,
+ "__run_num__": 0,
+ "changes": {}
+ },
+ "test_|-always-passes_|-foo_|-succeed_without_changes": {
+ "comment": "Success!",
+ "name": "foo",
+ "start_time": "07:04:26.848915",
+ "result": True,
+ "duration": 0.363,
+ "__run_num__": 1,
+ "changes": {}
+ }
+ })
+
+ ret = None
+ with patch.dict(dockerng_mod.__salt__, {
+ 'dockerng.start': docker_start_mock,
+ 'dockerng.create': docker_create_mock,
+ 'dockerng.stop': docker_stop_mock,
+ 'dockerng.rm': docker_rm_mock,
+ 'dockerng.sls': docker_sls_mock}):
+ ret = dockerng_mod.sls_build(
+ 'foo',
+ mods='foo',
+ dryrun=True
+ )
+ docker_create_mock.assert_called_once_with(
+ cmd='sleep infinity',
+ image='opensuse/python', interactive=True, name='foo', tty=True)
+ docker_start_mock.assert_called_once_with('ID')
+ docker_sls_mock.assert_called_once_with('ID', 'foo', 'base')
+ docker_stop_mock.assert_called_once_with('ID')
+ docker_rm_mock.assert_called_once_with('ID')
+ self.assertEqual(
+ {
+ "file_|-/etc/test.sh_|-/etc/test.sh_|-managed": {
+ "comment": "File /etc/test.sh is in the correct state",
+ "name": "/etc/test.sh",
+ "start_time": "07:04:26.834792",
+ "result": True,
+ "duration": 13.492,
+ "__run_num__": 0,
+ "changes": {}
+ },
+ "test_|-always-passes_|-foo_|-succeed_without_changes": {
+ "comment": "Success!",
+ "name": "foo",
+ "start_time": "07:04:26.848915",
+ "result": True,
+ "duration": 0.363,
+ "__run_num__": 1,
+ "changes": {}
+ }
+ },
+ ret)
+
def test_call_success(self):
'''
test module calling inside containers
@@ -769,6 +847,9 @@ class DockerngTestCase(TestCase):
return_value={
'retcode': 0
})
+ docker_config_mock = MagicMock(
+ return_value=''
+ )
client = Mock()
client.put_archive = Mock()
@@ -779,6 +860,7 @@ class DockerngTestCase(TestCase):
dockerng_mod.__salt__, {
'dockerng.run_all': docker_run_all_mock,
'dockerng.copy_to': docker_copy_to_mock,
+ 'config.option': docker_config_mock
}),
patch.dict(
dockerng_mod.__context__, {
--
2.11.0

View File

@ -1,163 +0,0 @@
From c4c6610bf7314cc4c6ecf656bef341e2d1ca1587 Mon Sep 17 00:00:00 2001
From: Matei Albu <malbu@suse.de>
Date: Mon, 19 Dec 2016 16:54:52 +0100
Subject: [PATCH] Add --ssh-option to salt-ssh
--ssh-option can be used to pass -o options to the ssh client.
(cherry picked from commit 16f21e5)
Add spaces around =
Fix salt-ssh err when -ssh-option is missing
---
salt/client/ssh/__init__.py | 7 ++++++-
salt/client/ssh/shell.py | 19 ++++++++++++++++---
salt/utils/parsers.py | 18 +++++++++++++-----
3 files changed, 35 insertions(+), 9 deletions(-)
diff --git a/salt/client/ssh/__init__.py b/salt/client/ssh/__init__.py
index 23ec948fe0..bbef9d8de1 100644
--- a/salt/client/ssh/__init__.py
+++ b/salt/client/ssh/__init__.py
@@ -295,6 +295,9 @@ class SSH(object):
'remote_port_forwards': self.opts.get(
'ssh_remote_port_forwards'
),
+ 'ssh_options': self.opts.get(
+ 'ssh_options'
+ )
}
if self.opts.get('rand_thin_dir'):
self.defaults['thin_dir'] = os.path.join(
@@ -693,6 +696,7 @@ class Single(object):
identities_only=False,
sudo_user=None,
remote_port_forwards=None,
+ ssh_options=None,
**kwargs):
# Get mine setting and mine_functions if defined in kwargs (from roster)
self.mine = mine
@@ -749,7 +753,8 @@ class Single(object):
'mods': self.mods,
'identities_only': identities_only,
'sudo_user': sudo_user,
- 'remote_port_forwards': remote_port_forwards}
+ 'remote_port_forwards': remote_port_forwards,
+ 'ssh_options': ssh_options}
# Pre apply changeable defaults
self.minion_opts = {
'grains_cache': True,
diff --git a/salt/client/ssh/shell.py b/salt/client/ssh/shell.py
index f78cb623e1..613660fe34 100644
--- a/salt/client/ssh/shell.py
+++ b/salt/client/ssh/shell.py
@@ -64,7 +64,8 @@ class Shell(object):
mods=None,
identities_only=False,
sudo_user=None,
- remote_port_forwards=None):
+ remote_port_forwards=None,
+ ssh_options=None):
self.opts = opts
# ssh <ipv6>, but scp [<ipv6]:/path
self.host = host.strip('[]')
@@ -78,6 +79,7 @@ class Shell(object):
self.mods = mods
self.identities_only = identities_only
self.remote_port_forwards = remote_port_forwards
+ self.ssh_options = ssh_options
def get_error(self, errstr):
'''
@@ -169,6 +171,12 @@ class Shell(object):
ret.append('-o {0} '.format(option))
return ''.join(ret)
+ def _ssh_opts(self):
+ if self.ssh_options:
+ return ' '.join(['-o {0}'.format(opt)
+ for opt in self.ssh_options])
+ return ''
+
def _copy_id_str_old(self):
'''
Return the string to execute ssh-copy-id
@@ -176,11 +184,12 @@ class Shell(object):
if self.passwd:
# Using single quotes prevents shell expansion and
# passwords containing '$'
- return "{0} {1} '{2} -p {3} {4}@{5}'".format(
+ return "{0} {1} '{2} -p {3} {4} {5}@{6}'".format(
'ssh-copy-id',
'-i {0}.pub'.format(self.priv),
self._passwd_opts(),
self.port,
+ self._ssh_opts(),
self.user,
self.host)
return None
@@ -193,11 +202,12 @@ class Shell(object):
if self.passwd:
# Using single quotes prevents shell expansion and
# passwords containing '$'
- return "{0} {1} {2} -p {3} {4}@{5}".format(
+ return "{0} {1} {2} -p {3} {4} {5}@{6}".format(
'ssh-copy-id',
'-i {0}.pub'.format(self.priv),
self._passwd_opts(),
self.port,
+ self._ssh_opts(),
self.user,
self.host)
return None
@@ -229,6 +239,9 @@ class Shell(object):
if ssh != 'scp' and self.remote_port_forwards:
command.append(' '.join(['-R {0}'.format(item)
for item in self.remote_port_forwards.split(',')]))
+ if self.ssh_options:
+ command.append(self._ssh_opts())
+
command.append(cmd)
return ' '.join(command)
diff --git a/salt/utils/parsers.py b/salt/utils/parsers.py
index 6330ed6596..c38506f3c5 100644
--- a/salt/utils/parsers.py
+++ b/salt/utils/parsers.py
@@ -2828,11 +2828,11 @@ class SaltSSHOptionParser(six.with_metaclass(OptionParserMeta,
help='Pass a JID to be used instead of generating one.'
)
- ports_group = optparse.OptionGroup(
- self, 'Port Forwarding Options',
- 'Parameters for setting up SSH port forwarding.'
+ ssh_group = optparse.OptionGroup(
+ self, 'SSH Options',
+ 'Parameters for the SSH client.'
)
- ports_group.add_option(
+ ssh_group.add_option(
'--remote-port-forwards',
dest='ssh_remote_port_forwards',
help='Setup remote port forwarding using the same syntax as with '
@@ -2840,7 +2840,15 @@ class SaltSSHOptionParser(six.with_metaclass(OptionParserMeta,
'forwarding definitions will be translated into multiple '
'-R parameters.'
)
- self.add_option_group(ports_group)
+ ssh_group.add_option(
+ '--ssh-option',
+ dest='ssh_options',
+ action='append',
+ help='Equivalent to the -o ssh command option. Passes options to '
+ 'the SSH client in the format used in the client configuration file. '
+ 'Can be used multiple times.'
+ )
+ self.add_option_group(ssh_group)
auth_group = optparse.OptionGroup(
self, 'Authentication Options',
--
2.11.0

View File

@ -1,917 +0,0 @@
From a983f9342c6917eaa1aba63cd5ceebd9271f43d5 Mon Sep 17 00:00:00 2001
From: Bo Maryniuk <bo@suse.de>
Date: Thu, 20 Apr 2017 14:03:30 +0200
Subject: [PATCH] Add unit test for skip false values from preferred_ip
- Add fake preferred IP function for testing
- Add initial unit test for openstack cloud module
- Move out nested function to be unit-testable
- Lintfix
- Add unit test for nova connector
- Move out nested function for testing purposes
- Fix name error exception
- Skip test, if libcloud is not around
- Add unit test for node ip filtering
- Lintfix E0602
- Fix UT parameter changes
- Fix lint, typos and readability
- PEP8: fix unused variable
- Reformat idents, fix typos
- Describe debug information
---
salt/cloud/clouds/dimensiondata.py | 116 +++++-----
salt/cloud/clouds/nova.py | 295 ++++++++++++--------------
salt/cloud/clouds/openstack.py | 229 ++++++++++----------
tests/unit/cloud/clouds/__init__.py | 17 ++
tests/unit/cloud/clouds/dimensiondata_test.py | 28 ++-
tests/unit/cloud/clouds/nova_test.py | 43 ++++
tests/unit/cloud/clouds/openstack_test.py | 43 ++++
7 files changed, 441 insertions(+), 330 deletions(-)
create mode 100644 tests/unit/cloud/clouds/nova_test.py
create mode 100644 tests/unit/cloud/clouds/openstack_test.py
diff --git a/salt/cloud/clouds/dimensiondata.py b/salt/cloud/clouds/dimensiondata.py
index e4af241867..d8478436b8 100644
--- a/salt/cloud/clouds/dimensiondata.py
+++ b/salt/cloud/clouds/dimensiondata.py
@@ -131,6 +131,60 @@ def get_dependencies():
)
+def _query_node_data(vm_, data):
+ running = False
+ try:
+ node = show_instance(vm_['name'], 'action')
+ running = (node['state'] == NodeState.RUNNING)
+ log.debug('Loaded node data for %s:\nname: %s\nstate: %s',
+ vm_['name'], pprint.pformat(node['name']), node['state'])
+ except Exception as err:
+ log.error(
+ 'Failed to get nodes list: %s', err,
+ # Show the traceback if the debug logging level is enabled
+ exc_info_on_loglevel=logging.DEBUG
+ )
+ # Trigger a failure in the wait for IP function
+ return running
+
+ if not running:
+ # Still not running, trigger another iteration
+ return
+
+ private = node['private_ips']
+ public = node['public_ips']
+
+ if private and not public:
+ log.warning('Private IPs returned, but not public. Checking for misidentified IPs.')
+ for private_ip in private:
+ private_ip = preferred_ip(vm_, [private_ip])
+ if private_ip is False:
+ continue
+ if salt.utils.cloud.is_public_ip(private_ip):
+ log.warning('%s is a public IP', private_ip)
+ data.public_ips.append(private_ip)
+ else:
+ log.warning('%s is a private IP', private_ip)
+ if private_ip not in data.private_ips:
+ data.private_ips.append(private_ip)
+
+ if ssh_interface(vm_) == 'private_ips' and data.private_ips:
+ return data
+
+ if private:
+ data.private_ips = private
+ if ssh_interface(vm_) == 'private_ips':
+ return data
+
+ if public:
+ data.public_ips = public
+ if ssh_interface(vm_) != 'private_ips':
+ return data
+
+ log.debug('Contents of the node data:')
+ log.debug(data)
+
+
def create(vm_):
'''
Create a single VM from a data dict
@@ -197,69 +251,9 @@ def create(vm_):
)
return False
- def __query_node_data(vm_, data):
- running = False
- try:
- node = show_instance(vm_['name'], 'action')
- running = (node['state'] == NodeState.RUNNING)
- log.debug(
- 'Loaded node data for %s:\nname: %s\nstate: %s',
- vm_['name'],
- pprint.pformat(node['name']),
- node['state']
- )
- except Exception as err:
- log.error(
- 'Failed to get nodes list: %s', err,
- # Show the traceback if the debug logging level is enabled
- exc_info_on_loglevel=logging.DEBUG
- )
- # Trigger a failure in the wait for IP function
- return False
-
- if not running:
- # Still not running, trigger another iteration
- return
-
- private = node['private_ips']
- public = node['public_ips']
-
- if private and not public:
- log.warning(
- 'Private IPs returned, but not public... Checking for '
- 'misidentified IPs'
- )
- for private_ip in private:
- private_ip = preferred_ip(vm_, [private_ip])
- if private_ip is False:
- continue
- if salt.utils.cloud.is_public_ip(private_ip):
- log.warning('%s is a public IP', private_ip)
- data.public_ips.append(private_ip)
- else:
- log.warning('%s is a private IP', private_ip)
- if private_ip not in data.private_ips:
- data.private_ips.append(private_ip)
-
- if ssh_interface(vm_) == 'private_ips' and data.private_ips:
- return data
-
- if private:
- data.private_ips = private
- if ssh_interface(vm_) == 'private_ips':
- return data
-
- if public:
- data.public_ips = public
- if ssh_interface(vm_) != 'private_ips':
- return data
-
- log.debug('DATA')
- log.debug(data)
-
try:
data = salt.utils.cloud.wait_for_ip(
- __query_node_data,
+ _query_node_data,
update_args=(vm_, data),
timeout=config.get_cloud_config_value(
'wait_for_ip_timeout', vm_, __opts__, default=25 * 60),
diff --git a/salt/cloud/clouds/nova.py b/salt/cloud/clouds/nova.py
index ed9251d4b1..d2cbf7387a 100644
--- a/salt/cloud/clouds/nova.py
+++ b/salt/cloud/clouds/nova.py
@@ -722,6 +722,145 @@ def request_instance(vm_=None, call=None):
return data, vm_
+def _query_node_data(vm_, data, conn):
+ try:
+ node = show_instance(vm_['name'], 'action')
+ log.debug('Loaded node data for {0}:'
+ '\n{1}'.format(vm_['name'], pprint.pformat(node)))
+ except Exception as err:
+ # Show the traceback if the debug logging level is enabled
+ log.error('Failed to get nodes list: {0}'.format(err),
+ exc_info_on_loglevel=logging.DEBUG)
+ # Trigger a failure in the wait for IP function
+ return False
+
+ running = node['state'] == 'ACTIVE'
+ if not running:
+ # Still not running, trigger another iteration
+ return
+
+ if rackconnect(vm_) is True:
+ extra = node.get('extra', {})
+ rc_status = extra.get('metadata', {}).get('rackconnect_automation_status', '')
+ if rc_status != 'DEPLOYED':
+ log.debug('Waiting for Rackconnect automation to complete')
+ return
+
+ if managedcloud(vm_) is True:
+ extra = conn.server_show_libcloud(node['id']).extra
+ mc_status = extra.get('metadata', {}).get('rax_service_level_automation', '')
+
+ if mc_status != 'Complete':
+ log.debug('Waiting for managed cloud automation to complete')
+ return
+
+ access_ip = node.get('extra', {}).get('access_ip', '')
+
+ rcv3 = rackconnectv3(vm_) in node['addresses']
+ sshif = ssh_interface(vm_) in node['addresses']
+
+ if any((rcv3, sshif)):
+ networkname = rackconnectv3(vm_) if rcv3 else ssh_interface(vm_)
+ for network in node['addresses'].get(networkname, []):
+ if network['version'] is 4:
+ access_ip = network['addr']
+ break
+ vm_['cloudnetwork'] = True
+
+ # Conditions to pass this
+ #
+ # Rackconnect v2: vm_['rackconnect'] = True
+ # If this is True, then the server will not be accessible from the ipv4 addres in public_ips.
+ # That interface gets turned off, and an ipv4 from the dedicated firewall is routed to the
+ # server. In this case we can use the private_ips for ssh_interface, or the access_ip.
+ #
+ # Rackconnect v3: vm['rackconnectv3'] = <cloudnetwork>
+ # If this is the case, salt will need to use the cloud network to login to the server. There
+ # is no ipv4 address automatically provisioned for these servers when they are booted. SaltCloud
+ # also cannot use the private_ips, because that traffic is dropped at the hypervisor.
+ #
+ # CloudNetwork: vm['cloudnetwork'] = True
+ # If this is True, then we should have an access_ip at this point set to the ip on the cloud
+ # network. If that network does not exist in the 'addresses' dictionary, then SaltCloud will
+ # use the initial access_ip, and not overwrite anything.
+
+ if (any((cloudnetwork(vm_), rackconnect(vm_)))
+ and (ssh_interface(vm_) != 'private_ips' or rcv3)
+ and access_ip != ''):
+ data.public_ips = [access_ip]
+ return data
+
+ result = []
+
+ if ('private_ips' not in node
+ and 'public_ips' not in node
+ and 'floating_ips' not in node
+ and 'fixed_ips' not in node
+ and 'access_ip' in node.get('extra', {})):
+ result = [node['extra']['access_ip']]
+
+ private = node.get('private_ips', [])
+ public = node.get('public_ips', [])
+ fixed = node.get('fixed_ips', [])
+ floating = node.get('floating_ips', [])
+
+ if private and not public:
+ log.warning('Private IPs returned, but not public. '
+ 'Checking for misidentified IPs')
+ for private_ip in private:
+ private_ip = preferred_ip(vm_, [private_ip])
+ if private_ip is False:
+ continue
+ if salt.utils.cloud.is_public_ip(private_ip):
+ log.warning('{0} is a public IP'.format(private_ip))
+ data.public_ips.append(private_ip)
+ log.warning('Public IP address was not ready when we last checked. '
+ 'Appending public IP address now.')
+ public = data.public_ips
+ else:
+ log.warning('{0} is a private IP'.format(private_ip))
+ ignore_ip = ignore_cidr(vm_, private_ip)
+ if private_ip not in data.private_ips and not ignore_ip:
+ result.append(private_ip)
+
+ # populate return data with private_ips
+ # when ssh_interface is set to private_ips and public_ips exist
+ if not result and ssh_interface(vm_) == 'private_ips':
+ for private_ip in private:
+ ignore_ip = ignore_cidr(vm_, private_ip)
+ if private_ip not in data.private_ips and not ignore_ip:
+ result.append(private_ip)
+
+ non_private_ips = []
+
+ if public:
+ data.public_ips = public
+ if ssh_interface(vm_) == 'public_ips':
+ non_private_ips.append(public)
+
+ if floating:
+ data.floating_ips = floating
+ if ssh_interface(vm_) == 'floating_ips':
+ non_private_ips.append(floating)
+
+ if fixed:
+ data.fixed_ips = fixed
+ if ssh_interface(vm_) == 'fixed_ips':
+ non_private_ips.append(fixed)
+
+ if non_private_ips:
+ log.debug('result = {0}'.format(non_private_ips))
+ data.private_ips = result
+ if ssh_interface(vm_) != 'private_ips':
+ return data
+
+ if result:
+ log.debug('result = {0}'.format(result))
+ data.private_ips = result
+ if ssh_interface(vm_) == 'private_ips':
+ return data
+
+
def create(vm_):
'''
Create a single VM from a data dict
@@ -792,162 +931,10 @@ def create(vm_):
# Pull the instance ID, valid for both spot and normal instances
vm_['instance_id'] = data.id
- def __query_node_data(vm_, data):
- try:
- node = show_instance(vm_['name'], 'action')
- log.debug(
- 'Loaded node data for {0}:\n{1}'.format(
- vm_['name'],
- pprint.pformat(node)
- )
- )
- except Exception as err:
- log.error(
- 'Failed to get nodes list: {0}'.format(
- err
- ),
- # Show the traceback if the debug logging level is enabled
- exc_info_on_loglevel=logging.DEBUG
- )
- # Trigger a failure in the wait for IP function
- return False
-
- running = node['state'] == 'ACTIVE'
- if not running:
- # Still not running, trigger another iteration
- return
-
- if rackconnect(vm_) is True:
- extra = node.get('extra', {})
- rc_status = extra.get('metadata', {}).get(
- 'rackconnect_automation_status', '')
- if rc_status != 'DEPLOYED':
- log.debug('Waiting for Rackconnect automation to complete')
- return
-
- if managedcloud(vm_) is True:
- extra = conn.server_show_libcloud(
- node['id']
- ).extra
- mc_status = extra.get('metadata', {}).get(
- 'rax_service_level_automation', '')
-
- if mc_status != 'Complete':
- log.debug('Waiting for managed cloud automation to complete')
- return
-
- access_ip = node.get('extra', {}).get('access_ip', '')
-
- rcv3 = rackconnectv3(vm_) in node['addresses']
- sshif = ssh_interface(vm_) in node['addresses']
-
- if any((rcv3, sshif)):
- networkname = rackconnectv3(vm_) if rcv3 else ssh_interface(vm_)
- for network in node['addresses'].get(networkname, []):
- if network['version'] is 4:
- access_ip = network['addr']
- break
- vm_['cloudnetwork'] = True
-
- # Conditions to pass this
- #
- # Rackconnect v2: vm_['rackconnect'] = True
- # If this is True, then the server will not be accessible from the ipv4 addres in public_ips.
- # That interface gets turned off, and an ipv4 from the dedicated firewall is routed to the
- # server. In this case we can use the private_ips for ssh_interface, or the access_ip.
- #
- # Rackconnect v3: vm['rackconnectv3'] = <cloudnetwork>
- # If this is the case, salt will need to use the cloud network to login to the server. There
- # is no ipv4 address automatically provisioned for these servers when they are booted. SaltCloud
- # also cannot use the private_ips, because that traffic is dropped at the hypervisor.
- #
- # CloudNetwork: vm['cloudnetwork'] = True
- # If this is True, then we should have an access_ip at this point set to the ip on the cloud
- # network. If that network does not exist in the 'addresses' dictionary, then SaltCloud will
- # use the initial access_ip, and not overwrite anything.
-
- if any((cloudnetwork(vm_), rackconnect(vm_))) and (ssh_interface(vm_) != 'private_ips' or rcv3) and access_ip != '':
- data.public_ips = [access_ip, ]
- return data
-
- result = []
-
- if 'private_ips' not in node and 'public_ips' not in node and \
- 'floating_ips' not in node and 'fixed_ips' not in node and \
- 'access_ip' in node.get('extra', {}):
- result = [node['extra']['access_ip']]
-
- private = node.get('private_ips', [])
- public = node.get('public_ips', [])
- fixed = node.get('fixed_ips', [])
- floating = node.get('floating_ips', [])
-
- if private and not public:
- log.warning(
- 'Private IPs returned, but not public... Checking for '
- 'misidentified IPs'
- )
- for private_ip in private:
- private_ip = preferred_ip(vm_, [private_ip])
- if private_ip is False:
- continue
- if salt.utils.cloud.is_public_ip(private_ip):
- log.warning('{0} is a public IP'.format(private_ip))
- data.public_ips.append(private_ip)
- log.warning(
- (
- 'Public IP address was not ready when we last'
- ' checked. Appending public IP address now.'
- )
- )
- public = data.public_ips
- else:
- log.warning('{0} is a private IP'.format(private_ip))
- ignore_ip = ignore_cidr(vm_, private_ip)
- if private_ip not in data.private_ips and not ignore_ip:
- result.append(private_ip)
-
- # populate return data with private_ips
- # when ssh_interface is set to private_ips and public_ips exist
- if not result and ssh_interface(vm_) == 'private_ips':
- for private_ip in private:
- ignore_ip = ignore_cidr(vm_, private_ip)
- if private_ip not in data.private_ips and not ignore_ip:
- result.append(private_ip)
-
- non_private_ips = []
-
- if public:
- data.public_ips = public
- if ssh_interface(vm_) == 'public_ips':
- non_private_ips.append(public)
-
- if floating:
- data.floating_ips = floating
- if ssh_interface(vm_) == 'floating_ips':
- non_private_ips.append(floating)
-
- if fixed:
- data.fixed_ips = fixed
- if ssh_interface(vm_) == 'fixed_ips':
- non_private_ips.append(fixed)
-
- if non_private_ips:
- log.debug('result = {0}'.format(non_private_ips))
- data.private_ips = result
- if ssh_interface(vm_) != 'private_ips':
- return data
-
- if result:
- log.debug('result = {0}'.format(result))
- data.private_ips = result
- if ssh_interface(vm_) == 'private_ips':
- return data
-
try:
data = salt.utils.cloud.wait_for_ip(
- __query_node_data,
- update_args=(vm_, data),
+ _query_node_data,
+ update_args=(vm_, data, conn),
timeout=config.get_cloud_config_value(
'wait_for_ip_timeout', vm_, __opts__, default=10 * 60),
interval=config.get_cloud_config_value(
diff --git a/salt/cloud/clouds/openstack.py b/salt/cloud/clouds/openstack.py
index cc936509c7..c8ad91ff23 100644
--- a/salt/cloud/clouds/openstack.py
+++ b/salt/cloud/clouds/openstack.py
@@ -585,6 +585,119 @@ def request_instance(vm_=None, call=None):
return data, vm_
+def _query_node_data(vm_, data, floating, conn):
+ try:
+ node = show_instance(vm_['name'], 'action')
+ log.debug(
+ 'Loaded node data for {0}:\n{1}'.format(
+ vm_['name'],
+ pprint.pformat(node)
+ )
+ )
+ except Exception as err:
+ log.error(
+ 'Failed to get nodes list: {0}'.format(
+ err
+ ),
+ # Show the traceback if the debug logging level is enabled
+ exc_info_on_loglevel=logging.DEBUG
+ )
+ # Trigger a failure in the wait for IP function
+ return False
+
+ running = node['state'] == NodeState.RUNNING
+ if not running:
+ # Still not running, trigger another iteration
+ return
+
+ if rackconnect(vm_) is True:
+ check_libcloud_version((0, 14, 0), why='rackconnect: True')
+ extra = node.get('extra')
+ rc_status = extra.get('metadata', {}).get(
+ 'rackconnect_automation_status', '')
+ access_ip = extra.get('access_ip', '')
+
+ if rc_status != 'DEPLOYED':
+ log.debug('Waiting for Rackconnect automation to complete')
+ return
+
+ if managedcloud(vm_) is True:
+ extra = node.get('extra')
+ mc_status = extra.get('metadata', {}).get(
+ 'rax_service_level_automation', '')
+
+ if mc_status != 'Complete':
+ log.debug('Waiting for managed cloud automation to complete')
+ return
+
+ public = node['public_ips']
+ if floating:
+ try:
+ name = data.name
+ ip = floating[0].ip_address
+ conn.ex_attach_floating_ip_to_node(data, ip)
+ log.info(
+ 'Attaching floating IP \'{0}\' to node \'{1}\''.format(
+ ip, name
+ )
+ )
+ data.public_ips.append(ip)
+ public = data.public_ips
+ except Exception:
+ # Note(pabelanger): Because we loop, we only want to attach the
+ # floating IP address one. So, expect failures if the IP is
+ # already attached.
+ pass
+
+ result = []
+ private = node['private_ips']
+ if private and not public:
+ log.warning(
+ 'Private IPs returned, but not public... Checking for '
+ 'misidentified IPs'
+ )
+ for private_ip in private:
+ private_ip = preferred_ip(vm_, [private_ip])
+ if private_ip is False:
+ continue
+ if salt.utils.cloud.is_public_ip(private_ip):
+ log.warning('{0} is a public IP'.format(private_ip))
+ data.public_ips.append(private_ip)
+ log.warning(
+ 'Public IP address was not ready when we last checked.'
+ ' Appending public IP address now.'
+ )
+ public = data.public_ips
+ else:
+ log.warning('{0} is a private IP'.format(private_ip))
+ ignore_ip = ignore_cidr(vm_, private_ip)
+ if private_ip not in data.private_ips and not ignore_ip:
+ result.append(private_ip)
+
+ if rackconnect(vm_) is True and ssh_interface(vm_) != 'private_ips':
+ data.public_ips = access_ip
+ return data
+
+ # populate return data with private_ips
+ # when ssh_interface is set to private_ips and public_ips exist
+ if not result and ssh_interface(vm_) == 'private_ips':
+ for private_ip in private:
+ ignore_ip = ignore_cidr(vm_, private_ip)
+ if private_ip not in data.private_ips and not ignore_ip:
+ result.append(private_ip)
+
+ if result:
+ log.debug('result = {0}'.format(result))
+ data.private_ips = result
+ if ssh_interface(vm_) == 'private_ips':
+ return data
+
+ if public:
+ data.public_ips = public
+ if ssh_interface(vm_) != 'private_ips':
+ return data
+
+
def create(vm_):
'''
Create a single VM from a data dict
@@ -659,122 +772,10 @@ def create(vm_):
# Pull the instance ID, valid for both spot and normal instances
vm_['instance_id'] = data.id
- def __query_node_data(vm_, data, floating):
- try:
- node = show_instance(vm_['name'], 'action')
- log.debug(
- 'Loaded node data for {0}:\n{1}'.format(
- vm_['name'],
- pprint.pformat(node)
- )
- )
- except Exception as err:
- log.error(
- 'Failed to get nodes list: {0}'.format(
- err
- ),
- # Show the traceback if the debug logging level is enabled
- exc_info_on_loglevel=logging.DEBUG
- )
- # Trigger a failure in the wait for IP function
- return False
-
- running = node['state'] == NodeState.RUNNING
- if not running:
- # Still not running, trigger another iteration
- return
-
- if rackconnect(vm_) is True:
- check_libcloud_version((0, 14, 0), why='rackconnect: True')
- extra = node.get('extra')
- rc_status = extra.get('metadata', {}).get(
- 'rackconnect_automation_status', '')
- access_ip = extra.get('access_ip', '')
-
- if rc_status != 'DEPLOYED':
- log.debug('Waiting for Rackconnect automation to complete')
- return
-
- if managedcloud(vm_) is True:
- extra = node.get('extra')
- mc_status = extra.get('metadata', {}).get(
- 'rax_service_level_automation', '')
-
- if mc_status != 'Complete':
- log.debug('Waiting for managed cloud automation to complete')
- return
-
- public = node['public_ips']
- if floating:
- try:
- name = data.name
- ip = floating[0].ip_address
- conn.ex_attach_floating_ip_to_node(data, ip)
- log.info(
- 'Attaching floating IP \'{0}\' to node \'{1}\''.format(
- ip, name
- )
- )
- data.public_ips.append(ip)
- public = data.public_ips
- except Exception:
- # Note(pabelanger): Because we loop, we only want to attach the
- # floating IP address one. So, expect failures if the IP is
- # already attached.
- pass
-
- result = []
- private = node['private_ips']
- if private and not public:
- log.warning(
- 'Private IPs returned, but not public... Checking for '
- 'misidentified IPs'
- )
- for private_ip in private:
- private_ip = preferred_ip(vm_, [private_ip])
- if private_ip is False:
- continue
- if salt.utils.cloud.is_public_ip(private_ip):
- log.warning('{0} is a public IP'.format(private_ip))
- data.public_ips.append(private_ip)
- log.warning(
- 'Public IP address was not ready when we last checked.'
- ' Appending public IP address now.'
- )
- public = data.public_ips
- else:
- log.warning('{0} is a private IP'.format(private_ip))
- ignore_ip = ignore_cidr(vm_, private_ip)
- if private_ip not in data.private_ips and not ignore_ip:
- result.append(private_ip)
-
- if rackconnect(vm_) is True and ssh_interface(vm_) != 'private_ips':
- data.public_ips = access_ip
- return data
-
- # populate return data with private_ips
- # when ssh_interface is set to private_ips and public_ips exist
- if not result and ssh_interface(vm_) == 'private_ips':
- for private_ip in private:
- ignore_ip = ignore_cidr(vm_, private_ip)
- if private_ip not in data.private_ips and not ignore_ip:
- result.append(private_ip)
-
- if result:
- log.debug('result = {0}'.format(result))
- data.private_ips = result
- if ssh_interface(vm_) == 'private_ips':
- return data
-
- if public:
- data.public_ips = public
- if ssh_interface(vm_) != 'private_ips':
- return data
-
try:
data = salt.utils.cloud.wait_for_ip(
- __query_node_data,
- update_args=(vm_, data, vm_['floating']),
+ _query_node_data,
+ update_args=(vm_, data, vm_['floating'], conn),
timeout=config.get_cloud_config_value(
'wait_for_ip_timeout', vm_, __opts__, default=10 * 60),
interval=config.get_cloud_config_value(
diff --git a/tests/unit/cloud/clouds/__init__.py b/tests/unit/cloud/clouds/__init__.py
index 40a96afc6f..15d1e2c5c6 100644
--- a/tests/unit/cloud/clouds/__init__.py
+++ b/tests/unit/cloud/clouds/__init__.py
@@ -1 +1,18 @@
# -*- coding: utf-8 -*-
+
+
+def _preferred_ip(ip_set, preferred=None):
+ '''
+    Returns a function that selects which IP is preferred
+    :param ip_set:
+    :param preferred:
+ :return:
+ '''
+
+ def _ip_decider(vm, ips):
+ for ip in ips:
+ if ip in preferred:
+ return ip
+ return False
+
+ return _ip_decider
diff --git a/tests/unit/cloud/clouds/dimensiondata_test.py b/tests/unit/cloud/clouds/dimensiondata_test.py
index b4ea7f57f5..9f92fd7dbe 100644
--- a/tests/unit/cloud/clouds/dimensiondata_test.py
+++ b/tests/unit/cloud/clouds/dimensiondata_test.py
@@ -25,6 +25,7 @@ from salt.exceptions import SaltCloudSystemExit
from salttesting import TestCase, skipIf
from salttesting.mock import MagicMock, NO_MOCK, NO_MOCK_REASON, patch
from salttesting.helpers import ensure_in_syspath
+from tests.unit.cloud.clouds import _preferred_ip
ensure_in_syspath('../../../')
@@ -48,7 +49,7 @@ VM_NAME = 'winterfell'
try:
import certifi
libcloud.security.CA_CERTS_PATH.append(certifi.where())
-except ImportError:
+except (ImportError, NameError):
pass
@@ -129,6 +130,7 @@ class DimensionDataTestCase(ExtendedTestCase):
call='function'
)
+ @skipIf(HAS_LIBCLOUD is False, "Install 'libcloud' to be able to run this unit test.")
def test_avail_sizes(self):
'''
Tests that avail_sizes returns an empty dictionary.
@@ -160,6 +162,30 @@ class DimensionDataTestCase(ExtendedTestCase):
p = dimensiondata.get_configured_provider()
self.assertNotEqual(p, None)
+ PRIVATE_IPS = ['0.0.0.0', '1.1.1.1', '2.2.2.2']
+
+ @patch('salt.cloud.clouds.dimensiondata.show_instance',
+ MagicMock(return_value={'state': True,
+ 'name': 'foo',
+ 'public_ips': [],
+ 'private_ips': PRIVATE_IPS}))
+ @patch('salt.cloud.clouds.dimensiondata.preferred_ip', _preferred_ip(PRIVATE_IPS, ['0.0.0.0']))
+ @patch('salt.cloud.clouds.dimensiondata.ssh_interface', MagicMock(return_value='private_ips'))
+ def test_query_node_data_filter_preferred_ip_addresses(self):
+ '''
+ Test if query node data is filtering out unpreferred IP addresses.
+ '''
+ dimensiondata.NodeState = MagicMock()
+ dimensiondata.NodeState.RUNNING = True
+ dimensiondata.__opts__ = {}
+
+ vm = {'name': None}
+ data = MagicMock()
+ data.public_ips = []
+
+ assert dimensiondata._query_node_data(vm, data).public_ips == ['0.0.0.0']
+
+
if __name__ == '__main__':
from integration import run_tests
run_tests(DimensionDataTestCase, needs_daemon=False)
diff --git a/tests/unit/cloud/clouds/nova_test.py b/tests/unit/cloud/clouds/nova_test.py
new file mode 100644
index 0000000000..c44c0bd507
--- /dev/null
+++ b/tests/unit/cloud/clouds/nova_test.py
@@ -0,0 +1,43 @@
+# -*- coding: utf-8 -*-
+'''
+ :codeauthor: :email:`Bo Maryniuk <bo@suse.de>`
+'''
+
+# Import Python libs
+from __future__ import absolute_import
+
+# Import Salt Testing Libs
+from salttesting import TestCase
+from salt.cloud.clouds import nova
+from salttesting.mock import MagicMock, patch
+from tests.unit.cloud.clouds import _preferred_ip
+
+
+class NovaTestCase(TestCase):
+ '''
+ Test case for openstack
+ '''
+ PRIVATE_IPS = ['0.0.0.0', '1.1.1.1', '2.2.2.2']
+
+ @patch('salt.cloud.clouds.nova.show_instance',
+ MagicMock(return_value={'state': 'ACTIVE',
+ 'public_ips': [],
+ 'addresses': [],
+ 'private_ips': PRIVATE_IPS}))
+ @patch('salt.cloud.clouds.nova.rackconnect', MagicMock(return_value=False))
+ @patch('salt.cloud.clouds.nova.rackconnectv3', MagicMock(return_value={'mynet': ['1.1.1.1']}))
+ @patch('salt.cloud.clouds.nova.cloudnetwork', MagicMock(return_value=False))
+ @patch('salt.cloud.clouds.nova.managedcloud', MagicMock(return_value=False))
+ @patch('salt.cloud.clouds.nova.preferred_ip', _preferred_ip(PRIVATE_IPS, ['0.0.0.0']))
+ @patch('salt.cloud.clouds.nova.ssh_interface', MagicMock(return_value='public_ips'))
+ def test_query_node_data_filter_preferred_ip_addresses(self):
+ '''
+ Test if query node data is filtering out unpreferred IP addresses.
+ '''
+ nova.__opts__ = {}
+
+ vm = {'name': None}
+ data = MagicMock()
+ data.public_ips = []
+
+ assert nova._query_node_data(vm, data, MagicMock()).public_ips == ['0.0.0.0']
diff --git a/tests/unit/cloud/clouds/openstack_test.py b/tests/unit/cloud/clouds/openstack_test.py
new file mode 100644
index 0000000000..9e70e3874a
--- /dev/null
+++ b/tests/unit/cloud/clouds/openstack_test.py
@@ -0,0 +1,43 @@
+# -*- coding: utf-8 -*-
+'''
+ :codeauthor: :email:`Bo Maryniuk <bo@suse.de>`
+'''
+
+# Import Python libs
+from __future__ import absolute_import
+
+# Import Salt Testing Libs
+from salttesting import TestCase
+from salt.cloud.clouds import openstack
+from salttesting.mock import MagicMock, patch
+from tests.unit.cloud.clouds import _preferred_ip
+
+
+class OpenstackTestCase(TestCase):
+ '''
+ Test case for openstack
+ '''
+ PRIVATE_IPS = ['0.0.0.0', '1.1.1.1', '2.2.2.2']
+
+ @patch('salt.cloud.clouds.openstack.show_instance',
+ MagicMock(return_value={'state': True,
+ 'public_ips': [],
+ 'private_ips': PRIVATE_IPS}))
+ @patch('salt.cloud.clouds.openstack.rackconnect', MagicMock(return_value=False))
+ @patch('salt.cloud.clouds.openstack.managedcloud', MagicMock(return_value=False))
+ @patch('salt.cloud.clouds.openstack.preferred_ip', _preferred_ip(PRIVATE_IPS, ['0.0.0.0']))
+ @patch('salt.cloud.clouds.openstack.ssh_interface', MagicMock(return_value=False))
+ def test_query_node_data_filter_preferred_ip_addresses(self):
+ '''
+ Test if query node data is filtering out unpreferred IP addresses.
+ '''
+ openstack.NodeState = MagicMock()
+ openstack.NodeState.RUNNING = True
+ openstack.__opts__ = {}
+
+ vm = {'name': None}
+ data = MagicMock()
+ data.public_ips = []
+
+ with patch('salt.utils.cloud.is_public_ip', MagicMock(return_value=True)):
+ assert openstack._query_node_data(vm, data, False, MagicMock()).public_ips == ['0.0.0.0']
--
2.11.0

View File

@ -1,116 +0,0 @@
From a0523ac82a1dcca7a7c77f9b3816f237f211b94e Mon Sep 17 00:00:00 2001
From: Bo Maryniuk <bo@suse.de>
Date: Thu, 29 Sep 2016 17:00:14 +0200
Subject: [PATCH] Add YUM plugin
* Add plugin for Yum-Salt integration
* Add configuration for the yumnotify plugin
* Fixes wrong 'enabled' opts for yumnotify plugin
---
scripts/yum/plugins/README.md | 20 ++++++++++++++
scripts/yum/plugins/yumnotify.conf | 2 ++
scripts/yum/plugins/yumnotify.py | 55 ++++++++++++++++++++++++++++++++++++++
3 files changed, 77 insertions(+)
create mode 100644 scripts/yum/plugins/README.md
create mode 100644 scripts/yum/plugins/yumnotify.conf
create mode 100644 scripts/yum/plugins/yumnotify.py
diff --git a/scripts/yum/plugins/README.md b/scripts/yum/plugins/README.md
new file mode 100644
index 0000000000..cb3abd2260
--- /dev/null
+++ b/scripts/yum/plugins/README.md
@@ -0,0 +1,20 @@
+## What it is
+
+Plugin which provides a notification mechanism to Salt, if Yum is
+used outside of it.
+
+## Installation
+
+Configuration files are going to:
+
+ `/etc/yum/pluginconf.d/[name].conf`
+
+Plugin itself goes to:
+
+   `/usr/share/yum-plugins/[name].py`
+
+## Permissions
+
+User: root
+Group: root
+Mode: 644
diff --git a/scripts/yum/plugins/yumnotify.conf b/scripts/yum/plugins/yumnotify.conf
new file mode 100644
index 0000000000..8e4d76c728
--- /dev/null
+++ b/scripts/yum/plugins/yumnotify.conf
@@ -0,0 +1,2 @@
+[main]
+enabled=1
diff --git a/scripts/yum/plugins/yumnotify.py b/scripts/yum/plugins/yumnotify.py
new file mode 100644
index 0000000000..268e1e9531
--- /dev/null
+++ b/scripts/yum/plugins/yumnotify.py
@@ -0,0 +1,55 @@
+# Copyright (c) 2016 SUSE Linux LLC
+# All Rights Reserved.
+#
+# Author: Bo Maryniuk <bo@suse.de>
+
+from yum.plugins import TYPE_CORE
+from yum import config
+import os
+import hashlib
+
+CK_PATH = "/var/cache/salt/minion/rpmdb.cookie"
+RPM_PATH = "/var/lib/rpm/Packages"
+
+requires_api_version = '2.5'
+plugin_type = TYPE_CORE
+
+
+def _get_mtime():
+ """
+ Get the modified time of the RPM Database.
+
+ Returns:
+ Unix ticks
+ """
+ return os.path.exists(RPM_PATH) and int(os.path.getmtime(RPM_PATH)) or 0
+
+
+def _get_checksum():
+ """
+ Get the checksum of the RPM Database.
+
+ Returns:
+ hexdigest
+ """
+ digest = hashlib.md5()
+ with open(RPM_PATH, "rb") as rpm_db_fh:
+ while True:
+ buff = rpm_db_fh.read(0x1000)
+ if not buff:
+ break
+ digest.update(buff)
+ return digest.hexdigest()
+
+
+def posttrans_hook(conduit):
+ """
+ Hook after the package installation transaction.
+
+ :param conduit:
+ :return:
+ """
+ # Integrate Yum with Salt
+ if 'SALT_RUNNING' not in os.environ:
+ with open(CK_PATH, 'w') as ck_fh:
+ ck_fh.write('{chksum} {mtime}\n'.format(chksum=_get_checksum(), mtime=_get_mtime()))
--
2.11.0

View File

@ -1,222 +0,0 @@
From 2616f36c3a1131a73546449d33d917783f6f1f7b Mon Sep 17 00:00:00 2001
From: Bo Maryniuk <bo@suse.de>
Date: Mon, 9 May 2016 10:33:44 +0200
Subject: [PATCH] Add zypp-notify plugin
* Add unit test to the libzypp drift detector plugin
---
scripts/zypper/plugins/commit/README.md | 3 ++
scripts/zypper/plugins/commit/zyppnotify | 59 +++++++++++++++++++++++++++++
tests/unit/zypp_plugins_test.py | 51 +++++++++++++++++++++++++
tests/zypp_plugin.py | 64 ++++++++++++++++++++++++++++++++
4 files changed, 177 insertions(+)
create mode 100644 scripts/zypper/plugins/commit/README.md
create mode 100755 scripts/zypper/plugins/commit/zyppnotify
create mode 100644 tests/unit/zypp_plugins_test.py
create mode 100644 tests/zypp_plugin.py
diff --git a/scripts/zypper/plugins/commit/README.md b/scripts/zypper/plugins/commit/README.md
new file mode 100644
index 0000000000..01c8917c8e
--- /dev/null
+++ b/scripts/zypper/plugins/commit/README.md
@@ -0,0 +1,3 @@
+# Zypper plugins
+
+Plugins here are required to interact with SUSE Manager in conjunction of SaltStack and Zypper.
diff --git a/scripts/zypper/plugins/commit/zyppnotify b/scripts/zypper/plugins/commit/zyppnotify
new file mode 100755
index 0000000000..268298b108
--- /dev/null
+++ b/scripts/zypper/plugins/commit/zyppnotify
@@ -0,0 +1,59 @@
+#!/usr/bin/python
+#
+# Copyright (c) 2016 SUSE Linux LLC
+# All Rights Reserved.
+#
+# Author: Bo Maryniuk <bo@suse.de>
+
+import sys
+import os
+import hashlib
+
+from zypp_plugin import Plugin
+
+
+class DriftDetector(Plugin):
+ """
+ Return diff of the installed packages outside the Salt.
+ """
+ def __init__(self):
+ Plugin.__init__(self)
+ self.ck_path = "/var/cache/salt/minion/rpmdb.cookie"
+ self.rpm_path = "/var/lib/rpm/Packages"
+
+ def _get_mtime(self):
+ '''
+ Get the modified time of the RPM Database.
+ Returns:
+ Unix ticks
+ '''
+ return os.path.exists(self.rpm_path) and int(os.path.getmtime(self.rpm_path)) or 0
+
+ def _get_checksum(self):
+ '''
+ Get the checksum of the RPM Database.
+ Returns:
+ hexdigest
+ '''
+ digest = hashlib.md5()
+ with open(self.rpm_path, "rb") as rpm_db_fh:
+ while True:
+ buff = rpm_db_fh.read(0x1000)
+ if not buff:
+ break
+ digest.update(buff)
+
+ return digest.hexdigest()
+
+ def PLUGINEND(self, headers, body):
+ """
+ Hook when plugin closes Zypper's transaction.
+ """
+ if 'SALT_RUNNING' not in os.environ:
+ with open(self.ck_path, 'w') as ck_fh:
+ ck_fh.write('{chksum} {mtime}\n'.format(chksum=self._get_checksum(), mtime=self._get_mtime()))
+
+ self.ack()
+
+
+DriftDetector().main()
diff --git a/tests/unit/zypp_plugins_test.py b/tests/unit/zypp_plugins_test.py
new file mode 100644
index 0000000000..550403cc24
--- /dev/null
+++ b/tests/unit/zypp_plugins_test.py
@@ -0,0 +1,51 @@
+# -*- coding: utf-8 -*-
+'''
+ :codeauthor: :email:`Bo Maryniuk <bo@suse.de>`
+'''
+
+# Import Python Libs
+from __future__ import absolute_import
+
+# Import Salt Testing Libs
+from salttesting.helpers import ensure_in_syspath
+from salttesting import TestCase, skipIf
+from salttesting.mock import (
+ MagicMock,
+ patch,
+ NO_MOCK,
+ NO_MOCK_REASON
+)
+
+ensure_in_syspath('../')
+
+import os
+import imp
+from zypp_plugin import BogusIO
+
+zyppnotify = imp.load_source('zyppnotify', os.path.sep.join(os.path.dirname(__file__).split(
+ os.path.sep)[:-2] + ['scripts', 'zypper', 'plugins', 'commit', 'zyppnotify']))
+
+@skipIf(NO_MOCK, NO_MOCK_REASON)
+class ZyppPluginsTestCase(TestCase):
+ '''
+ Test shipped libzypp plugins.
+ '''
+ def test_drift_detector(self):
+ '''
+ Test drift detector for a correct cookie file.
+ Returns:
+
+ '''
+ drift = zyppnotify.DriftDetector()
+ drift._get_mtime = MagicMock(return_value=123)
+ drift._get_checksum = MagicMock(return_value='deadbeef')
+ bogus_io = BogusIO()
+ with patch('zyppnotify.open', bogus_io):
+ drift.PLUGINEND(None, None)
+ self.assertEqual(str(bogus_io), 'deadbeef 123\n')
+ self.assertEqual(bogus_io.mode, 'w')
+ self.assertEqual(bogus_io.path, '/var/cache/salt/minion/rpmdb.cookie')
+
+if __name__ == '__main__':
+ from integration import run_tests
+ run_tests(ZyppPluginsTestCase, needs_daemon=False)
diff --git a/tests/zypp_plugin.py b/tests/zypp_plugin.py
new file mode 100644
index 0000000000..218f703811
--- /dev/null
+++ b/tests/zypp_plugin.py
@@ -0,0 +1,64 @@
+'''
+Related to zypp_plugins_test.py module.
+'''
+
+
+class Plugin(object):
+ '''
+ Bogus module for Zypp Plugins tests.
+ '''
+ def ack(self):
+ '''
+ Acknowledge that the plugin had finished the transaction
+ Returns:
+
+ '''
+
+ def main(self):
+ '''
+ Register plugin
+ Returns:
+
+ '''
+
+
+class BogusIO(object):
+ '''
+ Read/write logger.
+ '''
+
+ def __init__(self):
+ self.content = list()
+ self.closed = False
+
+ def __str__(self):
+ return '\n'.join(self.content)
+
+ def __call__(self, *args, **kwargs):
+ self.path, self.mode = args
+ return self
+
+ def __exit__(self, exc_type, exc_val, exc_tb):
+ self.close()
+
+ def __enter__(self):
+ return self
+
+ def write(self, data):
+ '''
+ Simulate writing data
+ Args:
+ data:
+
+ Returns:
+
+ '''
+ self.content.append(data)
+
+ def close(self):
+ '''
+ Simulate closing the IO object.
+ Returns:
+
+ '''
+ self.closed = True
--
2.11.0

View File

@ -1,991 +0,0 @@
From 55ac73ad201d8a23ddc8e44b5310343e99562610 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Pablo=20Su=C3=A1rez=20Hern=C3=A1ndez?=
<psuarezhernandez@suse.com>
Date: Tue, 21 Mar 2017 11:10:06 +0000
Subject: [PATCH] Adding support for installing patches in yum/dnf
execution module
Adding support for installing patches in Zypper module
Adding list_downloaded function to Zypper module
Adding list_downloaded function to Yum module
Adding new pkg.downloaded state
Adding documentation for pkg.downloaded state
Adding pkg.patched and pkg.patch_downloaded states
Check targets for advisory patches installation
Adds support for listing advisory patches with Zypper
Adds support for listing advisory patches with Yum
Improving function naming
Moving advisory ids checks into pkg_resource.parse_targets
Fixes _find_download_targets to call _preflight_check
Fixes parse_targets when advisory id is passed as name
Pylint fixes
Enables pkg.downloaded to verify packages after package manager call.
Adding missing kwargs parameters to pkg.install call
Adding versionadded flags
Some refactoring and minor fixes
Adding unit tests for Zypper execution module
Adding more unit tests for Zypper module
Pylint fix
---
salt/modules/pkg_resource.py | 9 +
salt/modules/yumpkg.py | 108 +++++++-
salt/modules/zypper.py | 91 ++++++-
salt/states/pkg.py | 420 +++++++++++++++++++++++++++++
tests/unit/modules/zypp/zypper-patches.xml | 10 +
tests/unit/modules/zypper_test.py | 119 ++++++++
6 files changed, 751 insertions(+), 6 deletions(-)
create mode 100644 tests/unit/modules/zypp/zypper-patches.xml
diff --git a/salt/modules/pkg_resource.py b/salt/modules/pkg_resource.py
index 1df9307..ad0f0ff 100644
--- a/salt/modules/pkg_resource.py
+++ b/salt/modules/pkg_resource.py
@@ -119,6 +119,15 @@ def parse_targets(name=None,
log.error('Only one of "pkgs" and "sources" can be used.')
return None, None
+ elif 'advisory_ids' in kwargs:
+ if pkgs:
+ log.error('Cannot use "advisory_ids" and "pkgs" at the same time')
+ return None, None
+ elif kwargs['advisory_ids']:
+ return kwargs['advisory_ids'], 'advisory'
+ else:
+ return [name], 'advisory'
+
elif pkgs:
pkgs = _repack_pkgs(pkgs, normalize=normalize)
if not pkgs:
diff --git a/salt/modules/yumpkg.py b/salt/modules/yumpkg.py
index d67bbe0..f6777d7 100644
--- a/salt/modules/yumpkg.py
+++ b/salt/modules/yumpkg.py
@@ -19,6 +19,7 @@ from __future__ import absolute_import
import contextlib
import copy
import fnmatch
+import glob
import itertools
import logging
import os
@@ -800,6 +801,27 @@ def list_upgrades(refresh=True, **kwargs):
list_updates = salt.utils.alias_function(list_upgrades, 'list_updates')
+def list_downloaded():
+ '''
+ .. versionadded:: Oxygen
+
+ List prefetched packages downloaded by Yum in the local disk.
+
+ CLI example:
+
+ .. code-block:: bash
+
+ salt '*' pkg.list_downloaded
+ '''
+ CACHE_DIR = os.path.join('/var/cache/', _yum())
+
+ ret = {}
+ for package_path in glob.glob(os.path.join(CACHE_DIR, '*/*/*/packages/*.rpm')):
+ pkg_info = __salt__['lowpkg.bin_pkg_info'](package_path)
+ ret.setdefault(pkg_info['name'], {})[pkg_info['version']] = package_path
+ return ret
+
+
def info_installed(*names):
'''
.. versionadded:: 2015.8.1
@@ -1082,10 +1104,10 @@ def install(name=None,
log.warning('"version" parameter will be ignored for multiple '
'package targets')
- old = list_pkgs(versions_as_list=False)
+ old = list_pkgs(versions_as_list=False) if not downloadonly else list_downloaded()
# Use of __context__ means no duplicate work here, just accessing
# information already in __context__ from the previous call to list_pkgs()
- old_as_list = list_pkgs(versions_as_list=True)
+ old_as_list = list_pkgs(versions_as_list=True) if not downloadonly else list_downloaded()
to_install = []
to_downgrade = []
@@ -1108,6 +1130,16 @@ def install(name=None,
if pkg_type == 'repository':
pkg_params_items = six.iteritems(pkg_params)
+ elif pkg_type == 'advisory':
+ pkg_params_items = []
+ cur_patches = list_patches()
+ for advisory_id in pkg_params:
+ if advisory_id not in cur_patches:
+ raise CommandExecutionError(
+ 'Advisory id "{0}" not found'.format(advisory_id)
+ )
+ else:
+ pkg_params_items.append(advisory_id)
else:
pkg_params_items = []
for pkg_source in pkg_params:
@@ -1131,6 +1163,9 @@ def install(name=None,
for pkg_item_list in pkg_params_items:
if pkg_type == 'repository':
pkgname, version_num = pkg_item_list
+ elif pkg_type == 'advisory':
+ pkgname = pkg_item_list
+ version_num = None
else:
try:
pkgname, pkgpath, version_num = pkg_item_list
@@ -1145,6 +1180,8 @@ def install(name=None,
to_reinstall.append((pkgname, pkgname))
else:
to_install.append((pkgname, pkgname))
+ elif pkg_type == 'advisory':
+ to_install.append((pkgname, pkgname))
else:
to_install.append((pkgname, pkgpath))
else:
@@ -1291,6 +1328,8 @@ def install(name=None,
targets = []
with _temporarily_unhold(to_install, targets):
if targets:
+ if pkg_type == 'advisory':
+ targets = ["--advisory={0}".format(t) for t in targets]
cmd = []
if salt.utils.systemd.has_scope(__context__) \
and __salt__['config.get']('systemd.scope', True):
@@ -1299,7 +1338,7 @@ def install(name=None,
if _yum() == 'dnf':
cmd.extend(['--best', '--allowerasing'])
_add_common_args(cmd)
- cmd.append('install')
+ cmd.append('install' if pkg_type is not 'advisory' else 'update')
cmd.extend(targets)
out = __salt__['cmd.run_all'](
cmd,
@@ -1351,7 +1390,7 @@ def install(name=None,
errors.append(out['stdout'])
__context__.pop('pkg.list_pkgs', None)
- new = list_pkgs(versions_as_list=False)
+ new = list_pkgs(versions_as_list=False) if not downloadonly else list_downloaded()
ret = salt.utils.compare_dicts(old, new)
@@ -2757,3 +2796,64 @@ def diff(*paths):
local_pkgs[pkg]['path'], path) or 'Unchanged'
return ret
+
+
+def _get_patches(installed_only=False):
+ '''
+ List all known patches in repos.
+ '''
+ patches = {}
+
+ cmd = [_yum(), '--quiet', 'updateinfo', 'list', 'security', 'all']
+ ret = __salt__['cmd.run_stdout'](
+ cmd,
+ python_shell=False
+ )
+ for line in salt.utils.itertools.split(ret, os.linesep):
+ inst, advisory_id, sev, pkg = re.match(r'([i|\s]) ([^\s]+) +([^\s]+) +([^\s]+)',
+ line).groups()
+ if inst != 'i' and installed_only:
+ continue
+ patches[advisory_id] = {
+ 'installed': True if inst == 'i' else False,
+ 'summary': pkg
+ }
+ return patches
+
+
+def list_patches(refresh=False):
+ '''
+ .. versionadded:: Oxygen
+
+ List all known advisory patches from available repos.
+
+ refresh
+ force a refresh if set to True.
+ If set to False (default) it depends on yum if a refresh is
+ executed.
+
+ CLI Examples:
+
+ .. code-block:: bash
+
+ salt '*' pkg.list_patches
+ '''
+ if refresh:
+ refresh_db()
+
+ return _get_patches()
+
+
+def list_installed_patches():
+ '''
+ .. versionadded:: Oxygen
+
+ List installed advisory patches on the system.
+
+ CLI Examples:
+
+ .. code-block:: bash
+
+ salt '*' pkg.list_installed_patches
+ '''
+ return _get_patches(installed_only=True)
diff --git a/salt/modules/zypper.py b/salt/modules/zypper.py
index 75e529c..28087f5 100644
--- a/salt/modules/zypper.py
+++ b/salt/modules/zypper.py
@@ -15,6 +15,7 @@ Package support for openSUSE via the zypper package manager
# Import python libs
from __future__ import absolute_import
import copy
+import glob
import logging
import re
import os
@@ -1029,10 +1030,18 @@ def install(name=None,
for problem in problems:
log.error(problem)
return {}
+ elif pkg_type == 'advisory':
+ targets = []
+ cur_patches = list_patches()
+ for advisory_id in pkg_params:
+ if advisory_id not in cur_patches:
+ raise CommandExecutionError('Advisory id "{0}" not found'.format(advisory_id))
+ else:
+ targets.append(advisory_id)
else:
targets = pkg_params
- old = list_pkgs()
+ old = list_pkgs() if not downloadonly else list_downloaded()
downgrades = []
if fromrepo:
fromrepoopt = ['--force', '--force-resolution', '--from', fromrepo]
@@ -1050,6 +1059,8 @@ def install(name=None,
cmd_install.extend(fromrepoopt)
errors = []
+ if pkg_type == 'advisory':
+ targets = ["patch:{0}".format(t) for t in targets]
# Split the targets into batches of 500 packages each, so that
# the maximal length of the command line is not broken
@@ -1068,7 +1079,7 @@ def install(name=None,
__zypper__(no_repo_failure=ignore_repo_failure).call(*cmd)
__context__.pop('pkg.list_pkgs', None)
- new = list_pkgs()
+ new = list_pkgs() if not downloadonly else list_downloaded()
ret = salt.utils.compare_dicts(old, new)
if errors:
@@ -1771,6 +1782,28 @@ def download(*packages, **kwargs):
)
+def list_downloaded():
+ '''
+ .. versionadded:: Oxygen
+
+ List prefetched packages downloaded by Zypper in the local disk.
+
+ CLI example:
+
+ .. code-block:: bash
+
+ salt '*' pkg.list_downloaded
+ '''
+ CACHE_DIR = '/var/cache/zypp/packages/'
+
+ ret = {}
+ # Zypper storage is repository_tag/arch/package-version.rpm
+ for package_path in glob.glob(os.path.join(CACHE_DIR, '*/*/*.rpm')):
+ pkg_info = __salt__['lowpkg.bin_pkg_info'](package_path)
+ ret.setdefault(pkg_info['name'], {})[pkg_info['version']] = package_path
+ return ret
+
+
def diff(*paths):
'''
Return a formatted diff between current files and original in a package.
@@ -1808,3 +1841,57 @@ def diff(*paths):
) or 'Unchanged'
return ret
+
+
+def _get_patches(installed_only=False):
+ '''
+ List all known patches in repos.
+ '''
+ patches = {}
+ for element in __zypper__.nolock.xml.call('se', '-t', 'patch').getElementsByTagName('solvable'):
+ installed = element.getAttribute('status') == 'installed'
+ if (installed_only and installed) or not installed_only:
+ patches[element.getAttribute('name')] = {
+ 'installed': installed,
+ 'summary': element.getAttribute('summary'),
+ }
+
+ return patches
+
+
+def list_patches(refresh=False):
+ '''
+ .. versionadded:: Oxygen
+
+ List all known advisory patches from available repos.
+
+ refresh
+ force a refresh if set to True.
+ If set to False (default) it depends on zypper if a refresh is
+ executed.
+
+ CLI Examples:
+
+ .. code-block:: bash
+
+ salt '*' pkg.list_patches
+ '''
+ if refresh:
+ refresh_db()
+
+ return _get_patches()
+
+
+def list_installed_patches():
+ '''
+ .. versionadded:: Oxygen
+
+ List installed advisory patches on the system.
+
+ CLI Examples:
+
+ .. code-block:: bash
+
+ salt '*' pkg.list_installed_patches
+ '''
+ return _get_patches(installed_only=True)
diff --git a/salt/states/pkg.py b/salt/states/pkg.py
index 7c3b27b..d185002 100644
--- a/salt/states/pkg.py
+++ b/salt/states/pkg.py
@@ -213,6 +213,171 @@ def _find_unpurge_targets(desired):
]
+def _find_download_targets(name=None,
+ version=None,
+ pkgs=None,
+ normalize=True,
+ skip_suggestions=False,
+ ignore_epoch=False,
+ **kwargs):
+ '''
+ Inspect the arguments to pkg.downloaded and discover what packages need to
+ be downloaded. Return a dict of packages to download.
+ '''
+ cur_pkgs = __salt__['pkg.list_downloaded']()
+ if pkgs:
+ to_download = _repack_pkgs(pkgs, normalize=normalize)
+
+ if not to_download:
+ # Badly-formatted SLS
+ return {'name': name,
+ 'changes': {},
+ 'result': False,
+ 'comment': 'Invalidly formatted pkgs parameter. See '
+ 'minion log.'}
+ else:
+ if normalize:
+ _normalize_name = \
+ __salt__.get('pkg.normalize_name', lambda pkgname: pkgname)
+ to_download = {_normalize_name(name): version}
+ else:
+ to_download = {name: version}
+
+ cver = cur_pkgs.get(name, {})
+ if name in to_download:
+ # Package already downloaded, no need to download again
+ if cver and version in cver:
+ return {'name': name,
+ 'changes': {},
+ 'result': True,
+ 'comment': 'Version {0} of package \'{1}\' is already '
+ 'downloaded'.format(version, name)}
+
+ # if cver is not an empty string, the package is already downloaded
+ elif cver and version is None:
+ # The package is downloaded
+ return {'name': name,
+ 'changes': {},
+ 'result': True,
+ 'comment': 'Package {0} is already '
+ 'downloaded'.format(name)}
+
+ version_spec = False
+ if not skip_suggestions:
+ try:
+ problems = _preflight_check(to_download, **kwargs)
+ except CommandExecutionError:
+ pass
+ else:
+ comments = []
+ if problems.get('no_suggest'):
+ comments.append(
+ 'The following package(s) were not found, and no '
+ 'possible matches were found in the package db: '
+ '{0}'.format(
+ ', '.join(sorted(problems['no_suggest']))
+ )
+ )
+ if problems.get('suggest'):
+ for pkgname, suggestions in \
+ six.iteritems(problems['suggest']):
+ comments.append(
+ 'Package \'{0}\' not found (possible matches: '
+ '{1})'.format(pkgname, ', '.join(suggestions))
+ )
+ if comments:
+ if len(comments) > 1:
+ comments.append('')
+ return {'name': name,
+ 'changes': {},
+ 'result': False,
+ 'comment': '. '.join(comments).rstrip()}
+
+ # Find out which packages will be targeted in the call to pkg.download
+ # Check current downloaded versions against specified versions
+ targets = {}
+ problems = []
+ for pkgname, pkgver in six.iteritems(to_download):
+ cver = cur_pkgs.get(pkgname, {})
+ # Package not yet downloaded, so add to targets
+ if not cver:
+ targets[pkgname] = pkgver
+ continue
+ # No version specified but package is already downloaded
+ elif cver and not pkgver:
+ continue
+
+ version_spec = True
+ try:
+ oper, verstr = _get_comparison_spec(pkgver)
+ except CommandExecutionError as exc:
+ problems.append(exc.strerror)
+ continue
+
+ if not _fulfills_version_spec(cver.keys(), oper, verstr,
+ ignore_epoch=ignore_epoch):
+ targets[pkgname] = pkgver
+
+ if problems:
+ return {'name': name,
+ 'changes': {},
+ 'result': False,
+ 'comment': ' '.join(problems)}
+
+ if not targets:
+ # All specified packages are already downloaded
+ msg = (
+ 'All specified packages{0} are already downloaded'
+ .format(' (matching specified versions)' if version_spec else '')
+ )
+ return {'name': name,
+ 'changes': {},
+ 'result': True,
+ 'comment': msg}
+
+ return targets
+
+
+def _find_advisory_targets(name=None,
+ advisory_ids=None,
+ **kwargs):
+ '''
+ Inspect the arguments to pkg.patch_installed and discover what advisory
+ patches need to be installed. Return a dict of advisory patches to install.
+ '''
+ cur_patches = __salt__['pkg.list_installed_patches']()
+ if advisory_ids:
+ to_download = advisory_ids
+ else:
+ to_download = [name]
+ if cur_patches.get(name, {}):
+ # Advisory patch already installed, no need to install it again
+ return {'name': name,
+ 'changes': {},
+ 'result': True,
+ 'comment': 'Advisory patch {0} is already '
+ 'installed'.format(name)}
+
+ # Find out which advisory patches will be targeted in the call to pkg.install
+ targets = []
+ for patch_name in to_download:
+ cver = cur_patches.get(patch_name, {})
+ # Advisory patch not yet installed, so add to targets
+ if not cver:
+ targets.append(patch_name)
+ continue
+
+ if not targets:
+        # All specified advisory patches are already installed
+ msg = ('All specified advisory patches are already installed')
+ return {'name': name,
+ 'changes': {},
+ 'result': True,
+ 'comment': msg}
+
+ return targets
+
+
def _find_remove_targets(name=None,
version=None,
pkgs=None,
@@ -1700,6 +1865,261 @@ def installed(
return ret
+def downloaded(name,
+ version=None,
+ pkgs=None,
+ fromrepo=None,
+ ignore_epoch=None,
+ **kwargs):
+ '''
+ .. versionadded:: Oxygen
+
+ Ensure that the package is downloaded, and that it is the correct version
+ (if specified).
+
+ Currently supported for the following pkg providers:
+ :mod:`yumpkg <salt.modules.yumpkg>` and :mod:`zypper <salt.modules.zypper>`
+
+ :param str name:
+ The name of the package to be downloaded. This parameter is ignored if
+        "pkgs" is used. Additionally, please note that this option can
+ only be used to download packages from a software repository.
+
+ :param str version:
+ Download a specific version of a package.
+
+ .. important::
+ As of version 2015.8.7, for distros which use yum/dnf, packages
+ which have a version with a nonzero epoch (that is, versions which
+ start with a number followed by a colon must have the epoch included
+ when specifying the version number. For example:
+
+ .. code-block:: yaml
+
+ vim-enhanced:
+ pkg.downloaded:
+ - version: 2:7.4.160-1.el7
+
+            An **ignore_epoch** argument has been added which causes the
+ epoch to be disregarded when the state checks to see if the desired
+ version was installed.
+
+ You can install a specific version when using the ``pkgs`` argument by
+ including the version after the package:
+
+ .. code-block:: yaml
+
+ common_packages:
+ pkg.downloaded:
+ - pkgs:
+ - unzip
+ - dos2unix
+ - salt-minion: 2015.8.5-1.el6
+
+ CLI Example:
+
+ .. code-block:: yaml
+
+ zsh:
+ pkg.downloaded:
+ - version: 5.0.5-4.63
+ - fromrepo: "myrepository"
+ '''
+ ret = {'name': name,
+ 'changes': {},
+ 'result': None,
+ 'comment': ''}
+
+ if 'pkg.list_downloaded' not in __salt__:
+ ret['result'] = False
+ ret['comment'] = 'The pkg.downloaded state is not available on ' \
+ 'this platform'
+ return ret
+
+ if isinstance(pkgs, list) and len(pkgs) == 0:
+ ret['result'] = True
+ ret['comment'] = 'No packages to download provided'
+ return ret
+
+    # It doesn't make sense here to receive 'downloadonly' as kwargs
+    # as we're explicitly passing 'downloadonly=True' to the execution module.
+ if 'downloadonly' in kwargs:
+ del kwargs['downloadonly']
+
+ # Only downloading not yet downloaded packages
+ targets = _find_download_targets(name,
+ version,
+ pkgs,
+ fromrepo=fromrepo,
+ ignore_epoch=ignore_epoch,
+ **kwargs)
+ if isinstance(targets, dict) and 'result' in targets:
+ return targets
+ elif not isinstance(targets, dict):
+ ret['result'] = False
+ ret['comment'] = 'An error was encountered while checking targets: ' \
+ '{0}'.format(targets)
+ return ret
+
+ if __opts__['test']:
+ summary = ', '.join(targets)
+ ret['comment'] = 'The following packages would be ' \
+ 'downloaded: {0}'.format(summary)
+ return ret
+
+ try:
+ pkg_ret = __salt__['pkg.install'](name=name,
+ pkgs=pkgs,
+ version=version,
+ downloadonly=True,
+ fromrepo=fromrepo,
+ ignore_epoch=ignore_epoch,
+ **kwargs)
+ ret['result'] = True
+ ret['changes'].update(pkg_ret)
+ except CommandExecutionError as exc:
+ ret = {'name': name, 'result': False}
+ if exc.info:
+ # Get information for state return from the exception.
+ ret['changes'] = exc.info.get('changes', {})
+ ret['comment'] = exc.strerror_without_changes
+ else:
+ ret['changes'] = {}
+ ret['comment'] = 'An error was encountered while downloading ' \
+ 'package(s): {0}'.format(exc)
+ return ret
+
+ new_pkgs = __salt__['pkg.list_downloaded']()
+ ok, failed = _verify_install(targets, new_pkgs, ignore_epoch=ignore_epoch)
+
+ if failed:
+ summary = ', '.join([_get_desired_pkg(x, targets)
+ for x in failed])
+ ret['result'] = False
+ ret['comment'] = 'The following packages failed to ' \
+ 'download: {0}'.format(summary)
+
+ if not ret['changes'] and not ret['comment']:
+ ret['result'] = True
+ ret['comment'] = 'Packages are already downloaded: ' \
+ '{0}'.format(', '.join(targets))
+
+ return ret
+
+
+def patch_installed(name, advisory_ids=None, downloadonly=None, **kwargs):
+ '''
+ .. versionadded:: Oxygen
+
+ Ensure that packages related to certain advisory ids are installed.
+
+ Currently supported for the following pkg providers:
+ :mod:`yumpkg <salt.modules.yumpkg>` and :mod:`zypper <salt.modules.zypper>`
+
+ CLI Example:
+
+ .. code-block:: yaml
+
+ issue-foo-fixed:
+ pkg.patch_installed:
+ - advisory_ids:
+ - SUSE-SLE-SERVER-12-SP2-2017-185
+ - SUSE-SLE-SERVER-12-SP2-2017-150
+ - SUSE-SLE-SERVER-12-SP2-2017-120
+ '''
+ ret = {'name': name,
+ 'changes': {},
+ 'result': None,
+ 'comment': ''}
+
+ if 'pkg.list_patches' not in __salt__:
+ ret['result'] = False
+ ret['comment'] = 'The pkg.patch_installed state is not available on ' \
+ 'this platform'
+ return ret
+
+ if isinstance(advisory_ids, list) and len(advisory_ids) == 0:
+ ret['result'] = True
+ ret['comment'] = 'No advisory ids provided'
+ return ret
+
+    # Only target advisory patches that are not yet installed
+ targets = _find_advisory_targets(name, advisory_ids, **kwargs)
+ if isinstance(targets, dict) and 'result' in targets:
+ return targets
+ elif not isinstance(targets, list):
+ ret['result'] = False
+ ret['comment'] = 'An error was encountered while checking targets: ' \
+ '{0}'.format(targets)
+ return ret
+
+ if __opts__['test']:
+ summary = ', '.join(targets)
+ ret['comment'] = 'The following advisory patches would be ' \
+ 'downloaded: {0}'.format(summary)
+ return ret
+
+ try:
+ pkg_ret = __salt__['pkg.install'](name=name,
+ advisory_ids=advisory_ids,
+ downloadonly=downloadonly,
+ **kwargs)
+ ret['result'] = True
+ ret['changes'].update(pkg_ret)
+ except CommandExecutionError as exc:
+ ret = {'name': name, 'result': False}
+ if exc.info:
+ # Get information for state return from the exception.
+ ret['changes'] = exc.info.get('changes', {})
+ ret['comment'] = exc.strerror_without_changes
+ else:
+ ret['changes'] = {}
+ ret['comment'] = ('An error was encountered while downloading '
+ 'package(s): {0}'.format(exc))
+ return ret
+
+ if not ret['changes'] and not ret['comment']:
+ status = 'downloaded' if downloadonly else 'installed'
+ ret['result'] = True
+ ret['comment'] = 'Related packages are already {}'.format(status)
+
+ return ret
+
+
+def patch_downloaded(name, advisory_ids=None, **kwargs):
+ '''
+ .. versionadded:: Oxygen
+
+ Ensure that packages related to certain advisory ids are downloaded.
+
+ Currently supported for the following pkg providers:
+ :mod:`yumpkg <salt.modules.yumpkg>` and :mod:`zypper <salt.modules.zypper>`
+
+ CLI Example:
+
+ .. code-block:: yaml
+
+ preparing-to-fix-issues:
+ pkg.patch_downloaded:
+ - advisory_ids:
+ - SUSE-SLE-SERVER-12-SP2-2017-185
+ - SUSE-SLE-SERVER-12-SP2-2017-150
+ - SUSE-SLE-SERVER-12-SP2-2017-120
+ '''
+ if 'pkg.list_patches' not in __salt__:
+ return {'name': name,
+ 'result': False,
+ 'changes': {},
+ 'comment': 'The pkg.patch_downloaded state is not available on '
+ 'this platform'}
+
+    # It doesn't make sense here to receive 'downloadonly' as kwargs
+    # as we're explicitly passing 'downloadonly=True' to the execution module.
+ if 'downloadonly' in kwargs:
+ del kwargs['downloadonly']
+ return patch_installed(name=name, advisory_ids=advisory_ids, downloadonly=True, **kwargs)
+
+
def latest(
name,
refresh=None,
diff --git a/tests/unit/modules/zypp/zypper-patches.xml b/tests/unit/modules/zypp/zypper-patches.xml
new file mode 100644
index 0000000..2088634
--- /dev/null
+++ b/tests/unit/modules/zypp/zypper-patches.xml
@@ -0,0 +1,10 @@
+<?xml version='1.0'?>
+<stream>
+ <search-result version="0.0">
+ <solvable-list>
+ <solvable status="not-installed" name="SUSE-SLE-SERVER-12-SP2-2017-97" summary="Recommended update for ovmf" kind="patch"/>
+ <solvable status="installed" name="SUSE-SLE-SERVER-12-SP2-2017-98" summary="Recommended update for kmod" kind="patch"/>
+ <solvable status="not-installed" name="SUSE-SLE-SERVER-12-SP2-2017-99" summary="Security update for apache2" kind="patch"/>
+ </solvable-list>
+ </search-result>
+</stream>
diff --git a/tests/unit/modules/zypper_test.py b/tests/unit/modules/zypper_test.py
index c29d12c..39bd2e7 100644
--- a/tests/unit/modules/zypper_test.py
+++ b/tests/unit/modules/zypper_test.py
@@ -462,6 +462,48 @@ Repository 'DUMMY' not found by its alias, number, or URI.
self.assertTrue(pkgs.get(pkg_name))
self.assertEqual(pkgs[pkg_name], pkg_version)
+ def test_list_patches(self):
+ '''
+ Test advisory patches listing.
+
+ :return:
+ '''
+
+ ref_out = {
+ 'stdout': get_test_data('zypper-patches.xml'),
+ 'stderr': None,
+ 'retcode': 0
+ }
+
+ PATCHES_RET = {
+ 'SUSE-SLE-SERVER-12-SP2-2017-97': {'installed': False, 'summary': 'Recommended update for ovmf'},
+ 'SUSE-SLE-SERVER-12-SP2-2017-98': {'installed': True, 'summary': 'Recommended update for kmod'},
+ 'SUSE-SLE-SERVER-12-SP2-2017-99': {'installed': False, 'summary': 'Security update for apache2'}
+ }
+
+ with patch.dict(zypper.__salt__, {'cmd.run_all': MagicMock(return_value=ref_out)}):
+ list_patches = zypper.list_patches(refresh=False)
+ self.assertEqual(len(list_patches), 3)
+ self.assertDictEqual(list_patches, PATCHES_RET)
+
+ @patch('glob.glob', MagicMock(return_value=['/var/cache/zypper/packages/foo/bar/test_package.rpm']))
+ def test_list_downloaded(self):
+ '''
+ Test downloaded packages listing.
+
+ :return:
+ '''
+ DOWNLOADED_RET = {
+ 'test-package': {
+ '1.0': '/var/cache/zypper/packages/foo/bar/test_package.rpm'
+ }
+ }
+
+ with patch.dict(zypper.__salt__, {'lowpkg.bin_pkg_info': MagicMock(return_value={'name': 'test-package', 'version': '1.0'})}):
+ list_downloaded = zypper.list_downloaded()
+ self.assertEqual(len(list_downloaded), 1)
+ self.assertDictEqual(list_downloaded, DOWNLOADED_RET)
+
def test_download(self):
'''
Test package download
@@ -487,6 +529,83 @@ Repository 'DUMMY' not found by its alias, number, or URI.
test_out['_error'] = "The following package(s) failed to download: foo"
self.assertEqual(zypper.download("nmap", "foo"), test_out)
+ @patch('salt.modules.zypper._systemd_scope', MagicMock(return_value=False))
+ @patch('salt.modules.zypper.list_downloaded', MagicMock(side_effect=[{}, {'vim': {'1.1': '/foo/bar/test.rpm'}}]))
+ def test_install_with_downloadonly(self):
+ '''
+ Test a package installation with downloadonly=True.
+
+ :return:
+ '''
+ with patch.dict(zypper.__salt__, {'pkg_resource.parse_targets': MagicMock(return_value=({'vim': None}, 'repository'))}):
+ with patch('salt.modules.zypper.__zypper__.noraise.call', MagicMock()) as zypper_mock:
+ ret = zypper.install(pkgs=['vim'], downloadonly=True)
+ zypper_mock.assert_called_once_with(
+ '--no-refresh',
+ 'install',
+ '--name',
+ '--auto-agree-with-licenses',
+ '--download-only',
+ 'vim'
+ )
+ self.assertDictEqual(ret, {'vim': {'new': {'1.1': '/foo/bar/test.rpm'}, 'old': ''}})
+
+ @patch('salt.modules.zypper._systemd_scope', MagicMock(return_value=False))
+ @patch('salt.modules.zypper.list_downloaded', MagicMock(return_value={'vim': {'1.1': '/foo/bar/test.rpm'}}))
+ def test_install_with_downloadonly_already_downloaded(self):
+ '''
+ Test a package installation with downloadonly=True when package is already downloaded.
+
+ :return:
+ '''
+ with patch.dict(zypper.__salt__, {'pkg_resource.parse_targets': MagicMock(return_value=({'vim': None}, 'repository'))}):
+ with patch('salt.modules.zypper.__zypper__.noraise.call', MagicMock()) as zypper_mock:
+ ret = zypper.install(pkgs=['vim'], downloadonly=True)
+ zypper_mock.assert_called_once_with(
+ '--no-refresh',
+ 'install',
+ '--name',
+ '--auto-agree-with-licenses',
+ '--download-only',
+ 'vim'
+ )
+ self.assertDictEqual(ret, {})
+
+ @patch('salt.modules.zypper._systemd_scope', MagicMock(return_value=False))
+ @patch('salt.modules.zypper._get_patches', MagicMock(return_value={'SUSE-PATCH-1234': {'installed': False, 'summary': 'test'}}))
+ @patch('salt.modules.zypper.list_pkgs', MagicMock(side_effect=[{"vim": "1.1"}, {"vim": "1.2"}]))
+ def test_install_advisory_patch_ok(self):
+ '''
+ Test successfully advisory patch installation.
+
+ :return:
+ '''
+ with patch.dict(zypper.__salt__, {'pkg_resource.parse_targets': MagicMock(return_value=({'SUSE-PATCH-1234': None}, 'advisory'))}):
+ with patch('salt.modules.zypper.__zypper__.noraise.call', MagicMock()) as zypper_mock:
+ ret = zypper.install(advisory_ids=['SUSE-PATCH-1234'])
+ zypper_mock.assert_called_once_with(
+ '--no-refresh',
+ 'install',
+ '--name',
+ '--auto-agree-with-licenses',
+ 'patch:SUSE-PATCH-1234'
+ )
+ self.assertDictEqual(ret, {"vim": {"old": "1.1", "new": "1.2"}})
+
+ @patch('salt.modules.zypper._systemd_scope', MagicMock(return_value=False))
+ @patch('salt.modules.zypper._get_patches', MagicMock(return_value={'SUSE-PATCH-1234': {'installed': False, 'summary': 'test'}}))
+ @patch('salt.modules.zypper.list_pkgs', MagicMock(return_value={"vim": "1.1"}))
+ def test_install_advisory_patch_failure(self):
+ '''
+ Test failing advisory patch installation because patch does not exist.
+
+ :return:
+ '''
+ with patch.dict(zypper.__salt__, {'pkg_resource.parse_targets': MagicMock(return_value=({'SUSE-PATCH-XXX': None}, 'advisory'))}):
+ with patch('salt.modules.zypper.__zypper__.noraise.call', MagicMock()) as zypper_mock:
+ with self.assertRaisesRegex(CommandExecutionError, '^Advisory id "SUSE-PATCH-XXX" not found$'):
+ zypper.install(advisory_ids=['SUSE-PATCH-XXX'])
+
def test_remove_purge(self):
'''
Test package removal
--
2.10.1

View File

@ -1,26 +0,0 @@
From b338b21fe340ee4efa0045894315fcf20be1dc49 Mon Sep 17 00:00:00 2001
From: Silvio Moioli <smoioli@suse.de>
Date: Wed, 14 Dec 2016 10:33:39 +0100
Subject: [PATCH] Avoid failures on SLES 12 SP2 because of new systemd
TaskMax limit (bsc#985112)
---
pkg/salt-master.service | 1 +
1 file changed, 1 insertion(+)
diff --git a/pkg/salt-master.service b/pkg/salt-master.service
index 59be50301a..ecd3edd467 100644
--- a/pkg/salt-master.service
+++ b/pkg/salt-master.service
@@ -6,6 +6,7 @@ After=network.target
LimitNOFILE=16384
Type=simple
ExecStart=/usr/bin/salt-master
+TasksMax=infinity
[Install]
WantedBy=multi-user.target
--
2.11.0

View File

@ -1,77 +0,0 @@
From 257e7dc14458e879844ae6dda2337b3f7fba441c Mon Sep 17 00:00:00 2001
From: Bo Maryniuk <bo@suse.de>
Date: Tue, 16 May 2017 12:06:51 +0200
Subject: [PATCH] Bugfix: unable to use 127 as hostname
Unit test for accepting hosts names as 127
Harden to 127. IP part
Add unit test for hostname can be started from 127
---
salt/utils/network.py | 4 ++--
tests/unit/utils/network_test.py | 32 ++++++++++++++++++++++++++++++++
2 files changed, 34 insertions(+), 2 deletions(-)
diff --git a/salt/utils/network.py b/salt/utils/network.py
index 8d2e9f5fb2..036c00d430 100644
--- a/salt/utils/network.py
+++ b/salt/utils/network.py
@@ -95,8 +95,8 @@ def _generate_minion_id():
Needs to work on Python 2.6, because of collections.OrderedDict only since 2.7 version.
Override 'filter()' for custom filtering.
'''
- localhost_matchers = ['localhost.*', 'ip6-.*', '127.*', r'0\.0\.0\.0',
- '::1.*', 'ipv6-.*', 'fe00::.*', 'fe02::.*', '1.0.0.*.ip6.arpa']
+ localhost_matchers = [r'localhost.*', r'ip6-.*', r'127[.]\d', r'0\.0\.0\.0',
+ r'::1.*', r'ipv6-.*', r'fe00::.*', r'fe02::.*', r'1.0.0.*.ip6.arpa']
def append(self, p_object):
if p_object and p_object not in self and not self.filter(p_object):
diff --git a/tests/unit/utils/network_test.py b/tests/unit/utils/network_test.py
index a13492f8f8..b7eea54eb1 100644
--- a/tests/unit/utils/network_test.py
+++ b/tests/unit/utils/network_test.py
@@ -266,6 +266,38 @@ class NetworkTestCase(TestCase):
self.assertEqual(network._generate_minion_id(),
['hostname.domainname.blank', 'nodename', 'hostname', '1.2.3.4', '5.6.7.8'])
+ @patch('platform.node', MagicMock(return_value='127'))
+ @patch('socket.gethostname', MagicMock(return_value='127'))
+ @patch('socket.getfqdn', MagicMock(return_value='127.domainname.blank'))
+ @patch('socket.getaddrinfo', MagicMock(return_value=[(2, 3, 0, 'attrname', ('127.0.1.1', 0))]))
+ @patch('salt.utils.fopen', MagicMock(return_value=False))
+ @patch('os.path.exists', MagicMock(return_value=False))
+ @patch('salt.utils.network.ip_addrs', MagicMock(return_value=['1.2.3.4', '5.6.7.8']))
+ def test_generate_minion_id_127_name(self):
+ '''
+ Test if minion IDs can be named 127.foo
+
+ :return:
+ '''
+ self.assertEqual(network._generate_minion_id(),
+ ['127.domainname.blank', '127', '1.2.3.4', '5.6.7.8'])
+
+ @patch('platform.node', MagicMock(return_value='127890'))
+ @patch('socket.gethostname', MagicMock(return_value='127890'))
+ @patch('socket.getfqdn', MagicMock(return_value='127890.domainname.blank'))
+ @patch('socket.getaddrinfo', MagicMock(return_value=[(2, 3, 0, 'attrname', ('127.0.1.1', 0))]))
+ @patch('salt.utils.fopen', MagicMock(return_value=False))
+ @patch('os.path.exists', MagicMock(return_value=False))
+ @patch('salt.utils.network.ip_addrs', MagicMock(return_value=['1.2.3.4', '5.6.7.8']))
+ def test_generate_minion_id_127_name_startswith(self):
+ '''
+ Test if minion IDs can be named starting from "127"
+
+ :return:
+ '''
+ self.assertEqual(network._generate_minion_id(),
+ ['127890.domainname.blank', '127890', '1.2.3.4', '5.6.7.8'])
+
@patch('platform.node', MagicMock(return_value='hostname'))
@patch('socket.gethostname', MagicMock(return_value='hostname'))
@patch('socket.getfqdn', MagicMock(return_value='hostname'))
--
2.13.0

View File

@ -1,72 +0,0 @@
From 79f9f4c06813d70cd03ad32c6c8ef8fec1656e88 Mon Sep 17 00:00:00 2001
From: Mihai Dinca <mdinca@suse.de>
Date: Fri, 14 Oct 2016 09:04:47 +0200
Subject: [PATCH] Change travis configuration file to use salt-toaster
---
.travis.yml | 47 +++++++++++++++++++----------------------------
1 file changed, 19 insertions(+), 28 deletions(-)
diff --git a/.travis.yml b/.travis.yml
index 7b4c8ce0e5..3101efb372 100644
--- a/.travis.yml
+++ b/.travis.yml
@@ -1,35 +1,26 @@
language: python
python:
- - '2.6'
- - '2.7'
+ - 2.7
-before_install:
- - sudo apt-get update
- - sudo apt-get install --fix-broken --ignore-missing -y -o Dpkg::Options::="--force-confdef" -o Dpkg::Options::="--force-confold" swig rabbitmq-server ruby python-apt mysql-server libmysqlclient-dev
- - (git describe && git fetch --tags) || (git remote add upstream git://github.com/saltstack/salt.git && git fetch --tags upstream)
- - pip install mock
- - pip install --allow-external http://dl.dropbox.com/u/174789/m2crypto-0.20.1.tar.gz
- - pip install --upgrade pep8 'pylint<=1.2.0'
- - pip install --upgrade coveralls
- - "if [[ $TRAVIS_PYTHON_VERSION == '2.6' ]]; then pip install unittest2 ordereddict; fi"
- - pip install git+https://github.com/saltstack/salt-testing.git#egg=SaltTesting
-
-install:
- - pip install -r requirements/zeromq.txt -r requirements/cloud.txt
- - pip install --allow-all-external -r requirements/opt.txt
+services:
+ - docker
-before_script:
- - "/home/travis/virtualenv/python${TRAVIS_PYTHON_VERSION}/bin/pylint --rcfile=.testing.pylintrc salt/ && echo 'Finished Pylint Check Cleanly' || echo 'Finished Pylint Check With Errors'"
- - "/home/travis/virtualenv/python${TRAVIS_PYTHON_VERSION}/bin/pep8 --ignore=E501,E12 salt/ && echo 'Finished PEP-8 Check Cleanly' || echo 'Finished PEP-8 Check With Errors'"
+env:
+ global:
+ - VERSION=leap42sp1 FLAVOR=devel NOPULL=true SALT_REPO=..
+ matrix:
+ - TARGET=suse.tests PYTEST_CFG=./configs/$TARGET/$VERSION/$FLAVOR.cfg
+ - TARGET=saltstack.unit PYTEST_CFG=./configs/$TARGET/$VERSION/default.cfg
+ - TARGET=saltstack.integration PYTEST_CFG=./configs/$TARGET/$VERSION/default.cfg
-script: "sudo -E /home/travis/virtualenv/python${TRAVIS_PYTHON_VERSION}/bin/python setup.py test --runtests-opts='--run-destructive --sysinfo -v --coverage'"
-
-after_success:
- - coveralls
+before_install:
+- git clone --depth 1 https://github.com/openSUSE/salt-toaster.git
+- cd salt-toaster
+- echo "*" > .gitignore
+- tar xfz sandbox.tar.gz
+- sandbox/bin/pip install -r requirements.txt --exists-action w --upgrade
+- VERSION=leap42sp1 FLAVOR=default NOPULL=true make build_image
+- make build_image
-notifications:
- irc:
- channels: "irc.freenode.org#salt-devel"
- on_success: change
- on_failure: change
+script: make $TARGET PYTEST_CFG=$PYTEST_CFG
--
2.11.0

View File

@ -1,50 +0,0 @@
From df521307c4bff21ab7891e0086fc4dc8b7c2207c Mon Sep 17 00:00:00 2001
From: Bo Maryniuk <bo@suse.de>
Date: Mon, 18 Jan 2016 16:28:48 +0100
Subject: [PATCH] Check if byte strings are properly encoded in UTF-8
Rename keywords arguments variable to a default name.
---
salt/modules/zypper.py | 11 ++++++-----
1 file changed, 6 insertions(+), 5 deletions(-)
diff --git a/salt/modules/zypper.py b/salt/modules/zypper.py
index 53837e5e73..75e529c3f9 100644
--- a/salt/modules/zypper.py
+++ b/salt/modules/zypper.py
@@ -366,9 +366,9 @@ def info_installed(*names, **kwargs):
summary, description.
:param errors:
- Handle RPM field errors (true|false). By default, various mistakes in the textual fields are simply ignored and
- omitted from the data. Otherwise a field with a mistake is not returned, instead a 'N/A (bad UTF-8)'
- (not available, broken) text is returned.
+ Handle RPM field errors. If 'ignore' is chosen, then various mistakes are simply ignored and omitted
+        from the texts or strings. If 'report' is chosen, then a field with a mistake is not returned, instead
+ a 'N/A (broken)' (not available, broken) text is placed.
Valid attributes are:
ignore, report
@@ -381,7 +381,8 @@ def info_installed(*names, **kwargs):
salt '*' pkg.info_installed <package1> <package2> <package3> ...
salt '*' pkg.info_installed <package1> attr=version,vendor
salt '*' pkg.info_installed <package1> <package2> <package3> ... attr=version,vendor
- salt '*' pkg.info_installed <package1> <package2> <package3> ... attr=version,vendor errors=true
+ salt '*' pkg.info_installed <package1> <package2> <package3> ... attr=version,vendor errors=ignore
+ salt '*' pkg.info_installed <package1> <package2> <package3> ... attr=version,vendor errors=report
'''
ret = dict()
for pkg_name, pkg_nfo in __salt__['lowpkg.info'](*names, **kwargs).items():
@@ -395,7 +396,7 @@ def info_installed(*names, **kwargs):
else:
value_ = value.decode('UTF-8', 'ignore').encode('UTF-8', 'ignore')
if value != value_:
- value = kwargs.get('errors') and value_ or 'N/A (invalid UTF-8)'
+ value = kwargs.get('errors', 'ignore') == 'ignore' and value_ or 'N/A (invalid UTF-8)'
log.error('Package {0} has bad UTF-8 code in {1}: {2}'.format(pkg_name, key, value))
if key == 'source_rpm':
t_nfo['source'] = value
--
2.11.0

View File

@ -1,84 +0,0 @@
From 58468a451d7d87450fbc36beb99dd39b10f06d61 Mon Sep 17 00:00:00 2001
From: "Peter V. Saveliev" <peter@svinota.eu>
Date: Mon, 29 May 2017 16:30:49 +0200
Subject: [PATCH] clean up `change` attribute from interface dict
The attribute is hidden in IPDB from the high-level logics since
pyroute2 version 0.4.2.
Bug-Url: https://github.com/saltstack/salt/issues/41461
unit tests: add pyroute2 interface dict test
Bug-Url: https://github.com/saltstack/salt/pull/41487
Bug-Url: https://github.com/saltstack/salt/issues/41461
unit tests: fix absolute imports in test_pyroute2
Bug-Url: https://github.com/saltstack/salt/pull/41533
unit tests: add encoding clause into test_pyroute2
Bug-Url: https://github.com/saltstack/salt/pull/41533
unit tests: test_pyroute2 -- add skipIf
... and comments
Bug-Url: https://github.com/saltstack/salt/pull/41533
---
salt/beacons/network_settings.py | 2 +-
tests/unit/modules/test_pyroute2.py | 27 +++++++++++++++++++++++++++
2 files changed, 28 insertions(+), 1 deletion(-)
create mode 100644 tests/unit/modules/test_pyroute2.py
diff --git a/salt/beacons/network_settings.py b/salt/beacons/network_settings.py
index 5af71a0804..78c387b2f2 100644
--- a/salt/beacons/network_settings.py
+++ b/salt/beacons/network_settings.py
@@ -25,7 +25,7 @@ __virtual_name__ = 'network_settings'
ATTRS = ['family', 'txqlen', 'ipdb_scope', 'index', 'operstate', 'group',
'carrier_changes', 'ipaddr', 'neighbours', 'ifname', 'promiscuity',
'linkmode', 'broadcast', 'address', 'num_tx_queues', 'ipdb_priority',
- 'change', 'kind', 'qdisc', 'mtu', 'num_rx_queues', 'carrier', 'flags',
+ 'kind', 'qdisc', 'mtu', 'num_rx_queues', 'carrier', 'flags',
'ifi_type', 'ports']
LAST_STATS = {}
diff --git a/tests/unit/modules/test_pyroute2.py b/tests/unit/modules/test_pyroute2.py
new file mode 100644
index 0000000000..a4ccce74e8
--- /dev/null
+++ b/tests/unit/modules/test_pyroute2.py
@@ -0,0 +1,27 @@
+# -*- coding: UTF-8 -*-
+
+from __future__ import absolute_import
+
+from tests.support.unit import TestCase
+from tests.support.unit import skipIf
+from salt.beacons.network_settings import ATTRS
+try:
+ from pyroute2 import IPDB
+ HAS_PYROUTE2 = True
+except ImportError:
+ HAS_PYROUTE2 = False
+
+
+@skipIf(not HAS_PYROUTE2, 'no pyroute2 installed, skipping')
+class Pyroute2TestCase(TestCase):
+
+ def test_interface_dict_fields(self):
+ with IPDB() as ipdb:
+ for attr in ATTRS:
+ # ipdb.interfaces is a dict-like object, that
+ # contains interface definitions. Interfaces can
+ # be referenced both with indices and names.
+ #
+ # ipdb.interfaces[1] is an interface with index 1,
+ # that is the loopback interface.
+ self.assertIn(attr, ipdb.interfaces[1])
--
2.13.0

View File

@ -1,28 +0,0 @@
From 7eeddadbf5ad309045b77762ac9f2f526af83b03 Mon Sep 17 00:00:00 2001
From: Michael Calmer <mc@suse.de>
Date: Fri, 4 Mar 2016 09:51:22 +0100
Subject: [PATCH] do not generate a date in a comment to prevent rebuilds
(bsc#969407)
---
setup.py | 3 +--
1 file changed, 1 insertion(+), 2 deletions(-)
diff --git a/setup.py b/setup.py
index f9e9437e71..2356b2ada1 100755
--- a/setup.py
+++ b/setup.py
@@ -667,8 +667,7 @@ class Clean(clean):
INSTALL_VERSION_TEMPLATE = '''\
-# This file was auto-generated by salt's setup on \
-{date:%A, %d %B %Y @ %H:%m:%S UTC}.
+# This file was auto-generated by salt's setup
from salt.version import SaltStackVersion
--
2.11.0

View File

@ -1,132 +0,0 @@
From 067ef07513d86093fd5373ac62a4d5eb39bcc5b4 Mon Sep 17 00:00:00 2001
From: Bo Maryniuk <bo@suse.de>
Date: Tue, 16 May 2017 14:42:07 +0200
Subject: [PATCH] Fix grain for os_family on SUSE series
---
doc/topics/spm/spm_formula.rst | 2 +-
salt/modules/apache.py | 2 +-
salt/modules/inspectlib/collector.py | 6 +++---
salt/modules/iptables.py | 2 +-
salt/modules/localemod.py | 6 +++---
tests/integration/modules/pkg.py | 2 +-
6 files changed, 10 insertions(+), 10 deletions(-)
diff --git a/doc/topics/spm/spm_formula.rst b/doc/topics/spm/spm_formula.rst
index 2493527a22..aa53098e2e 100644
--- a/doc/topics/spm/spm_formula.rst
+++ b/doc/topics/spm/spm_formula.rst
@@ -11,7 +11,7 @@ describes the package. An example of this file is:
name: apache
os: RedHat, Debian, Ubuntu, SUSE, FreeBSD
- os_family: RedHat, Debian, SUSE, FreeBSD
+ os_family: RedHat, Debian, Suse, FreeBSD
version: 201506
release: 2
summary: Formula for installing Apache
diff --git a/salt/modules/apache.py b/salt/modules/apache.py
index ad502df530..5d2261175a 100644
--- a/salt/modules/apache.py
+++ b/salt/modules/apache.py
@@ -52,7 +52,7 @@ def _detect_os():
os_family = __grains__['os_family']
if os_family == 'RedHat':
return 'apachectl'
- elif os_family == 'Debian' or os_family == 'SUSE':
+ elif os_family == 'Debian' or os_family == 'Suse':
return 'apache2ctl'
else:
return 'apachectl'
diff --git a/salt/modules/inspectlib/collector.py b/salt/modules/inspectlib/collector.py
index 332c6efdec..b87a46b82f 100644
--- a/salt/modules/inspectlib/collector.py
+++ b/salt/modules/inspectlib/collector.py
@@ -87,7 +87,7 @@ class Inspector(EnvLoader):
'''
if self.grains_core.os_data().get('os_family') == 'Debian':
return self.__get_cfg_pkgs_dpkg()
- elif self.grains_core.os_data().get('os_family') in ['SUSE', 'redhat']:
+ elif self.grains_core.os_data().get('os_family') in ['Suse', 'redhat']:
return self.__get_cfg_pkgs_rpm()
else:
return dict()
@@ -163,7 +163,7 @@ class Inspector(EnvLoader):
if self.grains_core.os_data().get('os_family') == 'Debian':
cfg_data = salt.utils.to_str(self._syscall("dpkg", None, None, '--verify',
pkg_name)[0]).split(os.linesep)
- elif self.grains_core.os_data().get('os_family') in ['SUSE', 'redhat']:
+ elif self.grains_core.os_data().get('os_family') in ['Suse', 'redhat']:
cfg_data = salt.utils.to_str(self._syscall("rpm", None, None, '-V', '--nodeps', '--nodigest',
'--nosignature', '--nomtime', '--nolinkto',
pkg_name)[0]).split(os.linesep)
@@ -240,7 +240,7 @@ class Inspector(EnvLoader):
'''
if self.grains_core.os_data().get('os_family') == 'Debian':
return self.__get_managed_files_dpkg()
- elif self.grains_core.os_data().get('os_family') in ['SUSE', 'redhat']:
+ elif self.grains_core.os_data().get('os_family') in ['Suse', 'redhat']:
return self.__get_managed_files_rpm()
return list(), list(), list()
diff --git a/salt/modules/iptables.py b/salt/modules/iptables.py
index 322553d285..b1823e891a 100644
--- a/salt/modules/iptables.py
+++ b/salt/modules/iptables.py
@@ -80,7 +80,7 @@ def _conf(family='ipv4'):
return '/var/lib/ip6tables/rules-save'
else:
return '/var/lib/iptables/rules-save'
- elif __grains__['os_family'] == 'SUSE':
+ elif __grains__['os_family'] == 'Suse':
# SuSE does not seem to use separate files for IPv4 and IPv6
return '/etc/sysconfig/scripts/SuSEfirewall2-custom'
else:
diff --git a/salt/modules/localemod.py b/salt/modules/localemod.py
index 0bb8690fcc..b805cd429f 100644
--- a/salt/modules/localemod.py
+++ b/salt/modules/localemod.py
@@ -132,7 +132,7 @@ def get_locale():
return params.get('LANG', '')
elif 'RedHat' in __grains__['os_family']:
cmd = 'grep "^LANG=" /etc/sysconfig/i18n'
- elif 'SUSE' in __grains__['os_family']:
+ elif 'Suse' in __grains__['os_family']:
cmd = 'grep "^RC_LANG" /etc/sysconfig/language'
elif 'Debian' in __grains__['os_family']:
# this block only applies to Debian without systemd
@@ -172,7 +172,7 @@ def set_locale(locale):
'LANG="{0}"'.format(locale),
append_if_not_found=True
)
- elif 'SUSE' in __grains__['os_family']:
+ elif 'Suse' in __grains__['os_family']:
if not __salt__['file.file_exists']('/etc/sysconfig/language'):
__salt__['file.touch']('/etc/sysconfig/language')
__salt__['file.replace'](
@@ -261,7 +261,7 @@ def gen_locale(locale, **kwargs):
on_debian = __grains__.get('os') == 'Debian'
on_ubuntu = __grains__.get('os') == 'Ubuntu'
on_gentoo = __grains__.get('os_family') == 'Gentoo'
- on_suse = __grains__.get('os_family') == 'SUSE'
+ on_suse = __grains__.get('os_family') == 'Suse'
on_solaris = __grains__.get('os_family') == 'Solaris'
if on_solaris: # all locales are pre-generated
diff --git a/tests/integration/modules/pkg.py b/tests/integration/modules/pkg.py
index d00d93bd6e..7dd7f1330c 100644
--- a/tests/integration/modules/pkg.py
+++ b/tests/integration/modules/pkg.py
@@ -235,7 +235,7 @@ class PkgModuleTest(integration.ModuleCase,
keys = ret.keys()
self.assertIn('rpm', keys)
self.assertIn('yum', keys)
- elif os_family == 'SUSE':
+ elif os_family == 'Suse':
ret = self.run_function(func, ['less', 'zypper'])
keys = ret.keys()
self.assertIn('less', keys)
--
2.13.0

View File

@ -1,26 +0,0 @@
From 1d5e0e1c9d2ca8bb01cfe781289b4b03e0ce4c1e Mon Sep 17 00:00:00 2001
From: Michael Calmer <mc@suse.de>
Date: Fri, 19 May 2017 14:07:08 +0200
Subject: [PATCH] Fix os_family case in unittest
---
tests/unit/modules/inspect_collector_test.py | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/tests/unit/modules/inspect_collector_test.py b/tests/unit/modules/inspect_collector_test.py
index 9105670526..293de1ad51 100644
--- a/tests/unit/modules/inspect_collector_test.py
+++ b/tests/unit/modules/inspect_collector_test.py
@@ -127,7 +127,7 @@ gcc-6-base:i386
inspector.grains_core.os_data = MagicMock()
inspector.grains_core.os_data().get = MagicMock(return_value='Debian')
self.assertEqual(inspector._get_cfg_pkgs(), 'dpkg')
- inspector.grains_core.os_data().get = MagicMock(return_value='SUSE')
+ inspector.grains_core.os_data().get = MagicMock(return_value='Suse')
self.assertEqual(inspector._get_cfg_pkgs(), 'rpm')
inspector.grains_core.os_data().get = MagicMock(return_value='redhat')
self.assertEqual(inspector._get_cfg_pkgs(), 'rpm')
--
2.13.0

View File

@ -1,31 +0,0 @@
From 4b59b328de2653310f845352c099efc25c2cafdf Mon Sep 17 00:00:00 2001
From: Erik Johnson <palehose@gmail.com>
Date: Wed, 1 Mar 2017 10:19:33 -0600
Subject: [PATCH] Fix regression in file.get_managed, add unit tests
This is no longer needed since we're invoking the state module directly
and not via the state compiler.
* Fix regression in file.get_managed when skip_verify=True
* Add integration tests for remote file sources
* Remove next(iter()) extraction
---
tests/integration/states/file.py | 1 +
1 file changed, 1 insertion(+)
diff --git a/tests/integration/states/file.py b/tests/integration/states/file.py
index aad7fac441..54e6196c80 100644
--- a/tests/integration/states/file.py
+++ b/tests/integration/states/file.py
@@ -2404,6 +2404,7 @@ class FileTest(integration.ModuleCase, integration.SaltReturnAssertsMixIn):
PORT = 9999
FILE_SOURCE = 'http://localhost:{0}/grail/scene33'.format(PORT)
FILE_HASH = 'd2feb3beb323c79fc7a0f44f1408b4a3'
+STATE_DIR = os.path.join(integration.FILES, 'file', 'base')
class RemoteFileTest(integration.ModuleCase, integration.SaltReturnAssertsMixIn):
--
2.11.0

View File

@ -1,30 +0,0 @@
From 54fa5d2b6d47d242e98e9a7f4cc597e03084d4d2 Mon Sep 17 00:00:00 2001
From: Michael Calmer <mc@suse.de>
Date: Tue, 21 Jun 2016 13:12:48 +0200
Subject: [PATCH] fix salt --summary to count not responding minions
correctly (bsc#972311)
In case a minion is not responding a dict is returned instead of a string.
---
salt/cli/salt.py | 4 +++-
1 file changed, 3 insertions(+), 1 deletion(-)
diff --git a/salt/cli/salt.py b/salt/cli/salt.py
index 1fc9a83508..88ac65e7c2 100644
--- a/salt/cli/salt.py
+++ b/salt/cli/salt.py
@@ -281,7 +281,9 @@ class SaltCMD(parsers.SaltCMDOptionParser):
not_connected_minions = []
failed_minions = []
for each_minion in ret:
- minion_ret = ret[each_minion].get('ret')
+ minion_ret = ret[each_minion]
+ if (isinstance(minion_ret, dict) and 'ret' in minion_ret):
+ minion_ret = ret[each_minion].get('ret')
if (
isinstance(minion_ret, string_types)
and minion_ret.startswith("Minion did not return")
--
2.11.0

View File

@ -1,121 +0,0 @@
From 2bc2078d8549c277ba40836de4e36953af9efc78 Mon Sep 17 00:00:00 2001
From: Michael Calmer <mc@suse.de>
Date: Thu, 18 May 2017 19:46:50 +0200
Subject: [PATCH] fix setting language on SUSE systems
---
salt/modules/localemod.py | 28 +++++++++++++++-------------
tests/unit/modules/localemod_test.py | 32 +++++++++++++++++---------------
2 files changed, 32 insertions(+), 28 deletions(-)
diff --git a/salt/modules/localemod.py b/salt/modules/localemod.py
index b805cd429f..272aff4cc2 100644
--- a/salt/modules/localemod.py
+++ b/salt/modules/localemod.py
@@ -127,13 +127,14 @@ def get_locale():
salt '*' locale.get_locale
'''
cmd = ''
- if salt.utils.systemd.booted(__context__):
+ if 'Suse' in __grains__['os_family']:
+ # this block applies to all SUSE systems - also with systemd
+ cmd = 'grep "^RC_LANG" /etc/sysconfig/language'
+ elif salt.utils.systemd.booted(__context__):
params = _parse_dbus_locale() if HAS_DBUS else _parse_localectl()
return params.get('LANG', '')
elif 'RedHat' in __grains__['os_family']:
cmd = 'grep "^LANG=" /etc/sysconfig/i18n'
- elif 'Suse' in __grains__['os_family']:
- cmd = 'grep "^RC_LANG" /etc/sysconfig/language'
elif 'Debian' in __grains__['os_family']:
# this block only applies to Debian without systemd
cmd = 'grep "^LANG=" /etc/default/locale'
@@ -161,7 +162,17 @@ def set_locale(locale):
salt '*' locale.set_locale 'en_US.UTF-8'
'''
- if salt.utils.systemd.booted(__context__):
+ if 'Suse' in __grains__['os_family']:
+ # this block applies to all SUSE systems - also with systemd
+ if not __salt__['file.file_exists']('/etc/sysconfig/language'):
+ __salt__['file.touch']('/etc/sysconfig/language')
+ __salt__['file.replace'](
+ '/etc/sysconfig/language',
+ '^RC_LANG=.*',
+ 'RC_LANG="{0}"'.format(locale),
+ append_if_not_found=True
+ )
+ elif salt.utils.systemd.booted(__context__):
return _localectl_set(locale)
elif 'RedHat' in __grains__['os_family']:
if not __salt__['file.file_exists']('/etc/sysconfig/i18n'):
@@ -172,15 +183,6 @@ def set_locale(locale):
'LANG="{0}"'.format(locale),
append_if_not_found=True
)
- elif 'Suse' in __grains__['os_family']:
- if not __salt__['file.file_exists']('/etc/sysconfig/language'):
- __salt__['file.touch']('/etc/sysconfig/language')
- __salt__['file.replace'](
- '/etc/sysconfig/language',
- '^RC_LANG=.*',
- 'RC_LANG="{0}"'.format(locale),
- append_if_not_found=True
- )
elif 'Debian' in __grains__['os_family']:
# this block only applies to Debian without systemd
update_locale = salt.utils.which('update-locale')
diff --git a/tests/unit/modules/localemod_test.py b/tests/unit/modules/localemod_test.py
index b5cedfd8a6..069a3c6503 100644
--- a/tests/unit/modules/localemod_test.py
+++ b/tests/unit/modules/localemod_test.py
@@ -44,19 +44,20 @@ class LocalemodTestCase(TestCase):
Test for Get the current system locale
'''
with patch.dict(localemod.__context__, {'salt.utils.systemd.booted': True}):
- localemod.HAS_DBUS = True
- with patch.object(localemod,
- '_parse_dbus_locale',
- return_value={'LANG': 'A'}):
- self.assertEqual('A', localemod.get_locale())
- localemod._parse_dbus_locale.assert_called_once_with()
-
- localemod.HAS_DBUS = False
- with patch.object(localemod,
- '_parse_localectl',
- return_value={'LANG': 'A'}):
- self.assertEqual('A', localemod.get_locale())
- localemod._parse_localectl.assert_called_once_with()
+ with patch.dict(localemod.__grains__, {'os_family': ['Unknown']}):
+ localemod.HAS_DBUS = True
+ with patch.object(localemod,
+ '_parse_dbus_locale',
+ return_value={'LANG': 'A'}):
+ self.assertEqual('A', localemod.get_locale())
+ localemod._parse_dbus_locale.assert_called_once_with()
+
+ localemod.HAS_DBUS = False
+ with patch.object(localemod,
+ '_parse_localectl',
+ return_value={'LANG': 'A'}):
+ self.assertEqual('A', localemod.get_locale())
+ localemod._parse_localectl.assert_called_once_with()
with patch.dict(localemod.__context__, {'salt.utils.systemd.booted': False}):
with patch.dict(localemod.__grains__, {'os_family': ['Gentoo']}):
@@ -82,8 +83,9 @@ class LocalemodTestCase(TestCase):
Test for Sets the current system locale
'''
with patch.dict(localemod.__context__, {'salt.utils.systemd.booted': True}):
- with patch.object(localemod, '_localectl_set', return_value=True):
- self.assertTrue(localemod.set_locale('l'))
+ with patch.dict(localemod.__grains__, {'os_family': ['Unknown']}):
+ with patch.object(localemod, '_localectl_set', return_value=True):
+ self.assertTrue(localemod.set_locale('l'))
with patch.dict(localemod.__context__, {'salt.utils.systemd.booted': False}):
with patch.dict(localemod.__grains__, {'os_family': ['Gentoo']}):
--
2.13.0

View File

@ -1,41 +0,0 @@
From 36ab0b6a6f8830404e4cd1a9db1918d6703ed270 Mon Sep 17 00:00:00 2001
From: Graham Hayes <graham@hayes.ie>
Date: Mon, 22 May 2017 09:22:36 -0400
Subject: [PATCH] Fixed issue with parsing of master minion returns when
batching is enabled. (#30)
---
salt/states/saltmod.py | 9 +++++----
1 file changed, 5 insertions(+), 4 deletions(-)
diff --git a/salt/states/saltmod.py b/salt/states/saltmod.py
index a9d1f6be93..35cd01fb4e 100644
--- a/salt/states/saltmod.py
+++ b/salt/states/saltmod.py
@@ -300,7 +300,7 @@ def state(
except KeyError:
m_state = False
if m_state:
- m_state = salt.utils.check_state_result(m_ret)
+ m_state = salt.utils.check_state_result(m_ret, recurse=True)
if not m_state:
if minion not in fail_minions:
@@ -309,9 +309,10 @@ def state(
continue
try:
for state_item in six.itervalues(m_ret):
- if 'changes' in state_item and state_item['changes']:
- changes[minion] = m_ret
- break
+ if isinstance(state_item, dict):
+ if 'changes' in state_item and state_item['changes']:
+ changes[minion] = m_ret
+ break
else:
no_change.add(minion)
except AttributeError:
--
2.13.0

View File

@ -1,26 +0,0 @@
From 9d303be7e9f856ab41bec24e6dd83a00a1a7a04e Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Piotr=20Ka=C5=BAmierczak?= <me@piotrkazmierczak.com>
Date: Wed, 3 May 2017 18:38:15 +0200
Subject: [PATCH] fixing beacons.list integration test failure
---
tests/integration/modules/beacons.py | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/tests/integration/modules/beacons.py b/tests/integration/modules/beacons.py
index e04aa92dd6..ee1e81b898 100644
--- a/tests/integration/modules/beacons.py
+++ b/tests/integration/modules/beacons.py
@@ -66,7 +66,7 @@ class BeaconsTest(integration.ModuleCase):
@classmethod
def tearDownClass(cls):
- if os.path.isfile(cls.beacons_config_file_path):
+ if cls.beacons_config_file_path and os.path.isfile(cls.beacons_config_file_path):
os.unlink(cls.beacons_config_file_path)
def setUp(self):
--
2.11.0

View File

@ -1,3 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:0899b89ef230b42097f7c147a9babb30d65eac7968bd05318eac42ec9d8a7ec9
size 5486695
oid sha256:6b82846ed4005af7655290cd9bdc62c1d4912d4809acaca2ff37ebff0960b18c
size 1250836

View File

@ -1,33 +0,0 @@
From 2ac331ef9c2e28bb133bda04a5b3f667aff66c6c Mon Sep 17 00:00:00 2001
From: Silvio Moioli <smoioli@suse.de>
Date: Mon, 15 May 2017 07:44:05 +0200
Subject: [PATCH] rest_cherrypy: remove sleep call
---
salt/netapi/rest_cherrypy/app.py | 2 --
1 file changed, 2 deletions(-)
diff --git a/salt/netapi/rest_cherrypy/app.py b/salt/netapi/rest_cherrypy/app.py
index 3e89ff7882..221865a2ee 100644
--- a/salt/netapi/rest_cherrypy/app.py
+++ b/salt/netapi/rest_cherrypy/app.py
@@ -465,7 +465,6 @@ import json
import os
import signal
import tarfile
-import time
from multiprocessing import Process, Pipe
# Import third-party libs
@@ -2238,7 +2237,6 @@ class WebsocketEndpoint(object):
logger.error(
"Error: Salt event has non UTF-8 data:\n{0}"
.format(data))
- time.sleep(0.1)
parent_pipe, child_pipe = Pipe()
handler.pipe = parent_pipe
--
2.13.0

View File

@ -1,28 +0,0 @@
From 7bbbd3b6ebaf3988a4f97b905040b56be065f201 Mon Sep 17 00:00:00 2001
From: Michael Calmer <mc@suse.de>
Date: Fri, 29 Jul 2016 10:50:21 +0200
Subject: [PATCH] Run salt-api as user salt (bsc#990029)
---
pkg/salt-api.service | 4 ++--
1 file changed, 2 insertions(+), 2 deletions(-)
diff --git a/pkg/salt-api.service b/pkg/salt-api.service
index c3e67d510c..9be2cb8ee6 100644
--- a/pkg/salt-api.service
+++ b/pkg/salt-api.service
@@ -3,8 +3,8 @@ Description=The Salt API
After=network.target
[Service]
-Type=notify
-NotifyAccess=all
+User=salt
+Type=simple
LimitNOFILE=8192
ExecStart=/usr/bin/salt-api
TimeoutStopSec=3
--
2.11.0

View File

@ -1,47 +0,0 @@
From 7641133d3d95d1f13116aabe0ec7b280ad7891c4 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Klaus=20K=C3=A4mpf?= <kkaempf@suse.de>
Date: Wed, 20 Jan 2016 11:01:06 +0100
Subject: [PATCH] Run salt master as dedicated salt user
* Minion runs always as a root
---
conf/master | 3 ++-
pkg/salt-common.logrotate | 2 ++
2 files changed, 4 insertions(+), 1 deletion(-)
diff --git a/conf/master b/conf/master
index d89da74d58..fa29ca5827 100644
--- a/conf/master
+++ b/conf/master
@@ -25,7 +25,8 @@
# permissions to allow the specified user to run the master. The exception is
# the job cache, which must be deleted if this user is changed. If the
# modified files cause conflicts, set verify_env to False.
-#user: root
+user: salt
+syndic_user: salt
# The port used by the communication interface. The ret (return) port is the
# interface used for the file server, authentication, job returns, etc.
diff --git a/pkg/salt-common.logrotate b/pkg/salt-common.logrotate
index 3cd002308e..0d99d1b801 100644
--- a/pkg/salt-common.logrotate
+++ b/pkg/salt-common.logrotate
@@ -1,4 +1,5 @@
/var/log/salt/master {
+ su salt salt
weekly
missingok
rotate 7
@@ -15,6 +16,7 @@
}
/var/log/salt/key {
+ su salt salt
weekly
missingok
rotate 7
--
2.11.0

View File

@ -1,3 +0,0 @@
version https://git-lfs.github.com/spec/v1
oid sha256:fb4109b28d3fd898291c162e42ef843fbd4c3e57244075670fa8f366e705765f
size 9360198

File diff suppressed because it is too large Load Diff

244
salt.spec
View File

@ -1,7 +1,7 @@
#
# spec file for package salt
#
# Copyright (c) 2017 SUSE LINUX GmbH, Nuernberg, Germany.
# Copyright (c) 2016 SUSE LINUX GmbH, Nuernberg, Germany.
#
# All modifications and additions to the file contributed by third parties
# remain the property of their copyright owners, unless otherwise agreed
@ -16,7 +16,7 @@
#
%if 0%{?suse_version} > 1210
%if 0%{?suse_version} > 1210 || 0%{?rhel} >= 7 || 0%{?fedora}
%bcond_without systemd
%else
%bcond_with systemd
@ -36,84 +36,19 @@
%bcond_with builddocs
Name: salt
Version: 2016.11.4
Version: 2017.7.0
Release: 0
Summary: A parallel remote execution system
License: Apache-2.0
Group: System/Management
Url: http://saltstack.org/
# Git: https://github.com/openSUSE/salt.git
Source0: https://pypi.io/packages/source/s/%{name}/%{name}-%{version}.tar.gz
Source: https://github.com/saltstack/salt/archive/v2017.7.0.tar.gz
Source1: README.SUSE
Source2: salt-tmpfiles.d
Source3: html.tar.bz2
Source4: update-documentation.sh
Source5: travis.yml
# We do not upstream this patch because this is something that we have to fix on our side
# PATCH-FIX-OPENSUSE use-forking-daemon.patch tserong@suse.com -- We don't have python-systemd, so notify can't work
Patch1: tserong-suse.com-we-don-t-have-python-systemd-so-not.patch
# We do not upstream this patch because this is suse custom configuration
# PATCH-FIX-OPENSUSE use-salt-user-for-master.patch -- Run salt master as dedicated salt user
Patch2: run-salt-master-as-dedicated-salt-user.patch
# We do not upstream this patch because it has been fixed upstream
# (see: https://trello.com/c/wh96lCD4/1528-get-rid-of-0003-check-if-byte-strings-are-properly-encoded-in-utf-8-patch-in-the-salt-package)
# PATCH-FIX-OPENSUSE https://github.com/saltstack/salt/pull/30424
Patch3: check-if-byte-strings-are-properly-encoded-in-utf-8.patch
# We do not upstream this patch because the issue is on our side
# PATCH-FIX-OPENSUSE prevent rebuilds in OBS
Patch4: do-not-generate-a-date-in-a-comment-to-prevent-rebui.patch
# We do not upstream this because this is for SUSE only (15.08.2016) if Zypper has been used outside the Salt infrastructure
# PATCH-FIX-OPENSUSE Generate events from the Salt minion,
Patch5: add-zypp-notify-plugin.patch
# PATCH-FIX_OPENSUSE
Patch6: run-salt-api-as-user-salt-bsc-990029.patch
# PATCH-FIX_OPENSUSE
Patch7: change-travis-configuration-file-to-use-salt-toaster.patch
# PATCH-FIX_UPSTREAM https://github.com/saltstack/salt/pull/37856 (pending to include in 2016.11)
Patch8: setting-up-os-grains-for-sles-expanded-support-suse-.patch
# PATCH-FIX_UPSTREAM https://github.com/saltstack/salt/pull/34165
Patch9: fix-salt-summary-to-count-not-responding-minions-cor.patch
# PATCH-FIX_OPENSUSE
Patch10: avoid-failures-on-sles-12-sp2-because-of-new-systemd.patch
# PATCH-FIX_OPENSUSE
Patch11: add-yum-plugin.patch
# PATCH-FIX_OPENSUSE
Patch12: add-ssh-option-to-salt-ssh.patch
# PATCH-FIX_UPSTREAM https://github.com/saltstack/salt/pull/38806
Patch13: add-a-salt-minion-service-control-file.patch
# PATCH-FIX-OPENSUSE
Patch14: add-options-for-dockerng.patch
# PATCH-FIX_UPSTREAM https://github.com/saltstack/salt/pull/39762
Patch15: fix-regression-in-file.get_managed-add-unit-tests.patch
# PATCH-FIX_OPENSUSE
Patch16: translate-variable-arguments-if-they-contain-hidden-.patch
# PATCH-FIX_OPENSUSE
Patch17: special-salt-minion.service-file-for-rhel7.patch
# PATCH-FIX_UPSTREAM https://github.com/saltstack/salt/pull/40266
Patch18: adding-support-for-installing-patches-in-yum-dnf-exe.patch
# PATCH-FIX_UPSTREAM https://github.com/saltstack/salt/pull/40761
Patch19: search-the-entire-cache_dir-because-storage-paths-ch.patch
# PATCH-FIX_OPENSUSE
Patch20: fixing-beacons.list-integration-test-failure.patch
# PATCH-FIX_UPSTREAM https://github.com/saltstack/salt/pull/40817
Patch21: add-unit-test-for-skip-false-values-from-preferred_i.patch
# PATCH-FIX_UPSTREAM https://github.com/saltstack/salt/pull/40852
Patch22: use-correct-grain-constants-for-timezone.patch
# PATCH-FIX_OPENSUSE (upstream coming soon)
Patch23: fix-grain-for-os_family-on-suse-series.patch
# PATCH-FIX_UPSTREAM https://github.com/saltstack/salt/pull/41269
Patch24: bugfix-unable-to-use-127-as-hostname.patch
# PATCH-FIX_UPSTREAM https://github.com/saltstack/salt/pull/41336
Patch25: fix-setting-language-on-suse-systems.patch
Patch26: fix-os_family-case-in-unittest.patch
# PATCH-FIX_UPSTREAM https://github.com/saltstack/salt/pull/41235
Patch27: rest_cherrypy-remove-sleep-call.patch
# PATCH-FIX_UPSTREAM https://github.com/saltstack/salt/pull/40905
Patch28: fixed-issue-with-parsing-of-master-minion-returns-wh.patch
# PATCH-FIX_UPSTREAM https://github.com/saltstack/salt/pull/41533
Patch29: clean-up-change-attribute-from-interface-dict.patch
BuildRoot: %{_tmppath}/%{name}-%{version}-build
BuildRequires: logrotate
BuildRequires: python
@ -124,18 +59,14 @@ BuildRequires: python-jinja2
%else
BuildRequires: python-Jinja2
%endif
BuildRequires: python-MarkupSafe
BuildRequires: python-PyYAML
BuildRequires: python-futures >= 2.0
BuildRequires: python-markupsafe
BuildRequires: python-msgpack-python > 0.3
BuildRequires: python-psutil
BuildRequires: python-requests >= 1.0.0
BuildRequires: python-tornado >= 4.2.1
# requirements/opt.txt (not all)
# BuildRequires: python-MySQL-python
# BuildRequires: python-timelib
# BuildRequires: python-gnupg
# BuildRequires: python-cherrypy >= 3.2.2
BuildRequires: python-yaml
# requirements/zeromq.txt
BuildRequires: python-pycrypto >= 2.6.1
BuildRequires: python-pyzmq >= 2.2.0
@ -170,6 +101,7 @@ Requires(pre): dbus-1
Requires(pre): dbus
%endif
Requires: procps
Requires: logrotate
Requires: python
#
@ -178,30 +110,30 @@ Requires: python-certifi
%endif
# requirements/base.txt
%if 0%{?rhel}
Requires: python-jinja2
Requires: yum
Requires: python-jinja2
Requires: yum
%if 0%{?rhel} == 6
Requires: yum-plugin-security
Requires: yum-plugin-security
%endif
%else
Requires: python-Jinja2
Requires: python-Jinja2
%endif
Requires: python-MarkupSafe
Requires: python-PyYAML
Requires: python-futures >= 2.0
Requires: python-markupsafe
Requires: python-msgpack-python > 0.3
Requires: python-psutil
Requires: python-requests >= 1.0.0
Requires: python-tornado >= 4.2.1
Requires: python-yaml
%if 0%{?suse_version}
# required for zypper.py
Requires: rpm-python
Requires(pre): libzypp(plugin:system) >= 0
Requires: zypp-plugin-python
# requirements/opt.txt (not all)
Recommends: python-MySQL-python
Recommends: python-timelib
Recommends: python-gnupg
# Suggests: python-MySQL-python ## Disabled for now, originally Recommended
Suggests: python-timelib
Suggests: python-gnupg
# requirements/zeromq.txt
%endif
Requires: python-pycrypto >= 2.6.1
@ -210,7 +142,7 @@ Requires: python-pyzmq >= 2.2.0
%if 0%{?suse_version}
# python-xml is part of python-base in all rhel versions
Requires: python-xml
Recommends: python-Mako
Suggests: python-Mako
Recommends: python-netaddr
%endif
@ -455,40 +387,6 @@ Zsh command line completion support for %{name}.
%setup -q -n salt-%{version}
cp %{S:1} .
cp %{S:5} ./.travis.yml
%patch1 -p1
%patch2 -p1
%patch3 -p1
%patch4 -p1
# This is SUSE-only patch
%if 0%{?suse_version}
%patch5 -p1
%endif
%patch6 -p1
%patch7 -p1
%patch8 -p1
%patch9 -p1
%patch10 -p1
%patch11 -p1
%patch12 -p1
%patch13 -p1
%patch14 -p1
%patch15 -p1
%patch16 -p1
%patch17 -p1
%patch18 -p1
%patch19 -p1
%patch20 -p1
%patch21 -p1
%patch22 -p1
%patch23 -p1
%patch24 -p1
%patch25 -p1
%patch26 -p1
%patch27 -p1
%patch28 -p1
%patch29 -p1
%build
%{__python} setup.py --salt-transport=both build
@ -550,23 +448,27 @@ install -Dd -m 0750 %{buildroot}%{_sysconfdir}/salt/pki/minion
## Install Zypper plugins only on SUSE machines
%if 0%{?suse_version}
install -Dd -m 0750 %{buildroot}%{_prefix}/lib/zypp/plugins/commit
%{__install} scripts/zypper/plugins/commit/zyppnotify %{buildroot}%{_prefix}/lib/zypp/plugins/commit/zyppnotify
%{__install} scripts/suse/zypper/plugins/commit/zyppnotify %{buildroot}%{_prefix}/lib/zypp/plugins/commit/zyppnotify
%endif
# Install Yum plugins only on RH machines
%if 0%{?fedora} || 0%{?rhel}
install -Dd %{buildroot}%{_prefix}/share/yum-plugins
install -Dd %{buildroot}/etc/yum/pluginconf.d
%{__install} scripts/yum/plugins/yumnotify.py %{buildroot}%{_prefix}/share/yum-plugins
%{__install} scripts/yum/plugins/yumnotify.conf %{buildroot}/etc/yum/pluginconf.d
%{__install} scripts/suse/yum/plugins/yumnotify.py %{buildroot}%{_prefix}/share/yum-plugins
%{__install} scripts/suse/yum/plugins/yumnotify.conf %{buildroot}/etc/yum/pluginconf.d
%endif
## install init and systemd scripts
%if %{with systemd}
install -Dpm 0644 pkg/salt-master.service %{buildroot}%{_unitdir}/salt-master.service
%if 0%{?suse_version}
install -Dpm 0644 pkg/suse/salt-minion.service %{buildroot}%{_unitdir}/salt-minion.service
%else
install -Dpm 0644 pkg/suse/salt-minion.service.rhel7 %{buildroot}%{_unitdir}/salt-minion.service
%endif
install -Dpm 0644 pkg/salt-syndic.service %{buildroot}%{_unitdir}/salt-syndic.service
install -Dpm 0644 pkg/salt-api.service %{buildroot}%{_unitdir}/salt-api.service
install -Dpm 0644 pkg/suse/salt-api.service %{buildroot}%{_unitdir}/salt-api.service
install -Dpm 0644 pkg/salt-proxy@.service %{buildroot}%{_unitdir}/salt-proxy@.service
ln -s service %{buildroot}%{_sbindir}/rcsalt-master
ln -s service %{buildroot}%{_sbindir}/rcsalt-syndic
@ -586,6 +488,11 @@ ln -sf %{_initddir}/salt-minion %{buildroot}%{_sbindir}/rcsalt-minion
ln -sf %{_initddir}/salt-api %{buildroot}%{_sbindir}/rcsalt-api
%endif
## Install sysV salt-minion watchdog for SLES11 and RHEL6
%if 0%{?rhel} == 6 || 0%{?suse_version} == 1110
install -Dpm 0755 scripts/suse/watchdog/salt-daemon-watcher %{buildroot}%{_bindir}/salt-daemon-watcher
%endif
#
## install config files
install -Dpm 0640 conf/minion %{buildroot}%{_sysconfdir}/salt/minion
@ -596,8 +503,12 @@ install -Dpm 0640 conf/cloud %{buildroot}%{_sysconfdir}/salt/cloud
install -Dpm 0640 conf/cloud.profiles %{buildroot}%{_sysconfdir}/salt/cloud.profiles
install -Dpm 0640 conf/cloud.providers %{buildroot}%{_sysconfdir}/salt/cloud.providers
#
## install logrotate file
## install logrotate file (for RHEL6 we use without sudo)
%if 0%{?rhel} > 6 || 0%{?suse_version}
install -Dpm 0644 pkg/suse/salt-common.logrotate %{buildroot}%{_sysconfdir}/logrotate.d/salt
%else
install -Dpm 0644 pkg/salt-common.logrotate %{buildroot}%{_sysconfdir}/logrotate.d/salt
%endif
#
## install SuSEfirewall2 rules
install -Dpm 0644 pkg/suse/salt.SuSEfirewall2 %{buildroot}%{_sysconfdir}/sysconfig/SuSEfirewall2.d/services/salt
@ -628,7 +539,7 @@ python setup.py test --runtests-opts=-u
%pre
S_HOME="/var/lib/salt"
S_PHOME="/srv/salt"
getent passwd salt | grep $S_PHOME >/dev/null && sed -i "s:$S_PHOME:$S_HOME:g" /etc/passwd
getent passwd salt | grep $S_PHOME >/dev/null && usermod -d $S_HOME salt
getent group salt >/dev/null || %{_sbindir}/groupadd -r salt
getent passwd salt >/dev/null || %{_sbindir}/useradd -r -g salt -d $S_HOME -s /bin/false -c "salt-master daemon" salt
if [[ -d "$S_PHOME/.ssh" ]]; then
@ -644,8 +555,12 @@ dbus-uuidgen --ensure
%preun proxy
%if %{with systemd}
%if 0%{?suse_version}
%service_del_preun salt-proxy@.service
%else
%systemd_preun salt-proxy@.service
%endif
%else
%if 0%{?suse_version}
%stop_on_removal salt-proxy
%endif
@ -653,14 +568,20 @@ dbus-uuidgen --ensure
%pre proxy
%if %{with systemd}
%if 0%{?suse_version}
%service_add_pre salt-proxy@.service
%endif
%endif
%post proxy
%if %{with systemd}
%if 0%{?suse_version}
%service_add_post salt-proxy@.service
%fillup_only
%else
%systemd_post salt-proxy@.service
%endif
%else
%if 0%{?suse_version}
%fillup_and_insserv
%endif
@ -668,8 +589,12 @@ dbus-uuidgen --ensure
%postun proxy
%if %{with systemd}
%if 0%{?suse_version}
%service_del_postun salt-proxy@.service
%else
%systemd_postun_with_restart salt-proxy@.service
%endif
%else
%if 0%{?suse_version}
%insserv_cleanup
%restart_on_update salt-proxy
@ -678,8 +603,12 @@ dbus-uuidgen --ensure
%preun syndic
%if %{with systemd}
%if 0%{?suse_version}
%service_del_preun salt-syndic.service
%else
%systemd_preun salt-syndic.service
%endif
%else
%if 0%{?suse_version}
%stop_on_removal salt-syndic
%else
@ -692,14 +621,20 @@ dbus-uuidgen --ensure
%pre syndic
%if %{with systemd}
%if 0%{?suse_version}
%service_add_pre salt-syndic.service
%endif
%endif
%post syndic
%if %{with systemd}
%if 0%{?suse_version}
%service_add_post salt-syndic.service
%fillup_only
%else
%systemd_post salt-syndic.service
%endif
%else
%if 0%{?suse_version}
%fillup_and_insserv
%endif
@ -707,8 +642,12 @@ dbus-uuidgen --ensure
%postun syndic
%if %{with systemd}
%if 0%{?suse_version}
%service_del_postun salt-syndic.service
%else
%systemd_postun_with_restart salt-syndic.service
%endif
%else
%if 0%{?suse_version}
%insserv_cleanup
%restart_on_update salt-syndic
@ -717,8 +656,12 @@ dbus-uuidgen --ensure
%preun master
%if %{with systemd}
%if 0%{?suse_version}
%service_del_preun salt-master.service
%else
%systemd_preun salt-master.service
%endif
%else
%if 0%{?suse_version}
%stop_on_removal salt-master
%else
@ -731,8 +674,10 @@ dbus-uuidgen --ensure
%pre master
%if %{with systemd}
%if 0%{?suse_version}
%service_add_pre salt-master.service
%endif
%endif
%post master
if [ $1 -eq 2 ] ; then
@ -744,15 +689,19 @@ if [ $1 -eq 2 ] ; then
for file in master.{pem,pub} ; do
[ -f /etc/salt/pki/master/$file ] && chown salt /etc/salt/pki/master/$file
done
for dir in file_lists minions jobs ; do
[ -d /var/cache/salt/master/$dir ] && chown -R salt:salt /var/cache/salt/master/$dir
done
MASTER_CACHE_DIR="/var/cache/salt/master"
[ -d $MASTER_CACHE_DIR ] && chown -R salt:salt $MASTER_CACHE_DIR
[ -f $MASTER_CACHE_DIR/.root_key ] && chown root:root $MASTER_CACHE_DIR/.root_key
true
fi
%if %{with systemd}
%if 0%{?suse_version}
%service_add_post salt-master.service
%fillup_only
%else
%systemd_post salt-master.service
%endif
%else
%if 0%{?suse_version}
%fillup_and_insserv
%else
@ -762,8 +711,12 @@ fi
%postun master
%if %{with systemd}
%if 0%{?suse_version}
%service_del_postun salt-master.service
%else
%systemd_postun_with_restart salt-master.service
%endif
%else
%if 0%{?suse_version}
%restart_on_update salt-master
%insserv_cleanup
@ -776,8 +729,12 @@ fi
%preun minion
%if %{with systemd}
%if 0%{?suse_version}
%service_del_preun salt-minion.service
%else
%systemd_preun salt-minion.service
%endif
%else
%if 0%{?suse_version}
%stop_on_removal salt-minion
%else
@ -790,14 +747,20 @@ fi
%pre minion
%if %{with systemd}
%if 0%{?suse_version}
%service_add_pre salt-minion.service
%endif
%endif
%post minion
%if %{with systemd}
%if 0%{?suse_version}
%service_add_post salt-minion.service
%fillup_only
%else
%systemd_post salt-minion.service
%endif
%else
%if 0%{?suse_version}
%fillup_and_insserv
%else
@ -807,8 +770,12 @@ fi
%postun minion
%if %{with systemd}
%if 0%{?suse_version}
%service_del_postun salt-minion.service
%else
%systemd_postun_with_restart salt-minion.service
%endif
%else
%if 0%{?suse_version}
%insserv_cleanup
%restart_on_update salt-minion
@ -821,20 +788,30 @@ fi
%preun api
%if %{with systemd}
%if 0%{?suse_version}
%service_del_preun salt-api.service
%else
%systemd_preun salt-api.service
%endif
%else
%stop_on_removal
%endif
%pre api
%if %{with systemd}
%if 0%{?suse_version}
%service_add_pre salt-api.service
%endif
%endif
%post api
%if %{with systemd}
%if 0%{?suse_version}
%service_add_post salt-api.service
%else
%systemd_post salt-api.service
%endif
%else
%if 0%{?suse_version}
%fillup_and_insserv
%endif
@ -842,8 +819,12 @@ fi
%postun api
%if %{with systemd}
%if 0%{?suse_version}
%service_del_postun salt-api.service
%else
%systemd_postun_with_restart salt-api.service
%endif
%else
%if 0%{?suse_version}
%insserv_cleanup
%restart_on_update
@ -920,6 +901,11 @@ fi
%config(noreplace) %{_initddir}/salt-minion
%endif
## Install sysV salt-minion watchdog for SLES11 and RHEL6
%if 0%{?rhel} == 6 || 0%{?suse_version} == 1110
%{_bindir}/salt-daemon-watcher
%endif
%files proxy
%defattr(-,root,root)
%{_bindir}/salt-proxy

View File

@ -1,185 +0,0 @@
From c9eb78888326d6ca6173a8d6059e1de26884030e Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Pablo=20Su=C3=A1rez=20Hern=C3=A1ndez?=
<psuarezhernandez@suse.com>
Date: Tue, 18 Apr 2017 16:04:14 +0100
Subject: [PATCH] Search the entire CACHE_DIR because storage paths
change across versions
Prevents zero length error on Python 2.6
Fixes Zypper unit test
Enhances pkg.list_downloaded information of a package
Listing all patches instead of security ones only
Adapting Zypper test to new list_downloaded output
Fixes zypper test error after backporting
Pylint fixes
---
salt/modules/yumpkg.py | 18 +++++++++++++-----
salt/modules/zypper.py | 17 ++++++++++++-----
salt/states/pkg.py | 3 ++-
tests/unit/modules/zypper_test.py | 20 ++++++++++++++------
4 files changed, 41 insertions(+), 17 deletions(-)
diff --git a/salt/modules/yumpkg.py b/salt/modules/yumpkg.py
index f6777d770f..690d0c4e3a 100644
--- a/salt/modules/yumpkg.py
+++ b/salt/modules/yumpkg.py
@@ -18,8 +18,8 @@ Support for YUM/DNF
from __future__ import absolute_import
import contextlib
import copy
+import datetime
import fnmatch
-import glob
import itertools
import logging
import os
@@ -816,9 +816,17 @@ def list_downloaded():
CACHE_DIR = os.path.join('/var/cache/', _yum())
ret = {}
- for package_path in glob.glob(os.path.join(CACHE_DIR, '*/*/*/packages/*.rpm')):
- pkg_info = __salt__['lowpkg.bin_pkg_info'](package_path)
- ret.setdefault(pkg_info['name'], {})[pkg_info['version']] = package_path
+ for root, dirnames, filenames in os.walk(CACHE_DIR):
+ for filename in fnmatch.filter(filenames, '*.rpm'):
+ package_path = os.path.join(root, filename)
+ pkg_info = __salt__['lowpkg.bin_pkg_info'](package_path)
+ pkg_timestamp = int(os.path.getctime(package_path))
+ ret.setdefault(pkg_info['name'], {})[pkg_info['version']] = {
+ 'path': package_path,
+ 'size': os.path.getsize(package_path),
+ 'creation_date_time_t': pkg_timestamp,
+ 'creation_date_time': datetime.datetime.fromtimestamp(pkg_timestamp).isoformat(),
+ }
return ret
@@ -2804,7 +2812,7 @@ def _get_patches(installed_only=False):
'''
patches = {}
- cmd = [_yum(), '--quiet', 'updateinfo', 'list', 'security', 'all']
+ cmd = [_yum(), '--quiet', 'updateinfo', 'list', 'all']
ret = __salt__['cmd.run_stdout'](
cmd,
python_shell=False
diff --git a/salt/modules/zypper.py b/salt/modules/zypper.py
index 28087f5dbd..6055966904 100644
--- a/salt/modules/zypper.py
+++ b/salt/modules/zypper.py
@@ -15,7 +15,7 @@ Package support for openSUSE via the zypper package manager
# Import python libs
from __future__ import absolute_import
import copy
-import glob
+import fnmatch
import logging
import re
import os
@@ -1797,10 +1797,17 @@ def list_downloaded():
CACHE_DIR = '/var/cache/zypp/packages/'
ret = {}
- # Zypper storage is repository_tag/arch/package-version.rpm
- for package_path in glob.glob(os.path.join(CACHE_DIR, '*/*/*.rpm')):
- pkg_info = __salt__['lowpkg.bin_pkg_info'](package_path)
- ret.setdefault(pkg_info['name'], {})[pkg_info['version']] = package_path
+ for root, dirnames, filenames in os.walk(CACHE_DIR):
+ for filename in fnmatch.filter(filenames, '*.rpm'):
+ package_path = os.path.join(root, filename)
+ pkg_info = __salt__['lowpkg.bin_pkg_info'](package_path)
+ pkg_timestamp = int(os.path.getctime(package_path))
+ ret.setdefault(pkg_info['name'], {})[pkg_info['version']] = {
+ 'path': package_path,
+ 'size': os.path.getsize(package_path),
+ 'creation_date_time_t': pkg_timestamp,
+ 'creation_date_time': datetime.datetime.fromtimestamp(pkg_timestamp).isoformat(),
+ }
return ret
diff --git a/salt/states/pkg.py b/salt/states/pkg.py
index d185002d41..0983712b4c 100644
--- a/salt/states/pkg.py
+++ b/salt/states/pkg.py
@@ -2081,7 +2081,8 @@ def patch_installed(name, advisory_ids=None, downloadonly=None, **kwargs):
if not ret['changes'] and not ret['comment']:
status = 'downloaded' if downloadonly else 'installed'
ret['result'] = True
- ret['comment'] = 'Related packages are already {}'.format(status)
+ ret['comment'] = ('Advisory patch is not needed or related packages '
+ 'are already {0}'.format(status))
return ret
diff --git a/tests/unit/modules/zypper_test.py b/tests/unit/modules/zypper_test.py
index 39bd2e73e8..c9d44d102c 100644
--- a/tests/unit/modules/zypper_test.py
+++ b/tests/unit/modules/zypper_test.py
@@ -486,7 +486,10 @@ Repository 'DUMMY' not found by its alias, number, or URI.
self.assertEqual(len(list_patches), 3)
self.assertDictEqual(list_patches, PATCHES_RET)
- @patch('glob.glob', MagicMock(return_value=['/var/cache/zypper/packages/foo/bar/test_package.rpm']))
+ @patch('os.walk', MagicMock(return_value=[('test', 'test', 'test')]))
+ @patch('os.path.getsize', MagicMock(return_value=123456))
+ @patch('os.path.getctime', MagicMock(return_value=1234567890.123456))
+ @patch('fnmatch.filter', MagicMock(return_value=['/var/cache/zypper/packages/foo/bar/test_package.rpm']))
def test_list_downloaded(self):
'''
Test downloaded packages listing.
@@ -495,7 +498,12 @@ Repository 'DUMMY' not found by its alias, number, or URI.
'''
DOWNLOADED_RET = {
'test-package': {
- '1.0': '/var/cache/zypper/packages/foo/bar/test_package.rpm'
+ '1.0': {
+ 'path': '/var/cache/zypper/packages/foo/bar/test_package.rpm',
+ 'size': 123456,
+ 'creation_date_time_t': 1234567890,
+ 'creation_date_time': '2009-02-13T23:31:30',
+ }
}
}
@@ -530,7 +538,7 @@ Repository 'DUMMY' not found by its alias, number, or URI.
self.assertEqual(zypper.download("nmap", "foo"), test_out)
@patch('salt.modules.zypper._systemd_scope', MagicMock(return_value=False))
- @patch('salt.modules.zypper.list_downloaded', MagicMock(side_effect=[{}, {'vim': {'1.1': '/foo/bar/test.rpm'}}]))
+ @patch('salt.modules.zypper.list_downloaded', MagicMock(side_effect=[{}, {'vim': {'1.1': {'path': '/foo/bar/test.rpm', 'size': 1234, 'creation_date_time_t': 1234567890, 'creation_date_time': '2009-02-13T23:31:30'}}}]))
def test_install_with_downloadonly(self):
'''
Test a package installation with downloadonly=True.
@@ -548,10 +556,10 @@ Repository 'DUMMY' not found by its alias, number, or URI.
'--download-only',
'vim'
)
- self.assertDictEqual(ret, {'vim': {'new': {'1.1': '/foo/bar/test.rpm'}, 'old': ''}})
+ self.assertDictEqual(ret, {'vim': {'new': {'1.1': {'path': '/foo/bar/test.rpm', 'size': 1234, 'creation_date_time_t': 1234567890, 'creation_date_time': '2009-02-13T23:31:30'}}, 'old': ''}})
@patch('salt.modules.zypper._systemd_scope', MagicMock(return_value=False))
- @patch('salt.modules.zypper.list_downloaded', MagicMock(return_value={'vim': {'1.1': '/foo/bar/test.rpm'}}))
+ @patch('salt.modules.zypper.list_downloaded', MagicMock(return_value={'vim': {'1.1': {'path': '/foo/bar/test.rpm', 'size': 1234, 'creation_date_time_t': 1234567890, 'creation_date_time': '2017-01-01T11:00:00'}}}))
def test_install_with_downloadonly_already_downloaded(self):
'''
Test a package installation with downloadonly=True when package is already downloaded.
@@ -603,7 +611,7 @@ Repository 'DUMMY' not found by its alias, number, or URI.
'''
with patch.dict(zypper.__salt__, {'pkg_resource.parse_targets': MagicMock(return_value=({'SUSE-PATCH-XXX': None}, 'advisory'))}):
with patch('salt.modules.zypper.__zypper__.noraise.call', MagicMock()) as zypper_mock:
- with self.assertRaisesRegex(CommandExecutionError, '^Advisory id "SUSE-PATCH-XXX" not found$'):
+ with self.assertRaisesRegexp(CommandExecutionError, '^Advisory id "SUSE-PATCH-XXX" not found$'):
zypper.install(advisory_ids=['SUSE-PATCH-XXX'])
def test_remove_purge(self):
--
2.12.2

View File

@ -1,28 +0,0 @@
From 5a07f204d45b2b86d8bc0279527723e030cc4e21 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Pablo=20Su=C3=A1rez=20Hern=C3=A1ndez?=
<psuarezhernandez@suse.com>
Date: Fri, 30 Sep 2016 13:06:52 +0100
Subject: [PATCH] Setting up OS grains for SLES Expanded Support (SUSE's
Red Hat compatible platform)
core.py: quote style fixed
---
salt/grains/core.py | 1 +
1 file changed, 1 insertion(+)
diff --git a/salt/grains/core.py b/salt/grains/core.py
index 6a42cc734f..fce35cb313 100644
--- a/salt/grains/core.py
+++ b/salt/grains/core.py
@@ -1049,6 +1049,7 @@ _OS_NAME_MAP = {
'sles': 'SUSE',
'slesexpand': 'RES',
'void': 'Void',
+ 'slesexpand': 'RES',
'linuxmint': 'Mint',
}
--
2.11.0

View File

@ -1,34 +0,0 @@
From 8860800e7a9af54757096014f91a25be4f3fa552 Mon Sep 17 00:00:00 2001
From: Michael Calmer <mc@suse.de>
Date: Tue, 7 Mar 2017 13:50:13 +0100
Subject: [PATCH] special salt-minion.service file for rhel7
---
pkg/salt-minion.service.rhel7 | 14 ++++++++++++++
1 file changed, 14 insertions(+)
create mode 100644 pkg/salt-minion.service.rhel7
diff --git a/pkg/salt-minion.service.rhel7 b/pkg/salt-minion.service.rhel7
new file mode 100644
index 0000000000..6917267714
--- /dev/null
+++ b/pkg/salt-minion.service.rhel7
@@ -0,0 +1,14 @@
+[Unit]
+Description=The Salt Minion
+After=network.target
+
+[Service]
+Type=simple
+LimitNOFILE=8192
+ExecStart=/usr/bin/salt-minion
+KillMode=process
+Restart=on-failure
+RestartSec=15
+
+[Install]
+WantedBy=multi-user.target
--
2.11.0

View File

@ -1,91 +0,0 @@
From 7313bf5574a72557a6389b9a991316d0b2c6f848 Mon Sep 17 00:00:00 2001
From: Michael Calmer <mc@suse.de>
Date: Wed, 1 Mar 2017 15:37:04 +0100
Subject: [PATCH] translate variable arguments if they contain hidden
keywords (bsc#1025896)
- includes a test
---
salt/states/module.py | 30 ++++++++++++++++++++++--------
tests/unit/states/module_test.py | 14 ++++++++++++++
2 files changed, 36 insertions(+), 8 deletions(-)
diff --git a/salt/states/module.py b/salt/states/module.py
index 686546832f..adc6e12c9d 100644
--- a/salt/states/module.py
+++ b/salt/states/module.py
@@ -218,16 +218,30 @@ def run(name, **kwargs):
ret['result'] = False
return ret
- if aspec.varargs and aspec.varargs in kwargs:
- varargs = kwargs.pop(aspec.varargs)
+ if aspec.varargs:
+ if aspec.varargs == 'name':
+ rarg = 'm_name'
+ elif aspec.varargs == 'fun':
+ rarg = 'm_fun'
+ elif aspec.varargs == 'names':
+ rarg = 'm_names'
+ elif aspec.varargs == 'state':
+ rarg = 'm_state'
+ elif aspec.varargs == 'saltenv':
+ rarg = 'm_saltenv'
+ else:
+ rarg = aspec.varargs
- if not isinstance(varargs, list):
- msg = "'{0}' must be a list."
- ret['comment'] = msg.format(aspec.varargs)
- ret['result'] = False
- return ret
+ if rarg in kwargs:
+ varargs = kwargs.pop(rarg)
+
+ if not isinstance(varargs, list):
+ msg = "'{0}' must be a list."
+ ret['comment'] = msg.format(aspec.varargs)
+ ret['result'] = False
+ return ret
- args.extend(varargs)
+ args.extend(varargs)
nkwargs = {}
if aspec.keywords and aspec.keywords in kwargs:
diff --git a/tests/unit/states/module_test.py b/tests/unit/states/module_test.py
index 0c025e3861..20dda73938 100644
--- a/tests/unit/states/module_test.py
+++ b/tests/unit/states/module_test.py
@@ -38,6 +38,10 @@ class ModuleStateTest(TestCase):
varargs=None,
keywords=None,
defaults=False)
+ bspec = ArgSpec(args=[],
+ varargs='names',
+ keywords='kwargs',
+ defaults=None)
def test_module_run_module_not_available(self):
'''
@@ -69,6 +73,16 @@ class ModuleStateTest(TestCase):
comment = 'The following arguments are missing: world hello'
self.assertEqual(ret['comment'], comment)
+ @patch('salt.utils.args.get_function_argspec', MagicMock(return_value=bspec))
+ def test_module_run_hidden_varargs(self):
+ '''
+ Tests the return of module.run state when hidden varargs are used with
+ wrong type.
+ '''
+ ret = module.run(CMD, m_names = 'anyname')
+ comment = "'names' must be a list."
+ self.assertEqual(ret['comment'], comment)
+
if __name__ == '__main__':
from integration import run_tests
--
2.11.0

View File

@ -1,28 +0,0 @@
From a9f1be35b0c158fcdd460dcc8c501fe039d97258 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Klaus=20K=C3=A4mpf?= <kkaempf@suse.de>
Date: Wed, 20 Jan 2016 11:00:15 +0100
Subject: [PATCH] tserong@suse.com -- We don't have python-systemd, so
notify can't work
---
pkg/salt-master.service | 3 +--
1 file changed, 1 insertion(+), 2 deletions(-)
diff --git a/pkg/salt-master.service b/pkg/salt-master.service
index 1f4650f872..59be50301a 100644
--- a/pkg/salt-master.service
+++ b/pkg/salt-master.service
@@ -4,8 +4,7 @@ After=network.target
[Service]
LimitNOFILE=16384
-Type=notify
-NotifyAccess=all
+Type=simple
ExecStart=/usr/bin/salt-master
[Install]
--
2.11.0

View File

@ -4,10 +4,35 @@
# Author: Bo Maryniuk <bo@suse.de>
#
NO_SPHINX_PARAM="--without-sphinx"
function build_virtenv() {
virtualenv --system-site-packages $1
source $1/bin/activate
pip install --upgrade pip
if [ -z "$2" ]; then
pip install -I Sphinx
fi
}
function check_env() {
for cmd in "sphinx-build" "make" "quilt"; do
if [[ -z "$1" || "$1" != "$NO_SPHINX_PARAM" ]] && [ ! -z "$(which sphinx-build 2>/dev/null)" ]; then
cat <<EOF
You've installed Spinx globally. But it might be outdated or
clash with the version I am going to install into the temporary
virtual environment from PIP.
Please consider to remove Sphinx from your system, perhaps?
Or pass me "$NO_SPHINX_PARAM" param so I will try reusing yours
and see what happens. :)
EOF
exit 1;
fi
for cmd in "make" "quilt" "virtualenv" "pip"; do
if [ -z "$(which $cmd 2>/dev/null)" ]; then
echo "Error: '$cmd' is missing."
echo "Error: '$cmd' is still missing. Install it, please."
exit 1;
fi
done
@ -26,7 +51,7 @@ function build_docs() {
cd _build/html
chmod -R -x+X *
cd ..
tar cvf - html | bzip2 > /tmp/html.tar.bz2
tar cvf - html | bzip2 > $2/html.tar.bz2
}
function write_changelog() {
@ -46,19 +71,30 @@ EOF
}
if [ -z "$1" ]; then
echo "Usage: $0 <your e-mail>"
echo "Usage: $0 <your e-mail> [--without-sphinx]"
exit 1;
fi
check_env;
check_env $2;
START=$(pwd)
V_ENV="sphinx_doc_gen"
V_TMP=$(mktemp -d)
for f in "salt.spec" "salt*tar.gz"; do
cp -v $f $V_TMP
done
cd $V_TMP;
build_virtenv $V_ENV $2;
SRC_DIR="salt-$(cat salt.spec | grep ^Version: | cut -d: -f2 | sed -e 's/[[:blank:]]//g')";
quilt_setup $SRC_DIR
build_docs doc
build_docs doc $V_TMP
cd $START
rm -rf $SRC_DIR
mv /tmp/html.tar.bz2 $START
mv $V_TMP/html.tar.bz2 $START
rm -rf $V_TMP
echo "Done"
echo "---------------"

View File

@ -1,530 +0,0 @@
From 48cc3497eb19059a7acf14268a722e46b12e59be Mon Sep 17 00:00:00 2001
From: Bo Maryniuk <bo@suse.de>
Date: Fri, 21 Apr 2017 15:53:51 +0200
Subject: [PATCH] Use correct grain constants for timezone
Adjust the test so it is using the right grain for SUSE systems
Bugfix: use correct grain constant for platform
Refactor with setup/teardown
Add UT for RedHat's set_zone
Fix doc for RH UT
Remove unnecessary mock patch
Doc fix
Add UT for set_zone on SUSE series
Adjust UT to use correct grain for SUSE series
Bugfix: use correct os_family grain value for SUSE series
Add UT for gentoo on set_zone
Add UT for Debian on set_zone
Remove duplicate code
Add UT for get_hwclock on UTC/localtime
Remove dead code
Add UT for get_hwclock on SUSE platform
Bugfix: use correct grain for SUSE and RedHat platform
Add UT for RedHat/SUSE platforms on get_hwclock
Add UT for Debian on get_hwclock
Add UT on Solaris
Add UT for AIX on get_hwclock
Add UT for set_hwclock on AIX
Fix docstrings
Add UT for set_hwclock on solaris
Add UT for set_hwclock on Arch
Add UT for set_hwclock on RedHat
Fix UT names
Add UT set_hwclock on SUSE
Bugfix: use correct grain name for SUSE platform
Add UT for set_hwclock on Debian
Add UT on set_hw_clock on Gentoo
Fix lint issues
Rewrite test case for using no patch decorators
Disable the test for a while
Do not use multiple variables in "with" statement as of lint issues
---
salt/modules/timezone.py | 13 +-
tests/unit/modules/timezone_test.py | 390 ++++++++++++++++++++++++++++++++++++
2 files changed, 395 insertions(+), 8 deletions(-)
create mode 100644 tests/unit/modules/timezone_test.py
diff --git a/salt/modules/timezone.py b/salt/modules/timezone.py
index 69fb4fb663..e0d079f50a 100644
--- a/salt/modules/timezone.py
+++ b/salt/modules/timezone.py
@@ -160,7 +160,7 @@ def get_zone():
if __grains__['os'].lower() == 'centos':
return _get_zone_etc_localtime()
os_family = __grains__['os_family']
- for family in ('RedHat', 'SUSE'):
+ for family in ('RedHat', 'Suse'):
if family in os_family:
return _get_zone_sysconfig()
for family in ('Debian', 'Gentoo'):
@@ -273,16 +273,13 @@ def set_zone(timezone):
if 'RedHat' in __grains__['os_family']:
__salt__['file.sed'](
'/etc/sysconfig/clock', '^ZONE=.*', 'ZONE="{0}"'.format(timezone))
- elif 'SUSE' in __grains__['os_family']:
+ elif 'Suse' in __grains__['os_family']:
__salt__['file.sed'](
'/etc/sysconfig/clock', '^TIMEZONE=.*', 'TIMEZONE="{0}"'.format(timezone))
- elif 'Debian' in __grains__['os_family']:
+ elif 'Debian' in __grains__['os_family'] or 'Gentoo' in __grains__['os_family']:
with salt.utils.fopen('/etc/timezone', 'w') as ofh:
ofh.write(timezone.strip())
ofh.write('\n')
- elif 'Gentoo' in __grains__['os_family']:
- with salt.utils.fopen('/etc/timezone', 'w') as ofh:
- ofh.write(timezone)
return True
@@ -373,7 +370,7 @@ def get_hwclock():
else:
os_family = __grains__['os_family']
- for family in ('RedHat', 'SUSE'):
+ for family in ('RedHat', 'Suse'):
if family in os_family:
cmd = ['tail', '-n', '1', '/etc/adjtime']
return __salt__['cmd.run'](cmd, python_shell=False)
@@ -505,7 +502,7 @@ def set_hwclock(clock):
elif 'RedHat' in __grains__['os_family']:
__salt__['file.sed'](
'/etc/sysconfig/clock', '^ZONE=.*', 'ZONE="{0}"'.format(timezone))
- elif 'SUSE' in __grains__['os_family']:
+ elif 'Suse' in __grains__['os_family']:
__salt__['file.sed'](
'/etc/sysconfig/clock', '^TIMEZONE=.*', 'TIMEZONE="{0}"'.format(timezone))
elif 'Debian' in __grains__['os_family']:
diff --git a/tests/unit/modules/timezone_test.py b/tests/unit/modules/timezone_test.py
new file mode 100644
index 0000000000..ebf28e28ee
--- /dev/null
+++ b/tests/unit/modules/timezone_test.py
@@ -0,0 +1,390 @@
+# -*- coding: utf-8 -*-
+'''
+ :codeauthor: :email:`Bo Maryniuk <bo@suse.de>`
+'''
+
+# Import Python Libs
+from __future__ import absolute_import
+
+# Import Salt Testing Libs
+from salttesting import TestCase, skipIf
+from salttesting.mock import (
+ MagicMock,
+ patch,
+ NO_MOCK,
+ NO_MOCK_REASON
+)
+
+from salttesting.helpers import ensure_in_syspath
+from salt.exceptions import SaltInvocationError
+
+ensure_in_syspath('../../')
+
+# Import Salt Libs
+from salt.modules import timezone
+
+
+@skipIf(NO_MOCK, NO_MOCK_REASON)
+class TimezoneTestCase(TestCase):
+ '''
+ Timezone test case
+ '''
+ TEST_TZ = 'UTC'
+
+ def setUp(self):
+ '''
+ Setup test
+ :return:
+ '''
+ timezone.__salt__ = {'file.sed': MagicMock(),
+ 'cmd.run': MagicMock(),
+ 'cmd.retcode': MagicMock(return_value=0)}
+ timezone.__grains__ = {'os': 'unknown'}
+
+ def tearDown(self):
+ '''
+ Teardown test
+ :return:
+ '''
+ timezone.__salt__ = timezone.__grains__ = None
+
+ @patch('salt.utils.which', MagicMock(return_value=False))
+ def test_get_zone_centos(self):
+ '''
+ Test CentOS is recognized
+ :return:
+ '''
+ timezone.__grains__['os'] = 'centos'
+ with patch('salt.modules.timezone._get_zone_etc_localtime', MagicMock(return_value=self.TEST_TZ)):
+ assert timezone.get_zone() == self.TEST_TZ
+
+ @patch('salt.utils.which', MagicMock(return_value=False))
+ def test_get_zone_os_family_rh_suse(self):
+ '''
+ Test RedHat and Suse are recognized
+ :return:
+ '''
+ for osfamily in ['RedHat', 'Suse']:
+ timezone.__grains__['os_family'] = [osfamily]
+ with patch('salt.modules.timezone._get_zone_sysconfig', MagicMock(return_value=self.TEST_TZ)):
+ assert timezone.get_zone() == self.TEST_TZ
+
+ @patch('salt.utils.which', MagicMock(return_value=False))
+ def test_get_zone_os_family_debian_gentoo(self):
+ '''
+ Test Debian and Gentoo are recognized
+ :return:
+ '''
+ for osfamily in ['Debian', 'Gentoo']:
+ timezone.__grains__['os_family'] = [osfamily]
+ with patch('salt.modules.timezone._get_zone_etc_timezone', MagicMock(return_value=self.TEST_TZ)):
+ assert timezone.get_zone() == self.TEST_TZ
+
+ @patch('salt.utils.which', MagicMock(return_value=False))
+ def test_get_zone_os_family_allbsd_nilinuxrt(self):
+ '''
+ Test *BSD and NILinuxRT are recognized
+ :return:
+ '''
+ for osfamily in ['FreeBSD', 'OpenBSD', 'NetBSD', 'NILinuxRT']:
+ timezone.__grains__['os_family'] = osfamily
+ with patch('salt.modules.timezone._get_zone_etc_localtime', MagicMock(return_value=self.TEST_TZ)):
+ assert timezone.get_zone() == self.TEST_TZ
+
+ @patch('salt.utils.which', MagicMock(return_value=False))
+ def test_get_zone_os_family_slowlaris(self):
+ '''
+ Test Slowlaris is recognized
+ :return:
+ '''
+ timezone.__grains__['os_family'] = ['Solaris']
+ with patch('salt.modules.timezone._get_zone_solaris', MagicMock(return_value=self.TEST_TZ)):
+ assert timezone.get_zone() == self.TEST_TZ
+
+ @patch('salt.utils.which', MagicMock(return_value=False))
+ def test_get_zone_os_family_aix(self):
+ '''
+ Test IBM AIX is recognized
+ :return:
+ '''
+ timezone.__grains__['os_family'] = ['AIX']
+ with patch('salt.modules.timezone._get_zone_aix', MagicMock(return_value=self.TEST_TZ)):
+ assert timezone.get_zone() == self.TEST_TZ
+
+ @patch('salt.utils.which', MagicMock(return_value=False))
+ @patch('os.path.exists', MagicMock(return_value=True))
+ @patch('os.unlink', MagicMock())
+ @patch('os.symlink', MagicMock())
+ def test_set_zone_redhat(self):
+ '''
+ Test zone set on RH series
+ :return:
+ '''
+ timezone.__grains__['os_family'] = ['RedHat']
+ assert timezone.set_zone(self.TEST_TZ)
+ name, args, kwargs = timezone.__salt__['file.sed'].mock_calls[0]
+ assert args == ('/etc/sysconfig/clock', '^ZONE=.*', 'ZONE="UTC"')
+
+ @patch('salt.utils.which', MagicMock(return_value=False))
+ @patch('os.path.exists', MagicMock(return_value=True))
+ @patch('os.unlink', MagicMock())
+ @patch('os.symlink', MagicMock())
+ def test_set_zone_suse(self):
+ '''
+ Test zone set on SUSE series
+ :return:
+ '''
+ timezone.__grains__['os_family'] = ['Suse']
+ assert timezone.set_zone(self.TEST_TZ)
+ name, args, kwargs = timezone.__salt__['file.sed'].mock_calls[0]
+ assert args == ('/etc/sysconfig/clock', '^TIMEZONE=.*', 'TIMEZONE="UTC"')
+
+ @patch('salt.utils.which', MagicMock(return_value=False))
+ @patch('os.path.exists', MagicMock(return_value=True))
+ @patch('os.unlink', MagicMock())
+ @patch('os.symlink', MagicMock())
+ def test_set_zone_gentoo(self):
+ '''
+ Test zone set on Gentoo series
+ :return:
+ '''
+ timezone.__grains__['os_family'] = ['Gentoo']
+ _fopen = MagicMock(return_value=MagicMock(spec=file))
+ with patch('salt.utils.fopen', _fopen):
+ assert timezone.set_zone(self.TEST_TZ)
+ name, args, kwargs = _fopen.mock_calls[0]
+ assert args == ('/etc/timezone', 'w')
+ name, args, kwargs = _fopen.return_value.__enter__.return_value.write.mock_calls[0]
+ assert args == ('UTC',)
+
+ @patch('salt.utils.which', MagicMock(return_value=False))
+ @patch('os.path.exists', MagicMock(return_value=True))
+ @patch('os.unlink', MagicMock())
+ @patch('os.symlink', MagicMock())
+ def test_set_zone_debian(self):
+ '''
+ Test zone set on Debian series
+ :return:
+ '''
+ timezone.__grains__['os_family'] = ['Debian']
+ _fopen = MagicMock(return_value=MagicMock(spec=file))
+ with patch('salt.utils.fopen', _fopen):
+ assert timezone.set_zone(self.TEST_TZ)
+ name, args, kwargs = _fopen.mock_calls[0]
+ assert args == ('/etc/timezone', 'w')
+ name, args, kwargs = _fopen.return_value.__enter__.return_value.write.mock_calls[0]
+ assert args == ('UTC',)
+
+ @patch('salt.utils.which', MagicMock(return_value=True))
+ @patch('os.path.exists', MagicMock(return_value=True))
+ @patch('os.unlink', MagicMock())
+ @patch('os.symlink', MagicMock())
+ def test_get_hwclock_timedate_utc(self):
+ '''
+ Test get hwclock UTC/localtime
+ :return:
+ '''
+ with patch('salt.modules.timezone._timedatectl', MagicMock(return_value={'stdout': 'rtc in local tz'})):
+ assert timezone.get_hwclock() == 'UTC'
+ with patch('salt.modules.timezone._timedatectl', MagicMock(return_value={'stdout': 'rtc in local tz:yes'})):
+ assert timezone.get_hwclock() == 'localtime'
+
+ @patch('salt.utils.which', MagicMock(return_value=False))
+ @patch('os.path.exists', MagicMock(return_value=True))
+ @patch('os.unlink', MagicMock())
+ @patch('os.symlink', MagicMock())
+ def test_get_hwclock_suse(self):
+ '''
+ Test get hwclock on SUSE
+ :return:
+ '''
+ timezone.__grains__['os_family'] = ['Suse']
+ timezone.get_hwclock()
+ name, args, kwarg = timezone.__salt__['cmd.run'].mock_calls[0]
+ assert args == (['tail', '-n', '1', '/etc/adjtime'],)
+ assert kwarg == {'python_shell': False}
+
+ @patch('salt.utils.which', MagicMock(return_value=False))
+ @patch('os.path.exists', MagicMock(return_value=True))
+ @patch('os.unlink', MagicMock())
+ @patch('os.symlink', MagicMock())
+ def test_get_hwclock_redhat(self):
+ '''
+ Test get hwclock on RedHat
+ :return:
+ '''
+ timezone.__grains__['os_family'] = ['RedHat']
+ timezone.get_hwclock()
+ name, args, kwarg = timezone.__salt__['cmd.run'].mock_calls[0]
+ assert args == (['tail', '-n', '1', '/etc/adjtime'],)
+ assert kwarg == {'python_shell': False}
+
+ def _test_get_hwclock_debian(self): # TODO: Enable this when testing environment is working properly
+ '''
+ Test get hwclock on Debian
+ :return:
+ '''
+ with patch('salt.utils.which', MagicMock(return_value=False)):
+ with patch('os.path.exists', MagicMock(return_value=True)):
+ with patch('os.unlink', MagicMock()):
+ with patch('os.symlink', MagicMock()):
+ timezone.__grains__['os_family'] = ['Debian']
+ timezone.get_hwclock()
+ name, args, kwarg = timezone.__salt__['cmd.run'].mock_calls[0]
+ assert args == (['tail', '-n', '1', '/etc/adjtime'],)
+ assert kwarg == {'python_shell': False}
+
+ @patch('salt.utils.which', MagicMock(return_value=False))
+ @patch('os.path.exists', MagicMock(return_value=True))
+ @patch('os.unlink', MagicMock())
+ @patch('os.symlink', MagicMock())
+ def test_get_hwclock_solaris(self):
+ '''
+ Test get hwclock on Solaris
+ :return:
+ '''
+ # Incomplete
+ timezone.__grains__['os_family'] = ['Solaris']
+ assert timezone.get_hwclock() == 'UTC'
+ _fopen = MagicMock(return_value=MagicMock(spec=file))
+ with patch('salt.utils.fopen', _fopen):
+ assert timezone.get_hwclock() == 'localtime'
+
+ @patch('salt.utils.which', MagicMock(return_value=False))
+ @patch('os.path.exists', MagicMock(return_value=True))
+ @patch('os.unlink', MagicMock())
+ @patch('os.symlink', MagicMock())
+ def test_get_hwclock_aix(self):
+ '''
+ Test get hwclock on AIX
+ :return:
+ '''
+ # Incomplete
+ timezone.__grains__['os_family'] = ['AIX']
+ assert timezone.get_hwclock() == 'localtime'
+
+ @patch('salt.utils.which', MagicMock(return_value=False))
+ @patch('os.path.exists', MagicMock(return_value=True))
+ @patch('os.unlink', MagicMock())
+ @patch('os.symlink', MagicMock())
+ def test_set_hwclock_aix(self):
+ '''
+ Test set hwclock on AIX
+ :return:
+ '''
+ timezone.__grains__['os_family'] = ['AIX']
+ with self.assertRaises(SaltInvocationError):
+ assert timezone.set_hwclock('forty two')
+ assert timezone.set_hwclock('UTC')
+
+ @patch('salt.utils.which', MagicMock(return_value=False))
+ @patch('os.path.exists', MagicMock(return_value=True))
+ @patch('os.unlink', MagicMock())
+ @patch('os.symlink', MagicMock())
+ @patch('salt.modules.timezone.get_zone', MagicMock(return_value='TEST_TIMEZONE'))
+ def test_set_hwclock_solaris(self):
+ '''
+ Test set hwclock on Solaris
+ :return:
+ '''
+ timezone.__grains__['os_family'] = ['Solaris']
+ timezone.__grains__['cpuarch'] = 'x86'
+
+ with self.assertRaises(SaltInvocationError):
+ assert timezone.set_hwclock('forty two')
+ assert timezone.set_hwclock('UTC')
+ name, args, kwargs = timezone.__salt__['cmd.retcode'].mock_calls[0]
+ assert args == (['rtc', '-z', 'GMT'],)
+ assert kwargs == {'python_shell': False}
+
+ @patch('salt.utils.which', MagicMock(return_value=False))
+ @patch('os.path.exists', MagicMock(return_value=True))
+ @patch('os.unlink', MagicMock())
+ @patch('os.symlink', MagicMock())
+ @patch('salt.modules.timezone.get_zone', MagicMock(return_value='TEST_TIMEZONE'))
+ def test_set_hwclock_arch(self):
+ '''
+ Test set hwclock on arch
+ :return:
+ '''
+ timezone.__grains__['os_family'] = ['Arch']
+
+ assert timezone.set_hwclock('UTC')
+ name, args, kwargs = timezone.__salt__['cmd.retcode'].mock_calls[0]
+ assert args == (['timezonectl', 'set-local-rtc', 'false'],)
+ assert kwargs == {'python_shell': False}
+
+ @patch('salt.utils.which', MagicMock(return_value=False))
+ @patch('os.path.exists', MagicMock(return_value=True))
+ @patch('os.unlink', MagicMock())
+ @patch('os.symlink', MagicMock())
+ @patch('salt.modules.timezone.get_zone', MagicMock(return_value='TEST_TIMEZONE'))
+ def test_set_hwclock_redhat(self):
+ '''
+ Test set hwclock on RedHat
+ :return:
+ '''
+ timezone.__grains__['os_family'] = ['RedHat']
+
+ assert timezone.set_hwclock('UTC')
+ name, args, kwargs = timezone.__salt__['file.sed'].mock_calls[0]
+ assert args == ('/etc/sysconfig/clock', '^ZONE=.*', 'ZONE="TEST_TIMEZONE"')
+
+ @patch('salt.utils.which', MagicMock(return_value=False))
+ @patch('os.path.exists', MagicMock(return_value=True))
+ @patch('os.unlink', MagicMock())
+ @patch('os.symlink', MagicMock())
+ @patch('salt.modules.timezone.get_zone', MagicMock(return_value='TEST_TIMEZONE'))
+ def test_set_hwclock_suse(self):
+ '''
+ Test set hwclock on SUSE
+ :return:
+ '''
+ timezone.__grains__['os_family'] = ['Suse']
+
+ assert timezone.set_hwclock('UTC')
+ name, args, kwargs = timezone.__salt__['file.sed'].mock_calls[0]
+ assert args == ('/etc/sysconfig/clock', '^TIMEZONE=.*', 'TIMEZONE="TEST_TIMEZONE"')
+
+ @patch('salt.utils.which', MagicMock(return_value=False))
+ @patch('os.path.exists', MagicMock(return_value=True))
+ @patch('os.unlink', MagicMock())
+ @patch('os.symlink', MagicMock())
+ @patch('salt.modules.timezone.get_zone', MagicMock(return_value='TEST_TIMEZONE'))
+ def test_set_hwclock_debian(self):
+ '''
+ Test set hwclock on Debian
+ :return:
+ '''
+ timezone.__grains__['os_family'] = ['Debian']
+
+ assert timezone.set_hwclock('UTC')
+ name, args, kwargs = timezone.__salt__['file.sed'].mock_calls[0]
+ assert args == ('/etc/default/rcS', '^UTC=.*', 'UTC=yes')
+
+ assert timezone.set_hwclock('localtime')
+ name, args, kwargs = timezone.__salt__['file.sed'].mock_calls[1]
+ assert args == ('/etc/default/rcS', '^UTC=.*', 'UTC=no')
+
+ @patch('salt.utils.which', MagicMock(return_value=False))
+ @patch('os.path.exists', MagicMock(return_value=True))
+ @patch('os.unlink', MagicMock())
+ @patch('os.symlink', MagicMock())
+ @patch('salt.modules.timezone.get_zone', MagicMock(return_value='TEST_TIMEZONE'))
+ def test_set_hwclock_gentoo(self):
+ '''
+ Test set hwclock on Gentoo
+ :return:
+ '''
+ timezone.__grains__['os_family'] = ['Gentoo']
+
+ with self.assertRaises(SaltInvocationError):
+ timezone.set_hwclock('forty two')
+
+ timezone.set_hwclock('UTC')
+ name, args, kwargs = timezone.__salt__['file.sed'].mock_calls[0]
+ assert args == ('/etc/conf.d/hwclock', '^clock=.*', 'clock="UTC"')
+
+ timezone.set_hwclock('localtime')
+ name, args, kwargs = timezone.__salt__['file.sed'].mock_calls[1]
+ assert args == ('/etc/conf.d/hwclock', '^clock=.*', 'clock="local"')
--
2.13.0

3
v2017.7.0.tar.gz Normal file
View File

@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:8b94b9e948783a35a870177ebdaf13aa78bfb1132225eee85d764c7580c88f3a
size 11417297