From 2a682f5ea32f6e37e778040032aff9332aac1a0e Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Pablo=20Su=C3=A1rez=20Hern=C3=A1ndez?=
 <psuarezhernandez@suse.com>
Date: Thu, 20 Jun 2019 12:52:45 +0100
Subject: [PATCH] Provide the missing features required for Yomi (Yet
 one more installer)

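This patch touches salt/grains/core.py, the kubeadm, rpm_lowpkg,
systemd_service and zypperpkg execution modules, the btrfs, file, loop and
pkgrepo states, salt/utils/oset.py and the related unit tests (see the
diffstat below). As a rough illustration of the root-aware package locks
added to zypperpkg.py (the package name and target path are placeholders,
not part of the patch):

    # hold/release a lock inside an alternative root (e.g. the Yomi target)
    salt '*' pkg.hold dbus root=/mnt
    salt '*' pkg.unhold dbus root=/mnt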
---
 salt/grains/core.py                        |   6 +-
 salt/modules/kubeadm.py                    |  91 +++++++++------
 salt/modules/rpm_lowpkg.py                 |  42 +++----
 salt/modules/systemd_service.py            |  24 ++--
 salt/modules/zypperpkg.py                  |  87 ++++++++------
 salt/states/btrfs.py                       |  44 +++++--
 salt/states/file.py                        |   7 +-
 salt/states/loop.py                        |  15 +--
 salt/states/pkgrepo.py                     |   5 -
 salt/utils/oset.py                         |   8 +-
 tests/unit/modules/test_kubeadm.py         |  43 ++++---
 tests/unit/modules/test_rpm_lowpkg.py      |  15 ++-
 tests/unit/modules/test_systemd_service.py |  13 +--
 tests/unit/modules/test_zypperpkg.py       |  60 ++--------
 tests/unit/states/test_btrfs.py            | 130 ++++++---------------
 tests/unit/states/test_pkg.py              |  39 ++-----
 tests/unit/test_loader.py                  |  97 ++++++++++++++-
 17 files changed, 373 insertions(+), 353 deletions(-)

diff --git a/salt/grains/core.py b/salt/grains/core.py
index bebb4581bc..d7d03c5e70 100644
--- a/salt/grains/core.py
+++ b/salt/grains/core.py
@@ -2759,7 +2759,7 @@ def _hw_data(osdata):
             contents_file = os.path.join("/sys/class/dmi/id", fw_file)
             if os.path.exists(contents_file):
                 try:
-                    with salt.utils.files.fopen(contents_file, "rb") as ifile:
+                    with salt.utils.files.fopen(contents_file, "r") as ifile:
                         grains[key] = salt.utils.stringutils.to_unicode(
                             ifile.read().strip(), errors="replace"
                         )
@@ -2768,9 +2768,7 @@ def _hw_data(osdata):
                 except UnicodeDecodeError:
                     # Some firmwares provide non-valid 'product_name'
                     # files, ignore them
-                    log.debug(
-                        "The content in /sys/devices/virtual/dmi/id/product_name is not valid"
-                    )
+                    pass
                 except OSError as err:
                     # PermissionError is new to Python 3, but corresponds to the EACESS and
                     # EPERM error numbers. Use those instead here for PY2 compatibility.
diff --git a/salt/modules/kubeadm.py b/salt/modules/kubeadm.py
index 8baf5f85fd..966e9e848f 100644
--- a/salt/modules/kubeadm.py
+++ b/salt/modules/kubeadm.py
@@ -1,3 +1,25 @@
+#
+# Author: Alberto Planas <aplanas@suse.com>
+#
+# Copyright 2019 SUSE LINUX GmbH, Nuernberg, Germany.
+#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
+
 """
 :maintainer:    Alberto Planas <aplanas@suse.com>
 :maturity:      new
@@ -11,6 +33,7 @@ import re
 
 import salt.utils.files
 from salt.exceptions import CommandExecutionError
+from salt.ext.six.moves import zip
 
 ADMIN_CFG = "/etc/kubernetes/admin.conf"
 
@@ -37,23 +60,22 @@ def _api_server_endpoint(config=None):
             endpoint = re.search(
                 r"^\s*server: https?://(.*)$", fp_.read(), re.MULTILINE
             ).group(1)
-    # pylint:disable=broad-except
     except Exception:
         # Any error or exception is mapped to None
         pass
     return endpoint
 
 
-def _token(create_if_needed=False):
+def _token(create_if_needed=True):
     """
     Return a valid bootstrap token
     """
     tokens = token_list()
-    if not tokens and create_if_needed:
+    if not tokens:
         token_create(description="Token created by kubeadm salt module")
         tokens = token_list()
-    # We expect that the token is valid for authentication and signing
-    return tokens[0]["token"] if tokens else None
+    # We expect that the token is valid for authestication and signing
+    return tokens[0]["token"]
 
 
 def _discovery_token_ca_cert_hash():
@@ -92,10 +114,6 @@ def join_params(create_if_needed=False):
 
     Return the parameters required for joining into the cluster
 
-    create_if_needed
-       If the token bucket is empty and this parameter is True, a new
-       token will be created.
-
     CLI Example:
 
     .. code-block:: bash
@@ -169,7 +187,7 @@ def token_create(
     Create bootstrap tokens on the server
 
     token
-       Token to write, if None one will be generated. The token must
+       Token to write, if None one will be gerenared. The token must
        match a regular expression, that by default is
        [a-z0-9]{6}.[a-z0-9]{16}
 
@@ -180,7 +198,7 @@ def token_create(
        A human friendly description of how this token is used
 
     groups
-       List of extra groups that this token will authenticate, default
+       List of extra groups that this token will authenticate, defaut
        to ['system:bootstrappers:kubeadm:default-node-token']
 
     ttl
@@ -189,7 +207,7 @@ def token_create(
        is 24h0m0s
 
     usages
-       Describes the ways in which this token can be used. The default
+       Describes the ways in wich this token can be used. The default
        value is ['signing', 'authentication']
 
     kubeconfig
@@ -239,7 +257,7 @@ def token_delete(token, kubeconfig=None, rootfs=None):
     Delete bootstrap tokens on the server
 
     token
-       Token to write, if None one will be generated. The token must
+       Token to write, if None one will be gerenared. The token must
        match a regular expression, that by default is
        [a-z0-9]{6}.[a-z0-9]{16}
 
@@ -328,21 +346,20 @@ def token_list(kubeconfig=None, rootfs=None):
 
     lines = _cmd(cmd).splitlines()
 
+    # Find the header and parse it. We do not need to validate the
+    # content, as the regex will take care of future changes.
+    header = lines.pop(0)
+    header = [i.lower() for i in re.findall(r"(\w+(?:\s\w+)*)", header)]
+
     tokens = []
-    if lines:
-        # Find the header and parse it. We do not need to validate
-        # the content, as the regex will take care of future changes.
-        header = lines.pop(0)
-        header = [i.lower() for i in re.findall(r"(\w+(?:\s\w+)*)", header)]
-
-        for line in lines:
-            # TODO(aplanas): descriptions with multiple spaces can
-            # break the parser.
-            values = re.findall(r"(\S+(?:\s\S+)*)", line)
-            if len(header) != len(values):
-                log.error("Error parsing line: {}".format(line))
-                continue
-            tokens.append({key: value for key, value in zip(header, values)})
+    for line in lines:
+        # TODO(aplanas): descriptions with multiple spaces can break
+        # the parser.
+        values = re.findall(r"(\S+(?:\s\S+)*)", line)
+        if len(header) != len(values):
+            log.error("Error parsing line: {}".format(line))
+            continue
+        tokens.append({key: value for key, value in zip(header, values)})
     return tokens
 
 
@@ -869,7 +886,7 @@ def config_upload_from_flags(
     flags
 
     apiserver_advertise_address
-       The IP address the API server will advertise it's listening on
+       The IP address the API server will adversite it's listening on
 
     apiserver_bind_port
        The port the API server is accessible on (default 6443)
@@ -900,11 +917,11 @@ def config_upload_from_flags(
        Specify range of IP addresses for the pod network
 
     service_cidr
-       Use alternative range of IP address for service VIPs (default
+       Use alternative range of IP address dor service VIPs (default
        "10.96.0.0/12")
 
     service_dns_domain
-       Use alternative domain for services (default "cluster.local")
+       Use alternative domain for serivces (default "cluster.local")
 
     kubeconfig
        The kubeconfig file to use when talking to the cluster. The
@@ -1004,7 +1021,7 @@ def init(
     Command to set up the Kubernetes control plane
 
     apiserver_advertise_address
-       The IP address the API server will advertise it's listening on
+       The IP address the API server will adversite it's listening on
 
     apiserver_bind_port
        The port the API server is accessible on (default 6443)
@@ -1035,10 +1052,10 @@ def init(
        various features
 
     ignore_preflight_errors
-       A list of checks whose errors will be shown as warnings
+       A list of checkt whose errors will be shown as warnings
 
     image_repository
-       Choose a container registry to pull control plane images from
+       Choose a container registry to pull controll plane images from
 
     kubernetes_version
        Choose a specifig Kubernetes version for the control plane
@@ -1051,11 +1068,11 @@ def init(
        Specify range of IP addresses for the pod network
 
     service_cidr
-       Use alternative range of IP address for service VIPs (default
+       Use alternative range of IP address dor service VIPs (default
        "10.96.0.0/12")
 
     service_dns_domain
-       Use alternative domain for services (default "cluster.local")
+       Use alternative domain for serivces (default "cluster.local")
 
     skip_certificate_key_print
        Don't print the key used to encrypt the control-plane
@@ -1190,10 +1207,10 @@ def join(
 
     apiserver_advertise_address
        If the node should host a new control plane instance, the IP
-       address the API Server will advertise it's listening on
+       address the API Server will adversise it's listening on
 
     apiserver_bind_port
-       If the node should host a new control plane instance, the port
+       If the node shoult host a new control plane instance, the port
        the API Server to bind to (default 6443)
 
|
2021-01-08 13:41:50 +01:00
|
|
|
certificate_key
|
2020-04-07 14:14:01 +02:00
|
|
|
diff --git a/salt/modules/rpm_lowpkg.py b/salt/modules/rpm_lowpkg.py
index 54b7014440..393b0f453a 100644
--- a/salt/modules/rpm_lowpkg.py
+++ b/salt/modules/rpm_lowpkg.py
@@ -1,17 +1,13 @@
-# -*- coding: utf-8 -*-
 """
 Support for rpm
 """
 
-# Import python libs
-from __future__ import absolute_import, print_function, unicode_literals
 
 import datetime
 import logging
 import os
 import re
 
-# Import Salt libs
 import salt.utils.decorators.path
 import salt.utils.itertools
 import salt.utils.path
@@ -105,14 +101,14 @@ def bin_pkg_info(path, saltenv="base"):
         newpath = __salt__["cp.cache_file"](path, saltenv)
         if not newpath:
             raise CommandExecutionError(
-                "Unable to retrieve {0} from saltenv '{1}'".format(path, saltenv)
+                "Unable to retrieve {} from saltenv '{}'".format(path, saltenv)
             )
         path = newpath
     else:
         if not os.path.exists(path):
-            raise CommandExecutionError("{0} does not exist on minion".format(path))
+            raise CommandExecutionError("{} does not exist on minion".format(path))
         elif not os.path.isabs(path):
-            raise SaltInvocationError("{0} does not exist on minion".format(path))
+            raise SaltInvocationError("{} does not exist on minion".format(path))
 
     # REPOID is not a valid tag for the rpm command. Remove it and replace it
     # with 'none'
@@ -187,28 +183,26 @@ def verify(*packages, **kwargs):
     ftypes = {"c": "config", "d": "doc", "g": "ghost", "l": "license", "r": "readme"}
     ret = {}
     ignore_types = kwargs.get("ignore_types", [])
-    if not isinstance(ignore_types, (list, six.string_types)):
+    if not isinstance(ignore_types, (list, (str,))):
         raise SaltInvocationError(
             "ignore_types must be a list or a comma-separated string"
         )
-    if isinstance(ignore_types, six.string_types):
+    if isinstance(ignore_types, str):
         try:
             ignore_types = [x.strip() for x in ignore_types.split(",")]
         except AttributeError:
-            ignore_types = [x.strip() for x in six.text_type(ignore_types).split(",")]
+            ignore_types = [x.strip() for x in str(ignore_types).split(",")]
 
     verify_options = kwargs.get("verify_options", [])
-    if not isinstance(verify_options, (list, six.string_types)):
+    if not isinstance(verify_options, (list, (str,))):
         raise SaltInvocationError(
             "verify_options must be a list or a comma-separated string"
         )
-    if isinstance(verify_options, six.string_types):
+    if isinstance(verify_options, str):
         try:
             verify_options = [x.strip() for x in verify_options.split(",")]
         except AttributeError:
-            verify_options = [
-                x.strip() for x in six.text_type(verify_options).split(",")
-            ]
+            verify_options = [x.strip() for x in str(verify_options).split(",")]
 
     cmd = ["rpm"]
     if kwargs.get("root"):
@@ -229,7 +223,7 @@ def verify(*packages, **kwargs):
     # succeeded, but if the retcode is nonzero, then the command failed.
     msg = "Failed to verify package(s)"
     if out["stderr"]:
-        msg += ": {0}".format(out["stderr"])
+        msg += ": {}".format(out["stderr"])
         raise CommandExecutionError(msg)
 
     for line in salt.utils.itertools.split(out["stdout"], "\n"):
@@ -492,7 +486,7 @@ def diff(package_path, path):
     )
     res = __salt__["cmd.shell"](cmd.format(package_path, path), output_loglevel="trace")
    if res and res.startswith("Binary file"):
-        return "File '{0}' is binary and its content has been " "modified.".format(path)
+        return "File '{}' is binary and its content has been " "modified.".format(path)
 
     return res
 
@@ -590,7 +584,7 @@ def info(*packages, **kwargs):
             attr.append("edition")
             query.append(attr_map["edition"])
     else:
-        for attr_k, attr_v in six.iteritems(attr_map):
+        for attr_k, attr_v in attr_map.items():
             if attr_k != "description":
                 query.append(attr_v)
     if attr and "description" in attr or not attr:
@@ -599,7 +593,7 @@ def info(*packages, **kwargs):
 
     cmd = " ".join(cmd)
     call = __salt__["cmd.run_all"](
-        cmd + (" --queryformat '{0}'".format("".join(query))),
+        cmd + (" --queryformat '{}'".format("".join(query))),
         output_loglevel="trace",
         env={"TZ": "UTC"},
         clean_env=True,
@@ -706,11 +700,7 @@ def version_cmp(ver1, ver2, ignore_epoch=False):
 
         salt '*' pkg.version_cmp '0.2-001' '0.2.0.1-002'
     """
-    normalize = (
-        lambda x: six.text_type(x).split(":", 1)[-1]
-        if ignore_epoch
-        else six.text_type(x)
-    )
+    normalize = lambda x: str(x).split(":", 1)[-1] if ignore_epoch else str(x)
     ver1 = normalize(ver1)
     ver2 = normalize(ver2)
 
@@ -747,7 +737,7 @@ def version_cmp(ver1, ver2, ignore_epoch=False):
             # rpmdev-vercmp always uses epochs, even when zero
             def _ensure_epoch(ver):
                 def _prepend(ver):
-                    return "0:{0}".format(ver)
+                    return "0:{}".format(ver)
 
                 try:
                     if ":" not in ver:
@@ -798,7 +788,7 @@ def version_cmp(ver1, ver2, ignore_epoch=False):
         cmp_result = cmp_func((ver1_e, ver1_v, ver1_r), (ver2_e, ver2_v, ver2_r))
         if cmp_result not in (-1, 0, 1):
             raise CommandExecutionError(
-                "Comparison result '{0}' is invalid".format(cmp_result)
+                "Comparison result '{}' is invalid".format(cmp_result)
             )
 
         return cmp_result
|
2021-01-08 13:41:50 +01:00
|
|
|
diff --git a/salt/modules/systemd_service.py b/salt/modules/systemd_service.py
|
|
|
|
index 176e1dabaa..03e7268cd4 100644
|
|
|
|
--- a/salt/modules/systemd_service.py
|
|
|
|
+++ b/salt/modules/systemd_service.py
|
|
|
|
@@ -1,4 +1,3 @@
|
|
|
|
-# -*- coding: utf-8 -*-
|
|
|
|
"""
|
|
|
|
Provides the service module for systemd
|
|
|
|
|
|
|
|
@@ -15,8 +14,6 @@ Provides the service module for systemd
|
|
|
|
call it under the name 'service' and NOT 'systemd'. You can see that also
|
|
|
|
in the examples below.
|
|
|
|
"""
|
|
|
|
-# Import Python libs
|
|
|
|
-from __future__ import absolute_import, print_function, unicode_literals
|
|
|
|
|
|
|
|
import errno
|
|
|
|
import fnmatch
|
|
|
|
@@ -26,15 +23,12 @@ import os
|
|
|
|
import re
|
|
|
|
import shlex
|
|
|
|
|
|
|
|
-# Import Salt libs
|
|
|
|
import salt.utils.files
|
|
|
|
import salt.utils.itertools
|
|
|
|
import salt.utils.path
|
|
|
|
import salt.utils.stringutils
|
|
|
|
import salt.utils.systemd
|
|
|
|
from salt.exceptions import CommandExecutionError
|
|
|
|
-
|
|
|
|
-# Import 3rd-party libs
|
|
|
|
from salt.ext import six
|
2020-04-07 14:14:01 +02:00
|
|
|
|
2021-01-08 13:41:50 +01:00
|
|
|
log = logging.getLogger(__name__)
|
|
|
|
@@ -94,8 +88,8 @@ def _canonical_unit_name(name):
|
|
|
|
Build a canonical unit name treating unit names without one
|
|
|
|
of the valid suffixes as a service.
|
|
|
|
"""
|
|
|
|
- if not isinstance(name, six.string_types):
|
|
|
|
- name = six.text_type(name)
|
|
|
|
+ if not isinstance(name, str):
|
|
|
|
+ name = str(name)
|
|
|
|
if any(name.endswith(suffix) for suffix in VALID_UNIT_TYPES):
|
|
|
|
return name
|
|
|
|
return "%s.service" % name
|
|
|
|
@@ -137,7 +131,7 @@ def _check_for_unit_changes(name):
|
|
|
|
Check for modified/updated unit files, and run a daemon-reload if any are
|
|
|
|
found.
|
|
|
|
"""
|
|
|
|
- contextkey = "systemd._check_for_unit_changes.{0}".format(name)
|
|
|
|
+ contextkey = "systemd._check_for_unit_changes.{}".format(name)
|
|
|
|
if contextkey not in __context__:
|
|
|
|
if _untracked_custom_unit_found(name) or _unit_file_changed(name):
|
|
|
|
systemctl_reload()
|
|
|
|
@@ -199,9 +193,7 @@ def _default_runlevel():
|
2020-04-07 14:14:01 +02:00
|
|
|
|
2021-01-08 13:41:50 +01:00
|
|
|
# The default runlevel can also be set via the kernel command-line.
|
|
|
|
try:
|
|
|
|
- valid_strings = set(
|
|
|
|
- ("0", "1", "2", "3", "4", "5", "6", "s", "S", "-s", "single")
|
|
|
|
- )
|
|
|
|
+ valid_strings = {"0", "1", "2", "3", "4", "5", "6", "s", "S", "-s", "single"}
|
|
|
|
with salt.utils.files.fopen("/proc/cmdline") as fp_:
|
|
|
|
for line in fp_:
|
|
|
|
line = salt.utils.stringutils.to_unicode(line)
|
|
|
|
@@ -291,7 +283,7 @@ def _get_service_exec():
|
|
|
|
break
|
|
|
|
else:
|
|
|
|
raise CommandExecutionError(
|
|
|
|
- "Unable to find sysv service manager (tried {0})".format(
|
|
|
|
+ "Unable to find sysv service manager (tried {})".format(
|
|
|
|
", ".join(executables)
|
|
|
|
)
|
|
|
|
)
|
|
|
|
@@ -345,7 +337,7 @@ def _systemctl_cmd(action, name=None, systemd_scope=False, no_block=False, root=
|
|
|
|
ret.append("--no-block")
|
|
|
|
if root:
|
|
|
|
ret.extend(["--root", root])
|
|
|
|
- if isinstance(action, six.string_types):
|
|
|
|
+ if isinstance(action, str):
|
|
|
|
action = shlex.split(action)
|
|
|
|
ret.extend(action)
|
|
|
|
if name is not None:
|
|
|
|
@@ -507,7 +499,7 @@ def get_enabled(root=None):
|
|
|
|
ret.add(unit_name if unit_type == "service" else fullname)
|
2020-04-07 14:14:01 +02:00
|
|
|
|
2021-01-08 13:41:50 +01:00
|
|
|
# Add in any sysvinit services that are enabled
|
|
|
|
- ret.update(set([x for x in _get_sysv_services(root) if _sysv_enabled(x, root)]))
|
|
|
|
+ ret.update({x for x in _get_sysv_services(root) if _sysv_enabled(x, root)})
|
|
|
|
return sorted(ret)
|
2020-04-07 14:14:01 +02:00
|
|
|
|
|
|
|
|
2021-01-08 13:41:50 +01:00
|
|
|
@@ -549,7 +541,7 @@ def get_disabled(root=None):
|
|
|
|
ret.add(unit_name if unit_type == "service" else fullname)
|
2020-04-07 14:14:01 +02:00
|
|
|
|
2021-01-08 13:41:50 +01:00
|
|
|
# Add in any sysvinit services that are disabled
|
|
|
|
- ret.update(set([x for x in _get_sysv_services(root) if not _sysv_enabled(x, root)]))
|
|
|
|
+ ret.update({x for x in _get_sysv_services(root) if not _sysv_enabled(x, root)})
|
|
|
|
return sorted(ret)
|
2020-04-07 14:14:01 +02:00
|
|
|
|
|
|
|
|
2021-01-08 13:41:50 +01:00
|
|
|
diff --git a/salt/modules/zypperpkg.py b/salt/modules/zypperpkg.py
|
|
|
|
index dfaaf420a1..75cb5ce4a8 100644
|
|
|
|
--- a/salt/modules/zypperpkg.py
|
|
|
|
+++ b/salt/modules/zypperpkg.py
|
|
|
|
@@ -879,6 +879,7 @@ def list_pkgs(versions_as_list=False, root=None, includes=None, **kwargs):
|
|
|
|
# inclusion types are passed
|
|
|
|
contextkey = "pkg.list_pkgs_{}_{}".format(root, includes)
|
2020-04-07 14:14:01 +02:00
|
|
|
|
2021-01-08 13:41:50 +01:00
|
|
|
+ # TODO(aplanas): this cached value depends on the parameters
|
|
|
|
if contextkey not in __context__:
|
|
|
|
ret = {}
|
|
|
|
cmd = ["rpm"]
|
|
|
|
@@ -958,6 +959,28 @@ def list_pkgs(versions_as_list=False, root=None, includes=None, **kwargs):
|
|
|
|
}
|
|
|
|
]
|
2020-04-07 14:14:01 +02:00
|
|
|
|
2021-01-08 13:41:50 +01:00
|
|
|
+ for include in includes:
|
|
|
|
+ if include in ("pattern", "patch"):
|
|
|
|
+ if include == "pattern":
|
|
|
|
+ pkgs = list_installed_patterns(root=root)
|
|
|
|
+ elif include == "patch":
|
|
|
|
+ pkgs = list_installed_patches(root=root)
|
|
|
|
+ else:
|
|
|
|
+ pkgs = []
|
|
|
|
+ for pkg in pkgs:
|
|
|
|
+ pkg_extended_name = "{}:{}".format(include, pkg)
|
|
|
|
+ info = info_available(pkg_extended_name, refresh=False, root=root)
|
|
|
|
+ _ret[pkg_extended_name] = [
|
|
|
|
+ {
|
|
|
|
+ "epoch": None,
|
|
|
|
+ "version": info[pkg]["version"],
|
|
|
|
+ "release": None,
|
|
|
|
+ "arch": info[pkg]["arch"],
|
|
|
|
+ "install_date": None,
|
|
|
|
+ "install_date_time_t": None,
|
|
|
|
+ }
|
|
|
|
+ ]
|
2020-04-07 14:14:01 +02:00
|
|
|
+
|
2021-01-08 13:41:50 +01:00
|
|
|
__context__[contextkey] = _ret
|
2019-06-21 10:17:26 +02:00
|
|
|
|
2021-01-08 13:41:50 +01:00
|
|
|
return __salt__["pkg_resource.format_pkg_list"](
|
|
|
|
@@ -1401,7 +1424,9 @@ def refresh_db(force=None, root=None):
|
2019-06-21 10:17:26 +02:00
|
|
|
|
|
|
|
|
2021-01-08 13:41:50 +01:00
|
|
|
def _find_types(pkgs):
|
|
|
|
- """Form a package names list, find prefixes of packages types."""
|
|
|
|
+ """
|
|
|
|
+ Form a package names list, find prefixes of packages types.
|
|
|
|
+ """
|
|
|
|
return sorted({pkg.split(":", 1)[0] for pkg in pkgs if len(pkg.split(":", 1)) == 2})
|
2019-06-21 10:17:26 +02:00
|
|
|
|
|
|
|
|
2021-01-08 13:41:50 +01:00
|
|
|
@@ -1596,12 +1621,7 @@ def install(
|
|
|
|
'Advisory id "{}" not found'.format(advisory_id)
|
|
|
|
)
|
|
|
|
else:
|
|
|
|
- # If we add here the `patch:` prefix, the
|
|
|
|
- # `_find_types` helper will take the patches into the
|
|
|
|
- # list of packages. Usually this is the correct thing
|
|
|
|
- # to do, but we can break software the depends on the
|
|
|
|
- # old behaviour.
|
|
|
|
- targets.append(advisory_id)
|
|
|
|
+ targets.append("patch:{}".format(advisory_id))
|
|
|
|
else:
|
|
|
|
targets = pkg_params
|
2019-06-21 10:17:26 +02:00
|
|
|
|
2021-01-08 13:41:50 +01:00
|
|
|
@@ -1639,16 +1659,6 @@ def install(
|
2019-06-21 10:17:26 +02:00
|
|
|
|
2021-01-08 13:41:50 +01:00
|
|
|
errors = []
|
2019-06-21 10:17:26 +02:00
|
|
|
|
2021-01-08 13:41:50 +01:00
|
|
|
- # If the type is 'advisory', we manually add the 'patch:'
|
|
|
|
- # prefix. This kind of package will not appear in pkg_list in this
|
|
|
|
- # way.
|
|
|
|
- #
|
|
|
|
- # Note that this enable a different mechanism to install a patch;
|
|
|
|
- # if the name of the package is already prefixed with 'patch:' we
|
|
|
|
- # can avoid listing them in the `advisory_ids` field.
|
|
|
|
- if pkg_type == "advisory":
|
|
|
|
- targets = ["patch:{}".format(t) for t in targets]
|
|
|
|
-
|
|
|
|
# Split the targets into batches of 500 packages each, so that
|
|
|
|
# the maximal length of the command line is not broken
|
|
|
|
systemd_scope = _systemd_scope()
|
|
|
|
@@ -1805,6 +1815,10 @@ def upgrade(
|
|
|
|
cmd_update.append("--no-recommends")
|
|
|
|
log.info("Disabling recommendations")
|
2019-06-21 10:17:26 +02:00
|
|
|
|
2021-01-08 13:41:50 +01:00
|
|
|
+ if no_recommends:
|
|
|
|
+ cmd_update.append("--no-recommends")
|
|
|
|
+ log.info("Disabling recommendations")
|
2020-04-07 14:14:01 +02:00
|
|
|
+
|
2021-01-08 13:41:50 +01:00
|
|
|
if dryrun:
|
|
|
|
# Creates a solver test case for debugging.
|
|
|
|
log.info("Executing debugsolver and performing a dry-run dist-upgrade")
|
|
|
|
@@ -2035,13 +2049,13 @@ def list_locks(root=None):
|
|
|
|
for element in [el for el in meta if el]:
|
|
|
|
if ":" in element:
|
|
|
|
lock.update(
|
|
|
|
- dict([tuple([i.strip() for i in element.split(":", 1)])])
|
|
|
|
+ dict([tuple([i.strip() for i in element.split(":", 1)]),])
|
|
|
|
)
|
|
|
|
if lock.get("solvable_name"):
|
|
|
|
locks[lock.pop("solvable_name")] = lock
|
|
|
|
except OSError:
|
|
|
|
pass
|
|
|
|
- except Exception: # pylint: disable=broad-except
|
|
|
|
+ except Exception:
|
|
|
|
log.warning("Detected a problem when accessing {}".format(_locks))
|
2020-04-07 14:14:01 +02:00
|
|
|
|
2021-01-08 13:41:50 +01:00
|
|
|
return locks
|
|
|
|
@@ -2092,12 +2106,13 @@ def unhold(name=None, pkgs=None, **kwargs):
|
|
|
|
salt '*' pkg.remove_lock pkgs='["foo", "bar"]'
|
|
|
|
"""
|
2020-04-07 14:14:01 +02:00
|
|
|
ret = {}
|
2021-01-08 13:41:50 +01:00
|
|
|
+ root = kwargs.get("root")
|
|
|
|
if (not name and not pkgs) or (name and pkgs):
|
|
|
|
raise CommandExecutionError("Name or packages must be specified.")
|
|
|
|
elif name:
|
|
|
|
pkgs = [name]
|
2020-04-07 14:14:01 +02:00
|
|
|
|
2021-01-08 13:41:50 +01:00
|
|
|
- locks = list_locks()
|
|
|
|
+ locks = list_locks(root)
|
|
|
|
try:
|
|
|
|
pkgs = list(__salt__["pkg_resource.parse_targets"](pkgs)[0].keys())
|
|
|
|
except MinionError as exc:
|
|
|
|
@@ -2114,15 +2129,18 @@ def unhold(name=None, pkgs=None, **kwargs):
|
|
|
|
ret[pkg]["comment"] = "Package {} unable to be unheld.".format(pkg)
|
2020-04-07 14:14:01 +02:00
|
|
|
|
2021-01-08 13:41:50 +01:00
|
|
|
if removed:
|
|
|
|
- __zypper__.call("rl", *removed)
|
|
|
|
+ __zypper__(root=root).call("rl", *removed)
|
2020-04-07 14:14:01 +02:00
|
|
|
|
|
|
|
return ret
|
|
|
|
|
|
|
|
|
2021-01-08 13:41:50 +01:00
|
|
|
-def remove_lock(packages, **kwargs): # pylint: disable=unused-argument
|
|
|
|
+def remove_lock(packages, root=None, **kwargs): # pylint: disable=unused-argument
|
|
|
|
"""
|
|
|
|
Remove specified package lock.
|
2019-06-21 10:17:26 +02:00
|
|
|
|
2020-04-07 14:14:01 +02:00
|
|
|
+ root
|
|
|
|
+ operate on a different root directory.
|
|
|
|
+
|
2021-01-08 13:41:50 +01:00
|
|
|
CLI Example:
|
2020-04-07 14:14:01 +02:00
|
|
|
|
|
|
|
.. code-block:: bash
|
2021-01-08 13:41:50 +01:00
|
|
|
@@ -2134,7 +2152,7 @@ def remove_lock(packages, **kwargs): # pylint: disable=unused-argument
|
|
|
|
salt.utils.versions.warn_until(
|
|
|
|
"Sodium", "This function is deprecated. Please use unhold() instead."
|
|
|
|
)
|
|
|
|
- locks = list_locks()
|
|
|
|
+ locks = list_locks(root)
|
|
|
|
try:
|
|
|
|
packages = list(__salt__["pkg_resource.parse_targets"](packages)[0].keys())
|
|
|
|
except MinionError as exc:
|
|
|
|
@@ -2158,6 +2176,9 @@ def hold(name=None, pkgs=None, **kwargs):
|
|
|
|
"""
|
|
|
|
Add a package lock. Specify packages to lock by exact name.
|
2019-06-21 10:17:26 +02:00
|
|
|
|
2020-04-07 14:14:01 +02:00
|
|
|
+ root
|
|
|
|
+ operate on a different root directory.
|
|
|
|
+
|
2021-01-08 13:41:50 +01:00
|
|
|
CLI Example:
|
2019-06-21 10:17:26 +02:00
|
|
|
|
2020-04-07 14:14:01 +02:00
|
|
|
.. code-block:: bash
|
2021-01-08 13:41:50 +01:00
|
|
|
@@ -2172,12 +2193,13 @@ def hold(name=None, pkgs=None, **kwargs):
|
|
|
|
:return:
|
|
|
|
"""
|
|
|
|
ret = {}
|
|
|
|
+ root = kwargs.get("root")
|
|
|
|
if (not name and not pkgs) or (name and pkgs):
|
|
|
|
raise CommandExecutionError("Name or packages must be specified.")
|
|
|
|
elif name:
|
|
|
|
pkgs = [name]
|
2019-06-21 10:17:26 +02:00
|
|
|
|
2021-01-08 13:41:50 +01:00
|
|
|
- locks = list_locks()
|
|
|
|
+ locks = list_locks(root=root)
|
|
|
|
added = []
|
|
|
|
try:
|
|
|
|
pkgs = list(__salt__["pkg_resource.parse_targets"](pkgs)[0].keys())
|
|
|
|
@@ -2193,12 +2215,12 @@ def hold(name=None, pkgs=None, **kwargs):
|
|
|
|
ret[pkg]["comment"] = "Package {} is already set to be held.".format(pkg)
|
2019-06-21 10:17:26 +02:00
|
|
|
|
2021-01-08 13:41:50 +01:00
|
|
|
if added:
|
|
|
|
- __zypper__.call("al", *added)
|
|
|
|
+ __zypper__(root=root).call("al", *added)
|
2019-06-21 10:17:26 +02:00
|
|
|
|
2021-01-08 13:41:50 +01:00
|
|
|
return ret
|
2019-06-21 10:17:26 +02:00
|
|
|
|
|
|
|
|
2021-01-08 13:41:50 +01:00
|
|
|
-def add_lock(packages, **kwargs): # pylint: disable=unused-argument
|
|
|
|
+def add_lock(packages, root=None, **kwargs): # pylint: disable=unused-argument
|
|
|
|
"""
|
|
|
|
Add a package lock. Specify packages to lock by exact name.
|
2019-06-21 10:17:26 +02:00
|
|
|
|
2021-01-08 13:41:50 +01:00
|
|
|
@@ -2216,7 +2238,7 @@ def add_lock(packages, **kwargs): # pylint: disable=unused-argument
|
|
|
|
salt.utils.versions.warn_until(
|
|
|
|
"Sodium", "This function is deprecated. Please use hold() instead."
|
|
|
|
)
|
|
|
|
- locks = list_locks()
|
|
|
|
+ locks = list_locks(root)
|
|
|
|
added = []
|
|
|
|
try:
|
|
|
|
packages = list(__salt__["pkg_resource.parse_targets"](packages)[0].keys())
|
|
|
|
@@ -2410,14 +2432,11 @@ def _get_installed_patterns(root=None):
|
|
|
|
# a real error.
|
|
|
|
output = __salt__["cmd.run"](cmd, ignore_retcode=True)
|
|
|
|
|
|
|
|
- # On <= SLE12SP4 we have patterns that have multiple names (alias)
|
|
|
|
- # and that are duplicated. The alias start with ".", so we filter
|
|
|
|
- # them.
|
|
|
|
- installed_patterns = {
|
|
|
|
+ installed_patterns = [
|
|
|
|
_pattern_name(line)
|
|
|
|
for line in output.splitlines()
|
|
|
|
- if line.startswith("pattern() = ") and not _pattern_name(line).startswith(".")
|
|
|
|
- }
|
|
|
|
+ if line.startswith("pattern() = ")
|
|
|
|
+ ]
|
|
|
|
|
|
|
|
patterns = {
|
|
|
|
k: v for k, v in _get_visible_patterns(root=root).items() if v["installed"]
|
|
|
|
@@ -2735,7 +2754,7 @@ def download(*packages, **kwargs):
|
|
|
|
)
|
2019-06-21 10:17:26 +02:00
|
|
|
|
|
|
|
|
2021-01-08 13:41:50 +01:00
|
|
|
-def list_downloaded(root=None, **kwargs):
|
|
|
|
+def list_downloaded(root=None):
|
|
|
|
"""
|
|
|
|
.. versionadded:: 2017.7.0
|
2019-06-21 10:17:26 +02:00
|
|
|
|
2020-04-07 14:14:01 +02:00
|
|
|
diff --git a/salt/states/btrfs.py b/salt/states/btrfs.py
|
2021-01-08 13:41:50 +01:00
|
|
|
index ec84d862c3..1374bbffb4 100644
|
|
|
|
--- a/salt/states/btrfs.py
|
2020-04-07 14:14:01 +02:00
|
|
|
+++ b/salt/states/btrfs.py
|
2021-01-08 13:41:50 +01:00
|
|
|
@@ -1,10 +1,31 @@
|
2020-04-07 14:14:01 +02:00
|
|
|
+#
|
|
|
|
+# Author: Alberto Planas <aplanas@suse.com>
|
|
|
|
+#
|
|
|
|
+# Copyright 2018 SUSE LINUX GmbH, Nuernberg, Germany.
|
|
|
|
+#
|
|
|
|
+# Licensed to the Apache Software Foundation (ASF) under one
|
|
|
|
+# or more contributor license agreements. See the NOTICE file
|
|
|
|
+# distributed with this work for additional information
|
|
|
|
+# regarding copyright ownership. The ASF licenses this file
|
|
|
|
+# to you under the Apache License, Version 2.0 (the
|
|
|
|
+# "License"); you may not use this file except in compliance
|
|
|
|
+# with the License. You may obtain a copy of the License at
|
|
|
|
+#
|
|
|
|
+# http://www.apache.org/licenses/LICENSE-2.0
|
|
|
|
+#
|
|
|
|
+# Unless required by applicable law or agreed to in writing,
|
|
|
|
+# software distributed under the License is distributed on an
|
|
|
|
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
|
|
|
+# KIND, either express or implied. See the License for the
|
|
|
|
+# specific language governing permissions and limitations
|
|
|
|
+# under the License.
|
2019-06-21 10:17:26 +02:00
|
|
|
+
|
2021-01-08 13:41:50 +01:00
|
|
|
"""
|
|
|
|
:maintainer: Alberto Planas <aplanas@suse.com>
|
|
|
|
:maturity: new
|
|
|
|
:depends: None
|
|
|
|
:platform: Linux
|
|
|
|
"""
|
|
|
|
-
|
|
|
|
import functools
|
|
|
|
import logging
|
|
|
|
import os.path
|
|
|
|
@@ -22,7 +43,7 @@ def _mount(device, use_default):
|
|
|
|
"""
|
|
|
|
Mount the device in a temporary place.
|
|
|
|
"""
|
|
|
|
- opts = "defaults" if use_default else "subvol=/"
|
|
|
|
+ opts = "subvol=/" if not use_default else "defaults"
|
|
|
|
dest = tempfile.mkdtemp()
|
|
|
|
res = __states__["mount.mounted"](
|
|
|
|
dest, device=device, fstype="btrfs", opts=opts, persist=False
|
|
|
|
@@ -82,8 +103,8 @@ def __mount_device(action):
|
|
|
|
|
|
|
|
@functools.wraps(action)
|
|
|
|
def wrapper(*args, **kwargs):
|
|
|
|
- name = kwargs.get("name", args[0] if args else None)
|
|
|
|
- device = kwargs.get("device", args[1] if len(args) > 1 else None)
|
|
|
|
+ name = kwargs["name"]
|
|
|
|
+ device = kwargs["device"]
|
|
|
|
use_default = kwargs.get("use_default", False)
|
|
|
|
|
|
|
|
ret = {
|
|
|
|
@@ -100,9 +121,10 @@ def __mount_device(action):
|
|
|
|
ret["comment"].append(msg)
|
|
|
|
kwargs["__dest"] = dest
|
|
|
|
ret = action(*args, **kwargs)
|
|
|
|
- except Exception as e: # pylint: disable=broad-except
|
|
|
|
- log.error("""Traceback: {}""".format(traceback.format_exc()))
|
|
|
|
- ret["comment"].append(e)
|
2020-04-07 14:14:01 +02:00
|
|
|
+ except Exception:
|
2021-01-08 13:41:50 +01:00
|
|
|
+ tb = str(traceback.format_exc())
|
|
|
|
+ log.exception("Exception captured in wrapper %s", tb)
|
|
|
|
+ ret["comment"].append(tb)
|
|
|
|
finally:
|
|
|
|
if device:
|
|
|
|
_umount(dest)
|
|
|
|
@@ -165,7 +187,7 @@ def subvolume_created(
|
|
|
|
if __opts__["test"]:
|
|
|
|
ret["result"] = None
|
|
|
|
if not exists:
|
|
|
|
- ret["changes"][name] = "Subvolume {} will be created".format(name)
|
|
|
|
+ ret["comment"].append("Subvolume {} will be created".format(name))
|
|
|
|
return ret
|
|
|
|
|
|
|
|
if not exists:
|
|
|
|
@@ -231,7 +253,7 @@ def subvolume_deleted(name, device, commit=False, __dest=None):
|
|
|
|
if __opts__["test"]:
|
|
|
|
ret["result"] = None
|
|
|
|
if exists:
|
|
|
|
- ret["changes"][name] = "Subvolume {} will be removed".format(name)
|
|
|
|
+ ret["comment"].append("Subvolume {} will be removed".format(name))
|
|
|
|
return ret
|
|
|
|
|
|
|
|
# If commit is set, we wait until all is over
|
|
|
|
@@ -344,10 +366,10 @@ def properties(name, device, use_default=False, __dest=None, **properties):
|
|
|
|
if __opts__["test"]:
|
|
|
|
ret["result"] = None
|
|
|
|
if properties_to_set:
|
|
|
|
- ret["changes"] = properties_to_set
|
|
|
|
+ msg = "Properties {} will be changed in {}".format(properties_to_set, name)
|
|
|
|
else:
|
|
|
|
msg = "No properties will be changed in {}".format(name)
|
|
|
|
- ret["comment"].append(msg)
|
|
|
|
+ ret["comment"].append(msg)
|
|
|
|
return ret
|
|
|
|
|
|
|
|
if properties_to_set:
|
2020-04-07 14:14:01 +02:00
|
|
|
diff --git a/salt/states/file.py b/salt/states/file.py
|
2021-01-08 13:41:50 +01:00
|
|
|
index 9873f8dcc7..9e24e389d8 100644
|
2020-04-07 14:14:01 +02:00
|
|
|
--- a/salt/states/file.py
|
|
|
|
+++ b/salt/states/file.py
|
2021-01-08 13:41:50 +01:00
|
|
|
@@ -290,7 +290,6 @@ import sys
|
2020-04-07 14:14:01 +02:00
|
|
|
import time
|
|
|
|
import traceback
|
2021-01-08 13:41:50 +01:00
|
|
|
from collections import defaultdict
|
|
|
|
-from collections.abc import Iterable, Mapping
|
|
|
|
from datetime import date, datetime # python3 problem in the making?
|
|
|
|
|
|
|
|
import salt.loader
|
|
|
|
@@ -312,6 +311,12 @@ from salt.ext.six.moves.urllib.parse import urlparse as _urlparse
|
|
|
|
from salt.serializers import DeserializationError
|
|
|
|
from salt.state import get_accumulator_dir as _get_accumulator_dir
|
|
|
|
|
2020-04-07 14:14:01 +02:00
|
|
|
+try:
|
|
|
|
+ from collections.abc import Iterable, Mapping
|
|
|
|
+except ImportError:
|
|
|
|
+ from collections import Iterable, Mapping
|
2021-01-08 13:41:50 +01:00
|
|
|
+
|
|
|
|
+
|
|
|
|
if salt.utils.platform.is_windows():
|
|
|
|
import salt.utils.win_dacl
|
|
|
|
import salt.utils.win_functions
|
2020-04-07 14:14:01 +02:00
|
|
|
diff --git a/salt/states/loop.py b/salt/states/loop.py
|
2021-01-08 13:41:50 +01:00
|
|
|
index 25e54e1faf..de37b7d60c 100644
|
2020-04-07 14:14:01 +02:00
|
|
|
--- a/salt/states/loop.py
|
|
|
|
+++ b/salt/states/loop.py
|
2021-01-08 13:41:50 +01:00
|
|
|
@@ -1,4 +1,3 @@
|
|
|
|
-# -*- coding: utf-8 -*-
|
|
|
|
"""
|
|
|
|
Loop state
|
|
|
|
|
|
|
|
@@ -58,8 +57,6 @@ The function :py:func:`data.subdict_match <salt.utils.data.subdict_match>` check
|
|
|
|
instances: "{{ instance }}"
|
|
|
|
"""
|
|
|
|
|
|
|
|
-# Import python libs
|
|
|
|
-from __future__ import absolute_import, print_function, unicode_literals
|
|
|
|
|
|
|
|
import logging
|
|
|
|
import operator
|
|
|
|
@@ -99,7 +96,7 @@ def until(name, m_args=None, m_kwargs=None, condition=None, period=1, timeout=60
|
|
|
|
m_kwargs = {}
|
|
|
|
|
|
|
|
if name not in __salt__:
|
|
|
|
- ret["comment"] = "Cannot find module {0}".format(name)
|
|
|
|
+ ret["comment"] = "Cannot find module {}".format(name)
|
|
|
|
elif condition is None:
|
|
|
|
ret["comment"] = "An exit condition must be specified"
|
|
|
|
elif not isinstance(period, (int, float)):
|
|
|
|
@@ -107,7 +104,7 @@ def until(name, m_args=None, m_kwargs=None, condition=None, period=1, timeout=60
|
|
|
|
elif not isinstance(timeout, (int, float)):
|
|
|
|
ret["comment"] = "Timeout must be specified as a float in seconds"
|
|
|
|
elif __opts__["test"]:
|
|
|
|
- ret["comment"] = "The execution module {0} will be run".format(name)
|
|
|
|
+ ret["comment"] = "The execution module {} will be run".format(name)
|
|
|
|
ret["result"] = None
|
2020-04-07 14:14:01 +02:00
|
|
|
else:
|
2021-01-08 13:41:50 +01:00
|
|
|
if m_args is None:
|
|
|
|
@@ -120,11 +117,11 @@ def until(name, m_args=None, m_kwargs=None, condition=None, period=1, timeout=60
|
|
|
|
m_ret = __salt__[name](*m_args, **m_kwargs)
|
|
|
|
if eval(condition): # pylint: disable=W0123
|
|
|
|
ret["result"] = True
|
|
|
|
- ret["comment"] = "Condition {0} was met".format(condition)
|
|
|
|
+ ret["comment"] = "Condition {} was met".format(condition)
|
|
|
|
break
|
|
|
|
time.sleep(period)
|
2020-04-07 14:14:01 +02:00
|
|
|
else:
|
2021-01-08 13:41:50 +01:00
|
|
|
- ret["comment"] = "Timed out while waiting for condition {0}".format(
|
|
|
|
+ ret["comment"] = "Timed out while waiting for condition {}".format(
|
|
|
|
condition
|
|
|
|
)
|
|
|
|
return ret
|
|
|
|
@@ -185,6 +182,10 @@ def until_no_eval(
|
|
|
|
)
|
|
|
|
if ret["comment"]:
|
2020-04-07 14:14:01 +02:00
|
|
|
return ret
|
2021-01-08 13:41:50 +01:00
|
|
|
+ if not m_args:
|
|
|
|
+ m_args = []
|
|
|
|
+ if not m_kwargs:
|
|
|
|
+ m_kwargs = {}
|
2019-06-21 10:17:26 +02:00
|
|
|
|
2021-01-08 13:41:50 +01:00
|
|
|
if init_wait:
|
|
|
|
time.sleep(init_wait)
|
2020-04-07 14:14:01 +02:00
|
|
|
diff --git a/salt/states/pkgrepo.py b/salt/states/pkgrepo.py
|
2021-01-08 13:41:50 +01:00
|
|
|
index 99440874c2..70cb7a1c7e 100644
|
2020-04-07 14:14:01 +02:00
|
|
|
--- a/salt/states/pkgrepo.py
|
|
|
|
+++ b/salt/states/pkgrepo.py
|
2021-01-08 13:41:50 +01:00
|
|
|
@@ -92,7 +92,6 @@ package managers are APT, DNF, YUM and Zypper. Here is some example SLS:
|
2020-04-07 14:14:01 +02:00
|
|
|
|
2021-01-08 13:41:50 +01:00
|
|
|
"""
|
|
|
|
|
|
|
|
-# Import Python libs
|
|
|
|
|
|
|
|
import sys
|
|
|
|
|
|
|
|
@@ -101,11 +100,7 @@ import salt.utils.files
|
|
|
|
import salt.utils.pkg.deb
|
|
|
|
import salt.utils.pkg.rpm
|
|
|
|
import salt.utils.versions
|
|
|
|
-
|
|
|
|
-# Import salt libs
|
|
|
|
from salt.exceptions import CommandExecutionError, SaltInvocationError
|
|
|
|
-
|
|
|
|
-# Import 3rd-party libs
|
|
|
|
from salt.state import STATE_INTERNAL_KEYWORDS as _STATE_INTERNAL_KEYWORDS
|
2020-04-07 14:14:01 +02:00
|
|
|
|
|
|
|
|
|
|
|
diff --git a/salt/utils/oset.py b/salt/utils/oset.py
|
2021-01-08 13:41:50 +01:00
|
|
|
index d6fb961ede..31a6a4acca 100644
|
2020-04-07 14:14:01 +02:00
|
|
|
--- a/salt/utils/oset.py
|
|
|
|
+++ b/salt/utils/oset.py
|
2021-01-08 13:41:50 +01:00
|
|
|
@@ -1,4 +1,3 @@
|
|
|
|
-# -*- coding: utf-8 -*-
|
|
|
|
"""
|
|
|
|
|
|
|
|
Available at repository https://github.com/LuminosoInsight/ordered-set
|
|
|
|
@@ -21,9 +20,10 @@ Rob Speer's changes are as follows:
|
|
|
|
- added a __getstate__ and __setstate__ so it can be pickled
|
2020-04-07 14:14:01 +02:00
|
|
|
- added __getitem__
|
2021-01-08 13:41:50 +01:00
|
|
|
"""
|
|
|
|
-from __future__ import absolute_import, print_function, unicode_literals
|
|
|
|
-
|
|
|
|
-from collections.abc import MutableSet
|
2020-04-07 14:14:01 +02:00
|
|
|
+try:
|
|
|
|
+ from collections.abc import MutableSet
|
|
|
|
+except ImportError:
|
|
|
|
+ from collections import MutableSet
|
|
|
|
|
|
|
|
SLICE_ALL = slice(None)
|
2021-01-08 13:41:50 +01:00
|
|
|
__version__ = "2.0.1"
|
2019-06-21 10:17:26 +02:00
|
|
|
diff --git a/tests/unit/modules/test_kubeadm.py b/tests/unit/modules/test_kubeadm.py
|
2021-01-08 13:41:50 +01:00
|
|
|
index af319e01b1..91e4a9e68e 100644
|
|
|
|
--- a/tests/unit/modules/test_kubeadm.py
|
2019-06-21 10:17:26 +02:00
|
|
|
+++ b/tests/unit/modules/test_kubeadm.py
|
2021-01-08 13:41:50 +01:00
|
|
|
@@ -1,20 +1,41 @@
|
2019-06-21 10:17:26 +02:00
|
|
|
+#
|
|
|
|
+# Author: Alberto Planas <aplanas@suse.com>
|
|
|
|
+#
|
|
|
|
+# Copyright 2019 SUSE LINUX GmbH, Nuernberg, Germany.
|
|
|
|
+#
|
|
|
|
+# Licensed to the Apache Software Foundation (ASF) under one
|
|
|
|
+# or more contributor license agreements. See the NOTICE file
|
|
|
|
+# distributed with this work for additional information
|
|
|
|
+# regarding copyright ownership. The ASF licenses this file
|
|
|
|
+# to you under the Apache License, Version 2.0 (the
|
|
|
|
+# "License"); you may not use this file except in compliance
|
|
|
|
+# with the License. You may obtain a copy of the License at
|
|
|
|
+#
|
|
|
|
+# http://www.apache.org/licenses/LICENSE-2.0
|
|
|
|
+#
|
|
|
|
+# Unless required by applicable law or agreed to in writing,
|
|
|
|
+# software distributed under the License is distributed on an
|
|
|
|
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
|
|
|
+# KIND, either express or implied. See the License for the
|
|
|
|
+# specific language governing permissions and limitations
|
|
|
|
+# under the License.
|
|
|
|
+
|
2021-01-08 13:41:50 +01:00
|
|
|
import pytest
|
|
|
|
import salt.modules.kubeadm as kubeadm
|
|
|
|
from salt.exceptions import CommandExecutionError
|
|
|
|
-
|
|
|
|
-# Import Salt Testing Libs
|
|
|
|
from tests.support.mixins import LoaderModuleMockMixin
|
|
|
|
-from tests.support.mock import MagicMock, patch
|
|
|
|
-from tests.support.unit import TestCase
|
|
|
|
+from tests.support.mock import NO_MOCK, NO_MOCK_REASON, MagicMock, patch
|
2019-06-21 10:17:26 +02:00
|
|
|
+from tests.support.unit import TestCase, skipIf
|
2021-01-08 13:41:50 +01:00
|
|
|
|
|
|
|
|
2019-06-21 10:17:26 +02:00
|
|
|
+@skipIf(NO_MOCK, NO_MOCK_REASON)
|
2021-01-08 13:41:50 +01:00
|
|
|
class KubeAdmTestCase(TestCase, LoaderModuleMockMixin):
|
|
|
|
"""
|
|
|
|
Test cases for salt.modules.kubeadm
|
|
|
|
"""
|
|
|
|
|
|
|
|
def setup_loader_modules(self):
|
|
|
|
- return {kubeadm: {"__salt__": {}, "__utils__": {}}}
|
|
|
|
+ return {kubeadm: {"__salt__": {}, "__utils__": {},}}
|
|
|
|
|
|
|
|
def test_version(self):
|
|
|
|
"""
|
|
|
|
@@ -223,18 +244,6 @@ class KubeAdmTestCase(TestCase, LoaderModuleMockMixin):
|
|
|
|
with pytest.raises(CommandExecutionError):
|
|
|
|
assert kubeadm.token_generate()
|
|
|
|
|
|
|
|
- def test_token_empty(self):
|
|
|
|
- """
|
|
|
|
- Test kuebadm.token_list when no outout
|
|
|
|
- """
|
|
|
|
- result = {"retcode": 0, "stdout": ""}
|
|
|
|
- salt_mock = {
|
|
|
|
- "cmd.run_all": MagicMock(return_value=result),
|
|
|
|
- }
|
|
|
|
- with patch.dict(kubeadm.__salt__, salt_mock):
|
|
|
|
- assert kubeadm.token_list() == []
|
|
|
|
- salt_mock["cmd.run_all"].assert_called_with(["kubeadm", "token", "list"])
|
|
|
|
-
|
|
|
|
def test_token_list(self):
|
|
|
|
"""
|
|
|
|
Test kuebadm.token_list without parameters
|
2019-06-21 10:17:26 +02:00
|
|
|
diff --git a/tests/unit/modules/test_rpm_lowpkg.py b/tests/unit/modules/test_rpm_lowpkg.py
|
2021-01-08 13:41:50 +01:00
|
|
|
index e7e8230510..b41e8daf17 100644
|
2019-06-21 10:17:26 +02:00
|
|
|
--- a/tests/unit/modules/test_rpm_lowpkg.py
|
|
|
|
+++ b/tests/unit/modules/test_rpm_lowpkg.py
|
2021-01-08 13:41:50 +01:00
|
|
|
@@ -1,15 +1,9 @@
|
|
|
|
-# -*- coding: utf-8 -*-
|
|
|
|
"""
|
|
|
|
:codeauthor: Jayesh Kariya <jayeshk@saltstack.com>
|
|
|
|
"""
|
2019-06-21 10:17:26 +02:00
|
|
|
|
2021-01-08 13:41:50 +01:00
|
|
|
-# Import Python Libs
|
|
|
|
-from __future__ import absolute_import
|
2020-04-07 14:14:01 +02:00
|
|
|
|
2021-01-08 13:41:50 +01:00
|
|
|
-# Import Salt Libs
|
|
|
|
import salt.modules.rpm_lowpkg as rpm
|
|
|
|
-
|
|
|
|
-# Import Salt Testing Libs
|
|
|
|
from tests.support.mixins import LoaderModuleMockMixin
|
|
|
|
from tests.support.mock import MagicMock, patch
|
|
|
|
from tests.support.unit import TestCase
|
|
|
|
@@ -108,6 +102,15 @@ class RpmTestCase(TestCase, LoaderModuleMockMixin):
|
|
|
|
self.assertDictEqual(rpm.file_dict("httpd"), {"errors": [], "packages": {}})
|
|
|
|
self.assertFalse(_called_with_root(mock))
|
2020-04-07 14:14:01 +02:00
|
|
|
|
2019-06-21 10:17:26 +02:00
|
|
|
+ def test_file_dict_root(self):
|
2021-01-08 13:41:50 +01:00
|
|
|
+ """
|
2019-06-21 10:17:26 +02:00
|
|
|
+ Test if it list the files that belong to a package
|
2021-01-08 13:41:50 +01:00
|
|
|
+ """
|
|
|
|
+ mock = MagicMock(return_value="")
|
|
|
|
+ with patch.dict(rpm.__salt__, {"cmd.run": mock}):
|
|
|
|
+ self.assertDictEqual(rpm.file_dict("httpd"), {"errors": [], "packages": {}})
|
2019-06-21 10:17:26 +02:00
|
|
|
+ self.assertFalse(_called_with_root(mock))
|
2020-04-07 14:14:01 +02:00
|
|
|
+
|
2021-01-08 13:41:50 +01:00
|
|
|
def test_file_dict_root(self):
|
|
|
|
"""
|
|
|
|
Test if it list the files that belong to a package
|
2019-06-21 10:17:26 +02:00
|
|
|
diff --git a/tests/unit/modules/test_systemd_service.py b/tests/unit/modules/test_systemd_service.py
|
2021-01-08 13:41:50 +01:00
|
|
|
index 32741969ce..bbd89bb3d0 100644
|
2019-06-21 10:17:26 +02:00
|
|
|
--- a/tests/unit/modules/test_systemd_service.py
|
|
|
|
+++ b/tests/unit/modules/test_systemd_service.py
|
2021-01-08 13:41:50 +01:00
|
|
|
@@ -1,23 +1,16 @@
|
|
|
|
-# -*- coding: utf-8 -*-
|
|
|
|
"""
|
|
|
|
:codeauthor: Rahul Handay <rahulha@saltstack.com>
|
|
|
|
"""
|
|
|
|
|
|
|
|
-# Import Python libs
|
|
|
|
-from __future__ import absolute_import, print_function, unicode_literals
|
|
|
|
|
2019-06-21 10:17:26 +02:00
|
|
|
import os
|
|
|
|
|
2021-01-08 13:41:50 +01:00
|
|
|
import pytest
|
|
|
|
-
|
|
|
|
-# Import Salt Libs
|
|
|
|
import salt.modules.systemd_service as systemd
|
|
|
|
import salt.utils.systemd
|
|
|
|
from salt.exceptions import CommandExecutionError
|
2019-06-21 10:17:26 +02:00
|
|
|
from tests.support.mixins import LoaderModuleMockMixin
|
2021-01-08 13:41:50 +01:00
|
|
|
-from tests.support.mock import MagicMock, patch
|
|
|
|
-
|
|
|
|
-# Import Salt Testing Libs
|
|
|
|
+from tests.support.mock import MagicMock, Mock, patch
|
2020-04-07 14:14:01 +02:00
|
|
|
from tests.support.unit import TestCase
|
2019-06-21 10:17:26 +02:00
|
|
|
|
2021-01-08 13:41:50 +01:00
|
|
|
_SYSTEMCTL_STATUS = {
|
|
|
|
@@ -93,7 +86,7 @@ class SystemdTestCase(TestCase, LoaderModuleMockMixin):
|
|
|
|
cmd_mock = MagicMock(return_value=_LIST_UNIT_FILES)
|
|
|
|
listdir_mock = MagicMock(return_value=["foo", "bar", "baz", "README"])
|
|
|
|
sd_mock = MagicMock(
|
|
|
|
- return_value=set([x.replace(".service", "") for x in _SYSTEMCTL_STATUS])
|
|
|
|
+ return_value={x.replace(".service", "") for x in _SYSTEMCTL_STATUS}
|
|
|
|
)
|
|
|
|
access_mock = MagicMock(
|
|
|
|
side_effect=lambda x, y: x
|
|
|
|
@@ -124,7 +117,7 @@ class SystemdTestCase(TestCase, LoaderModuleMockMixin):
|
|
|
|
# only 'baz' will be considered an enabled sysv service).
|
|
|
|
listdir_mock = MagicMock(return_value=["foo", "bar", "baz", "README"])
|
|
|
|
sd_mock = MagicMock(
|
|
|
|
- return_value=set([x.replace(".service", "") for x in _SYSTEMCTL_STATUS])
|
|
|
|
+ return_value={x.replace(".service", "") for x in _SYSTEMCTL_STATUS}
|
|
|
|
)
|
|
|
|
access_mock = MagicMock(
|
|
|
|
side_effect=lambda x, y: x
|
2019-06-21 10:17:26 +02:00
|
|
|
diff --git a/tests/unit/modules/test_zypperpkg.py b/tests/unit/modules/test_zypperpkg.py
|
2021-01-08 13:41:50 +01:00
|
|
|
index b07f9a3af7..032785395e 100644
|
2019-06-21 10:17:26 +02:00
|
|
|
--- a/tests/unit/modules/test_zypperpkg.py
|
|
|
|
+++ b/tests/unit/modules/test_zypperpkg.py
|
2021-01-08 13:41:50 +01:00
|
|
|
@@ -1639,6 +1639,9 @@ Repository 'DUMMY' not found by its alias, number, or URI.
|
|
|
|
self.assertTrue(
|
|
|
|
zypper.__zypper__(root=None).refreshable.xml.call.call_count == 0
|
2019-06-21 10:17:26 +02:00
|
|
|
)
|
2021-01-08 13:41:50 +01:00
|
|
|
+ self.assertTrue(
|
|
|
|
+ zypper.__zypper__(root=None).refreshable.xml.call.call_count == 0
|
|
|
|
+ )
|
2019-06-21 10:17:26 +02:00
|
|
|
|
|
|
|
def test_repo_noadd_nomod_ref(self):
|
2021-01-08 13:41:50 +01:00
|
|
|
"""
|
|
|
|
@@ -1919,8 +1922,8 @@ Repository 'DUMMY' not found by its alias, number, or URI.
|
|
|
|
def test__get_installed_patterns(self, get_visible_patterns):
|
|
|
|
"""Test installed patterns in the system"""
|
|
|
|
get_visible_patterns.return_value = {
|
|
|
|
- "package-a": {"installed": True, "summary": "description a"},
|
|
|
|
- "package-b": {"installed": False, "summary": "description b"},
|
|
|
|
+ "package-a": {"installed": True, "summary": "description a",},
|
|
|
|
+ "package-b": {"installed": False, "summary": "description b",},
|
|
|
|
}
|
|
|
|
|
|
|
|
salt_mock = {
|
|
|
|
@@ -1932,59 +1935,18 @@ pattern() = package-c"""
|
|
|
|
}
|
|
|
|
with patch.dict("salt.modules.zypperpkg.__salt__", salt_mock):
|
|
|
|
assert zypper._get_installed_patterns() == {
|
|
|
|
- "package-a": {"installed": True, "summary": "description a"},
|
|
|
|
- "package-c": {"installed": True, "summary": "Non-visible pattern"},
|
|
|
|
- }
|
|
|
|
-
|
|
|
|
- @patch("salt.modules.zypperpkg._get_visible_patterns")
|
|
|
|
- def test__get_installed_patterns_with_alias(self, get_visible_patterns):
|
|
|
|
- """Test installed patterns in the system if they have alias"""
|
|
|
|
- get_visible_patterns.return_value = {
|
|
|
|
- "package-a": {"installed": True, "summary": "description a"},
|
|
|
|
- "package-b": {"installed": False, "summary": "description b"},
|
|
|
|
- }
|
|
|
|
-
|
|
|
|
- salt_mock = {
|
|
|
|
- "cmd.run": MagicMock(
|
|
|
|
- return_value="""pattern() = .package-a-alias
|
|
|
|
-pattern() = package-a
|
|
|
|
-pattern-visible()
|
|
|
|
-pattern() = package-c"""
|
|
|
|
- ),
|
|
|
|
- }
|
|
|
|
- with patch.dict("salt.modules.zypperpkg.__salt__", salt_mock):
|
|
|
|
- assert zypper._get_installed_patterns() == {
|
|
|
|
- "package-a": {"installed": True, "summary": "description a"},
|
|
|
|
- "package-c": {"installed": True, "summary": "Non-visible pattern"},
|
|
|
|
+ "package-a": {"installed": True, "summary": "description a",},
|
|
|
|
+ "package-c": {"installed": True, "summary": "Non-visible pattern",},
|
|
|
|
}
|
2019-06-21 10:17:26 +02:00
|
|
|
|
2021-01-08 13:41:50 +01:00
|
|
|
@patch("salt.modules.zypperpkg._get_visible_patterns")
|
|
|
|
def test_list_patterns(self, get_visible_patterns):
|
|
|
|
"""Test available patterns in the repo"""
|
|
|
|
get_visible_patterns.return_value = {
|
|
|
|
- "package-a": {"installed": True, "summary": "description a"},
|
|
|
|
- "package-b": {"installed": False, "summary": "description b"},
|
|
|
|
+ "package-a": {"installed": True, "summary": "description a",},
|
|
|
|
+ "package-b": {"installed": False, "summary": "description b",},
|
|
|
|
}
|
|
|
|
assert zypper.list_patterns() == {
|
|
|
|
- "package-a": {"installed": True, "summary": "description a"},
|
|
|
|
- "package-b": {"installed": False, "summary": "description b"},
|
|
|
|
- }
|
|
|
|
-
|
|
|
|
- def test__clean_cache_empty(self):
|
|
|
|
- """Test that an empty cached can be cleaned"""
|
|
|
|
- context = {}
|
|
|
|
- with patch.dict(zypper.__context__, context):
|
|
|
|
- zypper._clean_cache()
|
|
|
|
- assert context == {}
|
|
|
|
-
|
|
|
|
- def test__clean_cache_filled(self):
|
|
|
|
- """Test that a filled cached can be cleaned"""
|
|
|
|
- context = {
|
|
|
|
- "pkg.list_pkgs_/mnt_[]": None,
|
|
|
|
- "pkg.list_pkgs_/mnt_[patterns]": None,
|
|
|
|
- "pkg.list_provides": None,
|
|
|
|
- "pkg.other_data": None,
|
|
|
|
+ "package-a": {"installed": True, "summary": "description a",},
|
|
|
|
+ "package-b": {"installed": False, "summary": "description b",},
|
|
|
|
}
|
|
|
|
- with patch.dict(zypper.__context__, context):
|
|
|
|
- zypper._clean_cache()
|
|
|
|
- self.assertEqual(zypper.__context__, {"pkg.other_data": None})
|
2019-06-21 10:17:26 +02:00
|
|
|
diff --git a/tests/unit/states/test_btrfs.py b/tests/unit/states/test_btrfs.py
index fdbf06bd13..74e44641b8 100644
--- a/tests/unit/states/test_btrfs.py
+++ b/tests/unit/states/test_btrfs.py
@@ -1,27 +1,45 @@
+#
+# Author: Alberto Planas <aplanas@suse.com>
+#
+# Copyright 2018 SUSE LINUX GmbH, Nuernberg, Germany.
+#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations
+# under the License.
+
 """
 :maintainer:    Alberto Planas <aplanas@suse.com>
 :platform:      Linux
 """
-
 import pytest
 import salt.states.btrfs as btrfs
-import salt.utils.platform
 from salt.exceptions import CommandExecutionError
-
-# Import Salt Testing Libs
 from tests.support.mixins import LoaderModuleMockMixin
-from tests.support.mock import MagicMock, patch
+from tests.support.mock import NO_MOCK, NO_MOCK_REASON, MagicMock, patch
 from tests.support.unit import TestCase, skipIf
 
 
-@skipIf(salt.utils.platform.is_windows(), "Non-Windows feature")
+@skipIf(NO_MOCK, NO_MOCK_REASON)
 class BtrfsTestCase(TestCase, LoaderModuleMockMixin):
     """
     Test cases for salt.states.btrfs
     """
 
     def setup_loader_modules(self):
-        return {btrfs: {"__salt__": {}, "__states__": {}, "__utils__": {}}}
+        return {btrfs: {"__salt__": {}, "__states__": {}, "__utils__": {},}}
 
     @patch("salt.states.btrfs._umount")
     @patch("tempfile.mkdtemp")
@@ -112,9 +130,9 @@ class BtrfsTestCase(TestCase, LoaderModuleMockMixin):
         """
         salt_mock = {
             "btrfs.subvolume_show": MagicMock(
-                return_value={"@/var": {"subvolume id": "256"}}
+                return_value={"@/var": {"subvolume id": "256"},}
             ),
-            "btrfs.subvolume_get_default": MagicMock(return_value={"id": "5"}),
+            "btrfs.subvolume_get_default": MagicMock(return_value={"id": "5",}),
         }
         with patch.dict(btrfs.__salt__, salt_mock):
             assert not btrfs._is_default("/tmp/xxx/@/var", "/tmp/xxx", "@/var")
@@ -127,9 +145,9 @@ class BtrfsTestCase(TestCase, LoaderModuleMockMixin):
         """
         salt_mock = {
             "btrfs.subvolume_show": MagicMock(
-                return_value={"@/var": {"subvolume id": "256"}}
+                return_value={"@/var": {"subvolume id": "256"},}
             ),
-            "btrfs.subvolume_get_default": MagicMock(return_value={"id": "256"}),
+            "btrfs.subvolume_get_default": MagicMock(return_value={"id": "256",}),
         }
         with patch.dict(btrfs.__salt__, salt_mock):
             assert btrfs._is_default("/tmp/xxx/@/var", "/tmp/xxx", "@/var")
@@ -142,7 +160,7 @@ class BtrfsTestCase(TestCase, LoaderModuleMockMixin):
         """
         salt_mock = {
             "btrfs.subvolume_show": MagicMock(
-                return_value={"@/var": {"subvolume id": "256"}}
+                return_value={"@/var": {"subvolume id": "256"},}
             ),
             "btrfs.subvolume_set_default": MagicMock(return_value=True),
         }
@@ -158,7 +176,7 @@ class BtrfsTestCase(TestCase, LoaderModuleMockMixin):
         Test if the subvolume is copy on write.
         """
         salt_mock = {
-            "file.lsattr": MagicMock(return_value={"/tmp/xxx/@/var": ["C"]}),
+            "file.lsattr": MagicMock(return_value={"/tmp/xxx/@/var": ["C"],}),
         }
         with patch.dict(btrfs.__salt__, salt_mock):
             assert not btrfs._is_cow("/tmp/xxx/@/var")
@@ -169,7 +187,7 @@ class BtrfsTestCase(TestCase, LoaderModuleMockMixin):
         Test if the subvolume is copy on write.
         """
         salt_mock = {
-            "file.lsattr": MagicMock(return_value={"/tmp/xxx/@/var": []}),
+            "file.lsattr": MagicMock(return_value={"/tmp/xxx/@/var": [],}),
         }
         with patch.dict(btrfs.__salt__, salt_mock):
             assert btrfs._is_cow("/tmp/xxx/@/var")
@@ -188,7 +206,6 @@ class BtrfsTestCase(TestCase, LoaderModuleMockMixin):
             "/tmp/xxx/@/var", operator="add", attributes="C"
         )
 
-    @skipIf(salt.utils.platform.is_windows(), "Skip on Windows")
     @patch("salt.states.btrfs._umount")
     @patch("salt.states.btrfs._mount")
     def test_subvolume_created_exists(self, mount, umount):
@@ -215,34 +232,6 @@ class BtrfsTestCase(TestCase, LoaderModuleMockMixin):
         mount.assert_called_once()
         umount.assert_called_once()
 
-    @skipIf(salt.utils.platform.is_windows(), "Skip on Windows")
-    @patch("salt.states.btrfs._umount")
-    @patch("salt.states.btrfs._mount")
-    def test_subvolume_created_exists_decorator(self, mount, umount):
-        """
-        Test creating a subvolume using a non-kwargs call
-        """
-        mount.return_value = "/tmp/xxx"
-        salt_mock = {
-            "btrfs.subvolume_exists": MagicMock(return_value=True),
-        }
-        opts_mock = {
-            "test": False,
-        }
-        with patch.dict(btrfs.__salt__, salt_mock), patch.dict(
-            btrfs.__opts__, opts_mock
-        ):
-            assert btrfs.subvolume_created("@/var", "/dev/sda1") == {
-                "name": "@/var",
-                "result": True,
-                "changes": {},
-                "comment": ["Subvolume @/var already present"],
-            }
-            salt_mock["btrfs.subvolume_exists"].assert_called_with("/tmp/xxx/@/var")
-            mount.assert_called_once()
-            umount.assert_called_once()
-
-    @skipIf(salt.utils.platform.is_windows(), "Skip on Windows")
     @patch("salt.states.btrfs._umount")
     @patch("salt.states.btrfs._mount")
     def test_subvolume_created_exists_test(self, mount, umount):
@@ -269,7 +258,6 @@ class BtrfsTestCase(TestCase, LoaderModuleMockMixin):
         mount.assert_called_once()
         umount.assert_called_once()
 
-    @skipIf(salt.utils.platform.is_windows(), "Skip on Windows")
     @patch("salt.states.btrfs._is_default")
     @patch("salt.states.btrfs._umount")
     @patch("salt.states.btrfs._mount")
@@ -300,7 +288,6 @@ class BtrfsTestCase(TestCase, LoaderModuleMockMixin):
         mount.assert_called_once()
         umount.assert_called_once()
 
-    @skipIf(salt.utils.platform.is_windows(), "Skip on Windows")
     @patch("salt.states.btrfs._set_default")
     @patch("salt.states.btrfs._is_default")
     @patch("salt.states.btrfs._umount")
@@ -335,7 +322,6 @@ class BtrfsTestCase(TestCase, LoaderModuleMockMixin):
         mount.assert_called_once()
         umount.assert_called_once()
 
-    @skipIf(salt.utils.platform.is_windows(), "Skip on Windows")
     @patch("salt.states.btrfs._set_default")
     @patch("salt.states.btrfs._is_default")
     @patch("salt.states.btrfs._umount")
@@ -373,7 +359,6 @@ class BtrfsTestCase(TestCase, LoaderModuleMockMixin):
         mount.assert_called_once()
         umount.assert_called_once()
 
-    @skipIf(salt.utils.platform.is_windows(), "Skip on Windows")
    @patch("salt.states.btrfs._is_cow")
     @patch("salt.states.btrfs._umount")
     @patch("salt.states.btrfs._mount")
@@ -404,7 +389,6 @@ class BtrfsTestCase(TestCase, LoaderModuleMockMixin):
         mount.assert_called_once()
         umount.assert_called_once()
 
-    @skipIf(salt.utils.platform.is_windows(), "Skip on Windows")
     @patch("salt.states.btrfs._unset_cow")
     @patch("salt.states.btrfs._is_cow")
     @patch("salt.states.btrfs._umount")
@@ -437,7 +421,6 @@ class BtrfsTestCase(TestCase, LoaderModuleMockMixin):
         mount.assert_called_once()
         umount.assert_called_once()
 
-    @skipIf(salt.utils.platform.is_windows(), "Skip on Windows")
     @patch("salt.states.btrfs._umount")
     @patch("salt.states.btrfs._mount")
     def test_subvolume_created(self, mount, umount):
@@ -469,7 +452,6 @@ class BtrfsTestCase(TestCase, LoaderModuleMockMixin):
         mount.assert_called_once()
         umount.assert_called_once()
 
-    @skipIf(salt.utils.platform.is_windows(), "Skip on Windows")
     @patch("salt.states.btrfs._umount")
     @patch("salt.states.btrfs._mount")
     def test_subvolume_created_fails_directory(self, mount, umount):
@@ -499,7 +481,6 @@ class BtrfsTestCase(TestCase, LoaderModuleMockMixin):
         mount.assert_called_once()
         umount.assert_called_once()
 
-    @skipIf(salt.utils.platform.is_windows(), "Skip on Windows")
     @patch("salt.states.btrfs._umount")
     @patch("salt.states.btrfs._mount")
     def test_subvolume_created_fails(self, mount, umount):
@@ -541,7 +522,7 @@ class BtrfsTestCase(TestCase, LoaderModuleMockMixin):
                 "description": "Set/get compression for a file or directory",
                 "value": "N/A",
             },
-            "label": {"description": "Set/get label of device.", "value": "N/A"},
+            "label": {"description": "Set/get label of device.", "value": "N/A",},
             "ro": {
                 "description": "Set/get read-only flag or subvolume",
                 "value": "N/A",
@@ -560,7 +541,7 @@ class BtrfsTestCase(TestCase, LoaderModuleMockMixin):
                 "description": "Set/get compression for a file or directory",
                 "value": "N/A",
             },
-            "label": {"description": "Set/get label of device.", "value": "N/A"},
+            "label": {"description": "Set/get label of device.", "value": "N/A",},
             "ro": {
                 "description": "Set/get read-only flag or subvolume",
                 "value": "N/A",
@@ -578,7 +559,7 @@ class BtrfsTestCase(TestCase, LoaderModuleMockMixin):
                 "description": "Set/get compression for a file or directory",
                 "value": "N/A",
             },
-            "label": {"description": "Set/get label of device.", "value": "mylabel"},
+            "label": {"description": "Set/get label of device.", "value": "mylabel",},
             "ro": {
                 "description": "Set/get read-only flag or subvolume",
                 "value": "N/A",
@@ -596,7 +577,7 @@ class BtrfsTestCase(TestCase, LoaderModuleMockMixin):
                 "description": "Set/get compression for a file or directory",
                 "value": "N/A",
             },
-            "label": {"description": "Set/get label of device.", "value": "N/A"},
+            "label": {"description": "Set/get label of device.", "value": "N/A",},
             "ro": {
                 "description": "Set/get read-only flag or subvolume",
                 "value": True,
@@ -614,7 +595,7 @@ class BtrfsTestCase(TestCase, LoaderModuleMockMixin):
                 "description": "Set/get compression for a file or directory",
                 "value": "N/A",
             },
-            "label": {"description": "Set/get label of device.", "value": "N/A"},
+            "label": {"description": "Set/get label of device.", "value": "N/A",},
             "ro": {
                 "description": "Set/get read-only flag or subvolume",
                 "value": "N/A",
@@ -755,40 +736,3 @@ class BtrfsTestCase(TestCase, LoaderModuleMockMixin):
         )
         mount.assert_called_once()
         umount.assert_called_once()
-
-    @patch("salt.states.btrfs._umount")
-    @patch("salt.states.btrfs._mount")
-    @patch("os.path.exists")
-    def test_properties_test(self, exists, mount, umount):
-        """
-        Test setting a property in test mode.
-        """
-        exists.return_value = True
-        mount.return_value = "/tmp/xxx"
-        salt_mock = {
-            "btrfs.properties": MagicMock(
-                side_effect=[
-                    {
-                        "ro": {
-                            "description": "Set/get read-only flag or subvolume",
-                            "value": "N/A",
-                        },
-                    },
-                ]
-            ),
-        }
-        opts_mock = {
-            "test": True,
-        }
-        with patch.dict(btrfs.__salt__, salt_mock), patch.dict(
-            btrfs.__opts__, opts_mock
-        ):
-            assert btrfs.properties(name="@/var", device="/dev/sda1", ro=True) == {
-                "name": "@/var",
-                "result": None,
-                "changes": {"ro": "true"},
-                "comment": [],
-            }
-            salt_mock["btrfs.properties"].assert_called_with("/tmp/xxx/@/var")
-            mount.assert_called_once()
-            umount.assert_called_once()
diff --git a/tests/unit/states/test_pkg.py b/tests/unit/states/test_pkg.py
index 15ca937e13..a7ddfece14 100644
--- a/tests/unit/states/test_pkg.py
+++ b/tests/unit/states/test_pkg.py
@@ -1,15 +1,6 @@
-# -*- coding: utf-8 -*-
-
-# Import Python libs
-from __future__ import absolute_import
-
 import salt.states.pkg as pkg
-
-# Import Salt Libs
 from salt.ext import six
 from salt.ext.six.moves import zip
-
-# Import Salt Testing Libs
 from tests.support.mixins import LoaderModuleMockMixin
 from tests.support.mock import MagicMock, patch
 from tests.support.unit import TestCase
@@ -35,7 +26,7 @@ class PkgTestCase(TestCase, LoaderModuleMockMixin):
         """
         list_upgrades = MagicMock(
             return_value={
-                pkgname: pkgver["new"] for pkgname, pkgver in six.iteritems(self.pkgs)
+                pkgname: pkgver["new"] for pkgname, pkgver in self.pkgs.items()
             }
         )
         upgrade = MagicMock(return_value=self.pkgs)
@@ -75,7 +66,7 @@ class PkgTestCase(TestCase, LoaderModuleMockMixin):
 
         list_upgrades = MagicMock(
             return_value={
-                pkgname: pkgver["new"] for pkgname, pkgver in six.iteritems(self.pkgs)
+                pkgname: pkgver["new"] for pkgname, pkgver in self.pkgs.items()
             }
         )
         upgrade = MagicMock(return_value=self.pkgs)
@@ -92,9 +83,7 @@ class PkgTestCase(TestCase, LoaderModuleMockMixin):
         # Run state with test=false
         with patch.dict(pkg.__opts__, {"test": False}):
             ret = pkg.uptodate(
-                "dummy",
-                test=True,
-                pkgs=[pkgname for pkgname in six.iterkeys(self.pkgs)],
+                "dummy", test=True, pkgs=[pkgname for pkgname in self.pkgs.keys()],
             )
             self.assertTrue(ret["result"])
             self.assertDictEqual(ret["changes"], pkgs)
@@ -102,9 +91,7 @@ class PkgTestCase(TestCase, LoaderModuleMockMixin):
         # Run state with test=true
         with patch.dict(pkg.__opts__, {"test": True}):
             ret = pkg.uptodate(
-                "dummy",
-                test=True,
-                pkgs=[pkgname for pkgname in six.iterkeys(self.pkgs)],
+                "dummy", test=True, pkgs=[pkgname for pkgname in self.pkgs.keys()],
             )
             self.assertIsNone(ret["result"])
             self.assertDictEqual(ret["changes"], pkgs)
@@ -146,9 +133,7 @@ class PkgTestCase(TestCase, LoaderModuleMockMixin):
         # Run state with test=false
         with patch.dict(pkg.__opts__, {"test": False}):
             ret = pkg.uptodate(
-                "dummy",
-                test=True,
-                pkgs=[pkgname for pkgname in six.iterkeys(self.pkgs)],
+                "dummy", test=True, pkgs=[pkgname for pkgname in self.pkgs.keys()],
             )
             self.assertTrue(ret["result"])
             self.assertDictEqual(ret["changes"], {})
@@ -156,9 +141,7 @@ class PkgTestCase(TestCase, LoaderModuleMockMixin):
         # Run state with test=true
         with patch.dict(pkg.__opts__, {"test": True}):
             ret = pkg.uptodate(
-                "dummy",
-                test=True,
-                pkgs=[pkgname for pkgname in six.iterkeys(self.pkgs)],
+                "dummy", test=True, pkgs=[pkgname for pkgname in self.pkgs.keys()],
             )
             self.assertTrue(ret["result"])
             self.assertDictEqual(ret["changes"], {})
@@ -176,7 +159,7 @@ class PkgTestCase(TestCase, LoaderModuleMockMixin):
 
         list_upgrades = MagicMock(
             return_value={
-                pkgname: pkgver["new"] for pkgname, pkgver in six.iteritems(self.pkgs)
+                pkgname: pkgver["new"] for pkgname, pkgver in self.pkgs.items()
             }
         )
         upgrade = MagicMock(return_value={})
@@ -193,9 +176,7 @@ class PkgTestCase(TestCase, LoaderModuleMockMixin):
         # Run state with test=false
         with patch.dict(pkg.__opts__, {"test": False}):
             ret = pkg.uptodate(
-                "dummy",
-                test=True,
-                pkgs=[pkgname for pkgname in six.iterkeys(self.pkgs)],
+                "dummy", test=True, pkgs=[pkgname for pkgname in self.pkgs.keys()],
             )
             self.assertFalse(ret["result"])
             self.assertDictEqual(ret["changes"], {})
@@ -203,9 +184,7 @@ class PkgTestCase(TestCase, LoaderModuleMockMixin):
         # Run state with test=true
         with patch.dict(pkg.__opts__, {"test": True}):
             ret = pkg.uptodate(
-                "dummy",
-                test=True,
-                pkgs=[pkgname for pkgname in six.iterkeys(self.pkgs)],
+                "dummy", test=True, pkgs=[pkgname for pkgname in self.pkgs.keys()],
             )
             self.assertIsNone(ret["result"])
             self.assertDictEqual(ret["changes"], pkgs)
diff --git a/tests/unit/test_loader.py b/tests/unit/test_loader.py
index 9f826e007f..863e2182b9 100644
--- a/tests/unit/test_loader.py
+++ b/tests/unit/test_loader.py
@@ -215,6 +215,96 @@ class LazyLoaderUtilsTest(TestCase):
         self.assertTrue(self.module_name + ".run" not in loader)
 
 
+loader_template_module = """
+import my_utils
+
+def run():
+    return my_utils.run()
+"""
+
+loader_template_utils = """
+def run():
+    return True
+"""
+
+
+class LazyLoaderUtilsTest(TestCase):
+    """
+    Test the loader
+    """
+
+    module_name = "lazyloaderutilstest"
+    utils_name = "my_utils"
+
+    @classmethod
+    def setUpClass(cls):
+        cls.opts = salt.config.minion_config(None)
+        cls.opts["grains"] = salt.loader.grains(cls.opts)
+        if not os.path.isdir(TMP):
+            os.makedirs(TMP)
+
+    def setUp(self):
+        # Setup the module
+        self.module_dir = tempfile.mkdtemp(dir=TMP)
+        self.module_file = os.path.join(
+            self.module_dir, "{}.py".format(self.module_name)
+        )
+        with salt.utils.files.fopen(self.module_file, "w") as fh:
+            fh.write(salt.utils.stringutils.to_str(loader_template_module))
+            fh.flush()
+            os.fsync(fh.fileno())
+
+        self.utils_dir = tempfile.mkdtemp(dir=TMP)
+        self.utils_file = os.path.join(self.utils_dir, "{}.py".format(self.utils_name))
+        with salt.utils.files.fopen(self.utils_file, "w") as fh:
+            fh.write(salt.utils.stringutils.to_str(loader_template_utils))
+            fh.flush()
+            os.fsync(fh.fileno())
+
+    def tearDown(self):
+        shutil.rmtree(self.module_dir)
+        if os.path.isdir(self.module_dir):
+            shutil.rmtree(self.module_dir)
+        shutil.rmtree(self.utils_dir)
+        if os.path.isdir(self.utils_dir):
+            shutil.rmtree(self.utils_dir)
+        del self.module_dir
+        del self.module_file
+        del self.utils_dir
+        del self.utils_file
+
+        if self.module_name in sys.modules:
+            del sys.modules[self.module_name]
+        if self.utils_name in sys.modules:
+            del sys.modules[self.utils_name]
+
+    @classmethod
+    def tearDownClass(cls):
+        del cls.opts
+
+    def test_utils_found(self):
+        """
+        Test that the extra module directory is available for imports
+        """
+        loader = salt.loader.LazyLoader(
+            [self.module_dir],
+            copy.deepcopy(self.opts),
+            tag="module",
+            extra_module_dirs=[self.utils_dir],
+        )
+        self.assertTrue(inspect.isfunction(loader[self.module_name + ".run"]))
+        self.assertTrue(loader[self.module_name + ".run"]())
+
+    def test_utils_not_found(self):
+        """
+        Test that the extra module directory is not available for imports
+        """
+        loader = salt.loader.LazyLoader(
+            [self.module_dir], copy.deepcopy(self.opts), tag="module"
+        )
+        self.assertTrue(self.module_name + ".run" not in loader)
+
+
 class LazyLoaderVirtualEnabledTest(TestCase):
     """
     Test the base loader of salt.
@@ -1342,9 +1432,10 @@ class LoaderGlobalsTest(ModuleCase):
         )
 
         # Now, test each module!
-        for item in global_vars.values():
-            for name in names:
-                self.assertIn(name, list(item.keys()))
+        for item in global_vars:
+            if item["__name__"].startswith("salt.loaded"):
+                for name in names:
+                    self.assertIn(name, list(item.keys()))
 
     def test_auth(self):
         """
-- 
2.29.2