diff --git a/_lastrevision b/_lastrevision
index e724caf..c322b87 100644
--- a/_lastrevision
+++ b/_lastrevision
@@ -1 +1 @@
-d0c2f35ff4a0b21786b20c884cbb191ad2e63904
\ No newline at end of file
+6f191fc01de41fe2c1c4b659d5738e80aeb89b4c
\ No newline at end of file
diff --git a/_multibuild b/_multibuild
new file mode 100644
index 0000000..a0cd1a3
--- /dev/null
+++ b/_multibuild
@@ -0,0 +1,3 @@
+
+ testsuite
+
diff --git a/decode-oscap-byte-stream-to-string-bsc-1219001.patch b/decode-oscap-byte-stream-to-string-bsc-1219001.patch
new file mode 100644
index 0000000..3cca99d
--- /dev/null
+++ b/decode-oscap-byte-stream-to-string-bsc-1219001.patch
@@ -0,0 +1,80 @@
+From 45b97042766e15a4336b141b40a03d68156771bc Mon Sep 17 00:00:00 2001
+From: Marek Czernek
+Date: Thu, 14 Mar 2024 16:16:02 +0100
+Subject: [PATCH] Decode oscap byte stream to string (bsc#1219001)
+
+---
+ salt/modules/openscap.py | 5 +++--
+ tests/unit/modules/test_openscap.py | 10 +++++-----
+ 2 files changed, 8 insertions(+), 7 deletions(-)
+
+diff --git a/salt/modules/openscap.py b/salt/modules/openscap.py
+index 216fd89eef..89712ae722 100644
+--- a/salt/modules/openscap.py
++++ b/salt/modules/openscap.py
+@@ -152,10 +152,11 @@ def xccdf_eval(xccdffile, ovalfiles=None, **kwargs):
+ if success:
+ tempdir = tempfile.mkdtemp()
+ proc = Popen(cmd_opts, stdout=PIPE, stderr=PIPE, cwd=tempdir)
+- (stdoutdata, error) = proc.communicate()
++ (_, error) = proc.communicate()
++ error = error.decode('ascii', errors='ignore')
+ success = _OSCAP_EXIT_CODES_MAP.get(proc.returncode, False)
+ if proc.returncode < 0:
+- error += "\nKilled by signal {}\n".format(proc.returncode).encode('ascii')
++ error += "\nKilled by signal {}\n".format(proc.returncode)
+ returncode = proc.returncode
+ if success:
+ __salt__["cp.push_dir"](tempdir)
+diff --git a/tests/unit/modules/test_openscap.py b/tests/unit/modules/test_openscap.py
+index 301c1869ec..6fbdfed7cf 100644
+--- a/tests/unit/modules/test_openscap.py
++++ b/tests/unit/modules/test_openscap.py
+@@ -218,7 +218,7 @@ class OpenscapTestCase(TestCase):
+ "salt.modules.openscap.Popen",
+ MagicMock(
+ return_value=Mock(
+- **{"returncode": 0, "communicate.return_value": ("", "")}
++ **{"returncode": 0, "communicate.return_value": (bytes(0), bytes(0))}
+ )
+ ),
+ ):
+@@ -269,7 +269,7 @@ class OpenscapTestCase(TestCase):
+ "salt.modules.openscap.Popen",
+ MagicMock(
+ return_value=Mock(
+- **{"returncode": 0, "communicate.return_value": ("", "")}
++ **{"returncode": 0, "communicate.return_value": (bytes(0), bytes(0))}
+ )
+ ),
+ ):
+@@ -323,7 +323,7 @@ class OpenscapTestCase(TestCase):
+ "salt.modules.openscap.Popen",
+ MagicMock(
+ return_value=Mock(
+- **{"returncode": 2, "communicate.return_value": ("", "some error")}
++ **{"returncode": 2, "communicate.return_value": (bytes(0), bytes("some error", "UTF-8"))}
+ )
+ ),
+ ):
+@@ -374,7 +374,7 @@ class OpenscapTestCase(TestCase):
+ "salt.modules.openscap.Popen",
+ MagicMock(
+ return_value=Mock(
+- **{"returncode": 2, "communicate.return_value": ("", "some error")}
++ **{"returncode": 2, "communicate.return_value": (bytes(0), bytes("some error", "UTF-8"))}
+ )
+ ),
+ ):
+@@ -423,7 +423,7 @@ class OpenscapTestCase(TestCase):
+ return_value=Mock(
+ **{
+ "returncode": 1,
+- "communicate.return_value": ("", "evaluation error"),
++ "communicate.return_value": (bytes(0), bytes("evaluation error", "UTF-8")),
+ }
+ )
+ ),
+--
+2.43.0
+
diff --git a/discover-both-.yml-and-.yaml-playbooks-bsc-1211888.patch b/discover-both-.yml-and-.yaml-playbooks-bsc-1211888.patch
new file mode 100644
index 0000000..5aefe29
--- /dev/null
+++ b/discover-both-.yml-and-.yaml-playbooks-bsc-1211888.patch
@@ -0,0 +1,188 @@
+From 05fbd376090c5d7f997c510db0abb62be54d6d40 Mon Sep 17 00:00:00 2001
+From: Johannes Hahn
+Date: Tue, 20 Feb 2024 15:38:08 +0100
+Subject: [PATCH] Discover both *.yml and *.yaml playbooks (bsc#1211888)
+
+Allow for 'playbook_extension' to be either a string or a tuple and
+change the default behavior to discover both.
+---
+ changelog/66048.changed.md | 1 +
+ salt/modules/ansiblegate.py | 46 +++++++++----------
+ .../pytests/unit/modules/test_ansiblegate.py | 3 ++
+ .../example_playbooks/playbook1.yaml | 5 ++
+ 4 files changed, 30 insertions(+), 25 deletions(-)
+ create mode 100644 changelog/66048.changed.md
+ create mode 100644 tests/unit/files/playbooks/example_playbooks/playbook1.yaml
+
+diff --git a/changelog/66048.changed.md b/changelog/66048.changed.md
+new file mode 100644
+index 0000000000..b042e0d313
+--- /dev/null
++++ b/changelog/66048.changed.md
+@@ -0,0 +1 @@
++Ansiblegate discover_playbooks was changed to find playbooks as either *.yml or *.yaml files
+diff --git a/salt/modules/ansiblegate.py b/salt/modules/ansiblegate.py
+index 2f60a7444f..920c374e5a 100644
+--- a/salt/modules/ansiblegate.py
++++ b/salt/modules/ansiblegate.py
+@@ -111,7 +111,7 @@ def __virtual__():
+ if proc.returncode != 0:
+ return (
+ False,
+- "Failed to get the listing of ansible modules:\n{}".format(proc.stderr),
++ f"Failed to get the listing of ansible modules:\n{proc.stderr}",
+ )
+
+ module_funcs = dir(sys.modules[__name__])
+@@ -240,7 +240,7 @@ def call(module, *args, **kwargs):
+ _kwargs = {k: v for (k, v) in kwargs.items() if not k.startswith("__pub")}
+
+ for key, value in _kwargs.items():
+- module_args.append("{}={}".format(key, salt.utils.json.dumps(value)))
++ module_args.append(f"{key}={salt.utils.json.dumps(value)}")
+
+ with NamedTemporaryFile(mode="w") as inventory:
+
+@@ -367,15 +367,15 @@ def playbooks(
+ if diff:
+ command.append("--diff")
+ if isinstance(extra_vars, dict):
+- command.append("--extra-vars='{}'".format(json.dumps(extra_vars)))
++ command.append(f"--extra-vars='{json.dumps(extra_vars)}'")
+ elif isinstance(extra_vars, str) and extra_vars.startswith("@"):
+- command.append("--extra-vars={}".format(extra_vars))
++ command.append(f"--extra-vars={extra_vars}")
+ if flush_cache:
+ command.append("--flush-cache")
+ if inventory:
+- command.append("--inventory={}".format(inventory))
++ command.append(f"--inventory={inventory}")
+ if limit:
+- command.append("--limit={}".format(limit))
++ command.append(f"--limit={limit}")
+ if list_hosts:
+ command.append("--list-hosts")
+ if list_tags:
+@@ -383,25 +383,25 @@ def playbooks(
+ if list_tasks:
+ command.append("--list-tasks")
+ if module_path:
+- command.append("--module-path={}".format(module_path))
++ command.append(f"--module-path={module_path}")
+ if skip_tags:
+- command.append("--skip-tags={}".format(skip_tags))
++ command.append(f"--skip-tags={skip_tags}")
+ if start_at_task:
+- command.append("--start-at-task={}".format(start_at_task))
++ command.append(f"--start-at-task={start_at_task}")
+ if syntax_check:
+ command.append("--syntax-check")
+ if tags:
+- command.append("--tags={}".format(tags))
++ command.append(f"--tags={tags}")
+ if playbook_kwargs:
+ for key, value in playbook_kwargs.items():
+ key = key.replace("_", "-")
+ if value is True:
+- command.append("--{}".format(key))
++ command.append(f"--{key}")
+ elif isinstance(value, str):
+- command.append("--{}={}".format(key, value))
++ command.append(f"--{key}={value}")
+ elif isinstance(value, dict):
+- command.append("--{}={}".format(key, json.dumps(value)))
+- command.append("--forks={}".format(forks))
++ command.append(f"--{key}={json.dumps(value)}")
++ command.append(f"--forks={forks}")
+ cmd_kwargs = {
+ "env": {
+ "ANSIBLE_STDOUT_CALLBACK": "json",
+@@ -502,7 +502,7 @@ def discover_playbooks(
+ List of paths to discover playbooks from.
+
+ :param playbook_extension:
+- File extension of playbooks file to search for. Default: "yml"
++ File extension(s) of playbook files to search for, can be a string or tuple of strings. Default: (".yml", ".yaml")
+
+ :param hosts_filename:
+ Filename of custom playbook inventory to search for. Default: "hosts"
+@@ -533,19 +533,17 @@ def discover_playbooks(
+ )
+
+ if not playbook_extension:
+- playbook_extension = "yml"
++ playbook_extension = (".yml", ".yaml")
+ if not hosts_filename:
+ hosts_filename = "hosts"
+
+ if path:
+ if not os.path.isabs(path):
+ raise CommandExecutionError(
+- "The given path is not an absolute path: {}".format(path)
++ f"The given path is not an absolute path: {path}"
+ )
+ if not os.path.isdir(path):
+- raise CommandExecutionError(
+- "The given path is not a directory: {}".format(path)
+- )
++ raise CommandExecutionError(f"The given path is not a directory: {path}")
+ return {
+ path: _explore_path(path, playbook_extension, hosts_filename, syntax_check)
+ }
+@@ -573,7 +571,7 @@ def _explore_path(path, playbook_extension, hosts_filename, syntax_check):
+ # Check files in the given path
+ for _f in os.listdir(path):
+ _path = os.path.join(path, _f)
+- if os.path.isfile(_path) and _path.endswith("." + playbook_extension):
++ if os.path.isfile(_path) and _path.endswith(playbook_extension):
+ ret[_f] = {"fullpath": _path}
+ # Check for custom inventory file
+ if os.path.isfile(os.path.join(path, hosts_filename)):
+@@ -584,9 +582,7 @@ def _explore_path(path, playbook_extension, hosts_filename, syntax_check):
+ # Check files in the 1st level of subdirectories
+ for _f2 in os.listdir(_path):
+ _path2 = os.path.join(_path, _f2)
+- if os.path.isfile(_path2) and _path2.endswith(
+- "." + playbook_extension
+- ):
++ if os.path.isfile(_path2) and _path2.endswith(playbook_extension):
+ ret[os.path.join(_f, _f2)] = {"fullpath": _path2}
+ # Check for custom inventory file
+ if os.path.isfile(os.path.join(_path, hosts_filename)):
+@@ -599,7 +595,7 @@ def _explore_path(path, playbook_extension, hosts_filename, syntax_check):
+ )
+ except Exception as exc:
+ raise CommandExecutionError(
+- "There was an exception while discovering playbooks: {}".format(exc)
++ f"There was an exception while discovering playbooks: {exc}"
+ )
+
+ # Run syntax check validation
+diff --git a/tests/pytests/unit/modules/test_ansiblegate.py b/tests/pytests/unit/modules/test_ansiblegate.py
+index 6201809c22..272da721bf 100644
+--- a/tests/pytests/unit/modules/test_ansiblegate.py
++++ b/tests/pytests/unit/modules/test_ansiblegate.py
+@@ -198,6 +198,9 @@ def test_ansible_discover_playbooks_single_path():
+ assert ret[playbooks_dir]["playbook1.yml"] == {
+ "fullpath": os.path.join(playbooks_dir, "playbook1.yml")
+ }
++ assert ret[playbooks_dir]["playbook1.yaml"] == {
++ "fullpath": os.path.join(playbooks_dir, "playbook1.yaml")
++ }
+ assert ret[playbooks_dir]["example-playbook2/site.yml"] == {
+ "fullpath": os.path.join(playbooks_dir, "example-playbook2/site.yml"),
+ "custom_inventory": os.path.join(playbooks_dir, "example-playbook2/hosts"),
+diff --git a/tests/unit/files/playbooks/example_playbooks/playbook1.yaml b/tests/unit/files/playbooks/example_playbooks/playbook1.yaml
+new file mode 100644
+index 0000000000..e258a101e1
+--- /dev/null
++++ b/tests/unit/files/playbooks/example_playbooks/playbook1.yaml
+@@ -0,0 +1,5 @@
++---
++- hosts: all
++ gather_facts: false
++ tasks:
++ - ping:
+--
+2.43.1
+
diff --git a/fix-problematic-tests-and-allow-smooth-tests-executi.patch b/fix-problematic-tests-and-allow-smooth-tests-executi.patch
new file mode 100644
index 0000000..ecc3de8
--- /dev/null
+++ b/fix-problematic-tests-and-allow-smooth-tests-executi.patch
@@ -0,0 +1,2695 @@
+From 1b1bbc3e46ab2eed98f07a23368877fc068dbc06 Mon Sep 17 00:00:00 2001
+From: =?UTF-8?q?Pablo=20Su=C3=A1rez=20Hern=C3=A1ndez?=
+
+Date: Mon, 26 Feb 2024 11:25:22 +0000
+Subject: [PATCH] Fix problematic tests and allow smooth tests
+ executions on containers
+
+* Align boto imports in tests with Salt modules
+
+Some modules `import boto` to set log levels. The related tests don't
+import `boto`. This can cause a problem when `boto` is not available.
+
+Tests are skipped when HAS_BOTO in the test_boto_*.py is False. Not
+trying to `import boto` can leave HAS_BOTO=True in the test file even
+though HAS_BOTO is False on the application side. In this case, tests
+are not skipped and fail.
+
+* Fix mock order in test_dig (test_network.py)
+
+`salt.utils.path.which` should not be mocked before `network.__utils__`. The
+Salt loader calls `salt.utils.network.linux_interfaces`, which needs the real
+`salt.utils.path.which`.
+
+* Fix mock calls
+
+Signed-off-by: Pedro Algarvio
+(cherry picked from commit 3506e7fd0e84320b2873370f1fe527025c244dca)
+
+* Skip venafiapi test if vcert module not available
+
+The same HAS_VCERT check is done in the runner module.
+
+* Moving tests/integration/modules/test_cmdmod.py to pytest, Gareth J Greenaway original author
+
+(cherry picked from commit 2c1040b4c2885efaa86576fd88eb36bb550b5996)
+
+* The repo.saltproject.io `index.html` file changed it's contents. Fix tests.
+
+Signed-off-by: Pedro Algarvio
+(cherry picked from commit 672f6586d7c3cdb0e8c5ee42524895035aafcc23)
+
+* Skip hwclock test when executed inside a container
+
+* Skip git pillar tests when executed inside a container
+
+These tests require a Git repository container, which is hard to correctly set
+up when executing the tests in the container in GH Actions.
+
+Using --network host can help, but there was still an error (the git repos were
+empty) when I tried to set this up.
+
+* Skip test requiring systemd inside container
+
+* Exclude tests for hgfs if missing hglib
+
+* Skip and fix tests when running on containers
+
+* Fix some failing test causing problem in SUSE environments
+
+* Skip more tests when running on containers
+
+* Use skipif instead of skip_if as it seems not behaving equally
+
+* Skip more tests that cannot run in a container
+
+* Remove SSH test which doesn't make sense after lock mechanism
+
+* Fix failing boto tests
+
+* Skip unmaintained tests upstream around zcbuildout
+
+* Skip some tests that does not run well in GH Actions
+
+---------
+
+Co-authored-by: Pedro Algarvio
+Co-authored-by: Gareth J. Greenaway
+Co-authored-by: Alexander Graul
+---
+ .../integration/externalapi/test_venafiapi.py | 10 +-
+ tests/integration/modules/test_cmdmod.py | 634 ------------------
+ tests/integration/modules/test_cp.py | 24 +-
+ tests/integration/modules/test_timezone.py | 3 +
+ tests/integration/pillar/test_git_pillar.py | 3 +
+ tests/integration/ssh/test_state.py | 47 --
+ tests/pytests/functional/cache/test_consul.py | 4 +
+ tests/pytests/functional/cache/test_mysql.py | 4 +
+ .../functional/fileserver/hgfs/test_hgfs.py | 2 +
+ .../pytests/functional/modules/test_cmdmod.py | 561 ++++++++++++++++
+ .../functional/modules/test_dockermod.py | 4 +
+ .../pytests/functional/modules/test_swarm.py | 5 +
+ .../pytests/functional/modules/test_system.py | 3 +
+ .../pillar/hg_pillar/test_hg_pillar.py | 1 +
+ .../states/rabbitmq/test_cluster.py | 4 +
+ .../functional/states/rabbitmq/test_plugin.py | 4 +
+ .../functional/states/rabbitmq/test_policy.py | 4 +
+ .../states/rabbitmq/test_upstream.py | 4 +
+ .../functional/states/rabbitmq/test_user.py | 4 +
+ .../functional/states/rabbitmq/test_vhost.py | 4 +
+ .../functional/states/test_docker_network.py | 5 +
+ tests/pytests/functional/states/test_pkg.py | 6 +-
+ .../integration/cli/test_syndic_eauth.py | 3 +
+ .../integration/daemons/test_memory_leak.py | 4 +
+ .../integration/modules/test_cmdmod.py | 93 +++
+ .../pytests/integration/modules/test_virt.py | 4 +
+ tests/pytests/integration/ssh/test_log.py | 3 +
+ tests/pytests/integration/ssh/test_master.py | 5 +
+ .../integration/ssh/test_py_versions.py | 3 +
+ .../pytests/integration/ssh/test_ssh_setup.py | 2 +
+ .../scenarios/compat/test_with_versions.py | 4 +
+ .../multimaster/test_failover_master.py | 3 +
+ tests/pytests/scenarios/setup/test_install.py | 6 +
+ tests/pytests/unit/modules/test_aptpkg.py | 12 +-
+ .../pytests/unit/modules/test_linux_sysctl.py | 8 +-
+ tests/pytests/unit/modules/test_win_ip.py | 4 +-
+ tests/pytests/unit/test_master.py | 2 +-
+ tests/pytests/unit/test_minion.py | 4 +-
+ tests/pytests/unit/utils/event/test_event.py | 24 +-
+ tests/unit/modules/test_boto_apigateway.py | 1 +
+ .../unit/modules/test_boto_cognitoidentity.py | 1 +
+ .../modules/test_boto_elasticsearch_domain.py | 1 +
+ tests/unit/modules/test_boto_lambda.py | 1 +
+ tests/unit/modules/test_network.py | 6 +-
+ tests/unit/modules/test_nilrt_ip.py | 4 +-
+ tests/unit/modules/test_zcbuildout.py | 2 +
+ .../unit/netapi/rest_tornado/test_saltnado.py | 22 +-
+ tests/unit/states/test_boto_apigateway.py | 1 +
+ .../unit/states/test_boto_cognitoidentity.py | 1 +
+ tests/unit/states/test_zcbuildout.py | 1 +
+ 50 files changed, 824 insertions(+), 741 deletions(-)
+ delete mode 100644 tests/integration/modules/test_cmdmod.py
+ create mode 100644 tests/pytests/functional/modules/test_cmdmod.py
+
+diff --git a/tests/integration/externalapi/test_venafiapi.py b/tests/integration/externalapi/test_venafiapi.py
+index ad08605430f..3ae1e3392d8 100644
+--- a/tests/integration/externalapi/test_venafiapi.py
++++ b/tests/integration/externalapi/test_venafiapi.py
+@@ -13,6 +13,14 @@ from cryptography.hazmat.backends import default_backend
+ from cryptography.hazmat.primitives import serialization
+ from cryptography.x509.oid import NameOID
+
++try:
++ import vcert
++ from vcert.common import CertificateRequest
++
++ HAS_VCERT = True
++except ImportError:
++ HAS_VCERT = False
++
+ from tests.support.case import ShellCase
+
+
+@@ -36,6 +44,7 @@ def with_random_name(func):
+ return wrapper
+
+
++@pytest.mark.skipif(HAS_VCERT is False, reason="The vcert module must be installed.")
+ class VenafiTest(ShellCase):
+ """
+ Test the venafi runner
+@@ -86,7 +95,6 @@ class VenafiTest(ShellCase):
+ @with_random_name
+ @pytest.mark.slow_test
+ def test_sign(self, name):
+-
+ csr_pem = """-----BEGIN CERTIFICATE REQUEST-----
+ MIIFbDCCA1QCAQAwgbQxCzAJBgNVBAYTAlVTMQ0wCwYDVQQIDARVdGFoMRIwEAYD
+ VQQHDAlTYWx0IExha2UxFDASBgNVBAoMC1ZlbmFmaSBJbmMuMRQwEgYDVQQLDAtJ
+diff --git a/tests/integration/modules/test_cmdmod.py b/tests/integration/modules/test_cmdmod.py
+deleted file mode 100644
+index 800111174f0..00000000000
+--- a/tests/integration/modules/test_cmdmod.py
++++ /dev/null
+@@ -1,634 +0,0 @@
+-import os
+-import random
+-import sys
+-import tempfile
+-from contextlib import contextmanager
+-
+-import pytest
+-
+-import salt.utils.path
+-import salt.utils.platform
+-import salt.utils.user
+-from tests.support.case import ModuleCase
+-from tests.support.helpers import SKIP_INITIAL_PHOTONOS_FAILURES, dedent
+-from tests.support.runtests import RUNTIME_VARS
+-
+-AVAILABLE_PYTHON_EXECUTABLE = salt.utils.path.which_bin(
+- ["python", "python2", "python2.6", "python2.7"]
+-)
+-
+-
+-@pytest.mark.windows_whitelisted
+-class CMDModuleTest(ModuleCase):
+- """
+- Validate the cmd module
+- """
+-
+- def setUp(self):
+- self.runas_usr = "nobody"
+- if salt.utils.platform.is_darwin():
+- self.runas_usr = "macsalttest"
+-
+- @contextmanager
+- def _ensure_user_exists(self, name):
+- if name in self.run_function("user.info", [name]).values():
+- # User already exists; don't touch
+- yield
+- else:
+- # Need to create user for test
+- self.run_function("user.add", [name])
+- try:
+- yield
+- finally:
+- self.run_function("user.delete", [name], remove=True)
+-
+- @pytest.mark.slow_test
+- @pytest.mark.skip_on_windows
+- def test_run(self):
+- """
+- cmd.run
+- """
+- shell = os.environ.get("SHELL")
+- if shell is None:
+- # Failed to get the SHELL var, don't run
+- self.skipTest("Unable to get the SHELL environment variable")
+-
+- self.assertTrue(self.run_function("cmd.run", ["echo $SHELL"]))
+- self.assertEqual(
+- self.run_function(
+- "cmd.run", ["echo $SHELL", "shell={}".format(shell)], python_shell=True
+- ).rstrip(),
+- shell,
+- )
+- self.assertEqual(
+- self.run_function("cmd.run", ["ls / | grep etc"], python_shell=True), "etc"
+- )
+- self.assertEqual(
+- self.run_function(
+- "cmd.run",
+- ['echo {{grains.id}} | awk "{print $1}"'],
+- template="jinja",
+- python_shell=True,
+- ),
+- "minion",
+- )
+- self.assertEqual(
+- self.run_function(
+- "cmd.run", ["grep f"], stdin="one\ntwo\nthree\nfour\nfive\n"
+- ),
+- "four\nfive",
+- )
+- self.assertEqual(
+- self.run_function(
+- "cmd.run", ['echo "a=b" | sed -e s/=/:/g'], python_shell=True
+- ),
+- "a:b",
+- )
+-
+- @pytest.mark.slow_test
+- def test_stdout(self):
+- """
+- cmd.run_stdout
+- """
+- self.assertEqual(
+- self.run_function("cmd.run_stdout", ['echo "cheese"']).rstrip(),
+- "cheese" if not salt.utils.platform.is_windows() else '"cheese"',
+- )
+-
+- @pytest.mark.slow_test
+- def test_stderr(self):
+- """
+- cmd.run_stderr
+- """
+- if sys.platform.startswith(("freebsd", "openbsd")):
+- shell = "/bin/sh"
+- else:
+- shell = "/bin/bash"
+-
+- self.assertEqual(
+- self.run_function(
+- "cmd.run_stderr",
+- ['echo "cheese" 1>&2', "shell={}".format(shell)],
+- python_shell=True,
+- ).rstrip(),
+- "cheese" if not salt.utils.platform.is_windows() else '"cheese"',
+- )
+-
+- @pytest.mark.slow_test
+- def test_run_all(self):
+- """
+- cmd.run_all
+- """
+- if sys.platform.startswith(("freebsd", "openbsd")):
+- shell = "/bin/sh"
+- else:
+- shell = "/bin/bash"
+-
+- ret = self.run_function(
+- "cmd.run_all",
+- ['echo "cheese" 1>&2', "shell={}".format(shell)],
+- python_shell=True,
+- )
+- self.assertTrue("pid" in ret)
+- self.assertTrue("retcode" in ret)
+- self.assertTrue("stdout" in ret)
+- self.assertTrue("stderr" in ret)
+- self.assertTrue(isinstance(ret.get("pid"), int))
+- self.assertTrue(isinstance(ret.get("retcode"), int))
+- self.assertTrue(isinstance(ret.get("stdout"), str))
+- self.assertTrue(isinstance(ret.get("stderr"), str))
+- self.assertEqual(
+- ret.get("stderr").rstrip(),
+- "cheese" if not salt.utils.platform.is_windows() else '"cheese"',
+- )
+-
+- @pytest.mark.slow_test
+- def test_retcode(self):
+- """
+- cmd.retcode
+- """
+- self.assertEqual(
+- self.run_function("cmd.retcode", ["exit 0"], python_shell=True), 0
+- )
+- self.assertEqual(
+- self.run_function("cmd.retcode", ["exit 1"], python_shell=True), 1
+- )
+-
+- @pytest.mark.slow_test
+- def test_run_all_with_success_retcodes(self):
+- """
+- cmd.run with success_retcodes
+- """
+- ret = self.run_function(
+- "cmd.run_all", ["exit 42"], success_retcodes=[42], python_shell=True
+- )
+-
+- self.assertTrue("retcode" in ret)
+- self.assertEqual(ret.get("retcode"), 0)
+-
+- @pytest.mark.slow_test
+- def test_retcode_with_success_retcodes(self):
+- """
+- cmd.run with success_retcodes
+- """
+- ret = self.run_function(
+- "cmd.retcode", ["exit 42"], success_retcodes=[42], python_shell=True
+- )
+-
+- self.assertEqual(ret, 0)
+-
+- @pytest.mark.slow_test
+- def test_run_all_with_success_stderr(self):
+- """
+- cmd.run with success_retcodes
+- """
+- random_file = "{}{}{}".format(
+- RUNTIME_VARS.TMP_ROOT_DIR, os.path.sep, random.random()
+- )
+-
+- if salt.utils.platform.is_windows():
+- func = "type"
+- expected_stderr = "cannot find the file specified"
+- else:
+- func = "cat"
+- expected_stderr = "No such file or directory"
+- ret = self.run_function(
+- "cmd.run_all",
+- ["{} {}".format(func, random_file)],
+- success_stderr=[expected_stderr],
+- python_shell=True,
+- )
+-
+- self.assertTrue("retcode" in ret)
+- self.assertEqual(ret.get("retcode"), 0)
+-
+- @pytest.mark.slow_test
+- def test_blacklist_glob(self):
+- """
+- cmd_blacklist_glob
+- """
+- self.assertEqual(
+- self.run_function("cmd.run", ["bad_command --foo"]).rstrip(),
+- 'ERROR: The shell command "bad_command --foo" is not permitted',
+- )
+-
+- @pytest.mark.slow_test
+- def test_script(self):
+- """
+- cmd.script
+- """
+- args = "saltines crackers biscuits=yes"
+- script = "salt://script.py"
+- ret = self.run_function("cmd.script", [script, args], saltenv="base")
+- self.assertEqual(ret["stdout"], args)
+-
+- @pytest.mark.slow_test
+- def test_script_query_string(self):
+- """
+- cmd.script
+- """
+- args = "saltines crackers biscuits=yes"
+- script = "salt://script.py?saltenv=base"
+- ret = self.run_function("cmd.script", [script, args], saltenv="base")
+- self.assertEqual(ret["stdout"], args)
+-
+- @pytest.mark.slow_test
+- def test_script_retcode(self):
+- """
+- cmd.script_retcode
+- """
+- script = "salt://script.py"
+- ret = self.run_function("cmd.script_retcode", [script], saltenv="base")
+- self.assertEqual(ret, 0)
+-
+- @pytest.mark.slow_test
+- def test_script_cwd(self):
+- """
+- cmd.script with cwd
+- """
+- tmp_cwd = tempfile.mkdtemp(dir=RUNTIME_VARS.TMP)
+- args = "saltines crackers biscuits=yes"
+- script = "salt://script.py"
+- ret = self.run_function(
+- "cmd.script", [script, args], cwd=tmp_cwd, saltenv="base"
+- )
+- self.assertEqual(ret["stdout"], args)
+-
+- @pytest.mark.slow_test
+- def test_script_cwd_with_space(self):
+- """
+- cmd.script with cwd
+- """
+- tmp_cwd = "{}{}test 2".format(
+- tempfile.mkdtemp(dir=RUNTIME_VARS.TMP), os.path.sep
+- )
+- os.mkdir(tmp_cwd)
+-
+- args = "saltines crackers biscuits=yes"
+- script = "salt://script.py"
+- ret = self.run_function(
+- "cmd.script", [script, args], cwd=tmp_cwd, saltenv="base"
+- )
+- self.assertEqual(ret["stdout"], args)
+-
+- @pytest.mark.destructive_test
+- def test_tty(self):
+- """
+- cmd.tty
+- """
+- for tty in ("tty0", "pts3"):
+- if os.path.exists(os.path.join("/dev", tty)):
+- ret = self.run_function("cmd.tty", [tty, "apply salt liberally"])
+- self.assertTrue("Success" in ret)
+-
+- @pytest.mark.skip_on_windows
+- @pytest.mark.skip_if_binaries_missing("which")
+- def test_which(self):
+- """
+- cmd.which
+- """
+- cmd_which = self.run_function("cmd.which", ["cat"])
+- self.assertIsInstance(cmd_which, str)
+- cmd_run = self.run_function("cmd.run", ["which cat"])
+- self.assertIsInstance(cmd_run, str)
+- self.assertEqual(cmd_which.rstrip(), cmd_run.rstrip())
+-
+- @pytest.mark.skip_on_windows
+- @pytest.mark.skip_if_binaries_missing("which")
+- def test_which_bin(self):
+- """
+- cmd.which_bin
+- """
+- cmds = ["pip3", "pip2", "pip", "pip-python"]
+- ret = self.run_function("cmd.which_bin", [cmds])
+- self.assertTrue(os.path.split(ret)[1] in cmds)
+-
+- @pytest.mark.slow_test
+- def test_has_exec(self):
+- """
+- cmd.has_exec
+- """
+- self.assertTrue(
+- self.run_function("cmd.has_exec", [AVAILABLE_PYTHON_EXECUTABLE])
+- )
+- self.assertFalse(
+- self.run_function("cmd.has_exec", ["alllfsdfnwieulrrh9123857ygf"])
+- )
+-
+- @pytest.mark.slow_test
+- def test_exec_code(self):
+- """
+- cmd.exec_code
+- """
+- code = dedent(
+- """
+- import sys
+- sys.stdout.write('cheese')
+- """
+- )
+- self.assertEqual(
+- self.run_function(
+- "cmd.exec_code", [AVAILABLE_PYTHON_EXECUTABLE, code]
+- ).rstrip(),
+- "cheese",
+- )
+-
+- @pytest.mark.slow_test
+- def test_exec_code_with_single_arg(self):
+- """
+- cmd.exec_code
+- """
+- code = dedent(
+- """
+- import sys
+- sys.stdout.write(sys.argv[1])
+- """
+- )
+- arg = "cheese"
+- self.assertEqual(
+- self.run_function(
+- "cmd.exec_code", [AVAILABLE_PYTHON_EXECUTABLE, code], args=arg
+- ).rstrip(),
+- arg,
+- )
+-
+- @pytest.mark.slow_test
+- def test_exec_code_with_multiple_args(self):
+- """
+- cmd.exec_code
+- """
+- code = dedent(
+- """
+- import sys
+- sys.stdout.write(sys.argv[1])
+- """
+- )
+- arg = "cheese"
+- self.assertEqual(
+- self.run_function(
+- "cmd.exec_code", [AVAILABLE_PYTHON_EXECUTABLE, code], args=[arg, "test"]
+- ).rstrip(),
+- arg,
+- )
+-
+- @pytest.mark.slow_test
+- def test_quotes(self):
+- """
+- cmd.run with quoted command
+- """
+- cmd = """echo 'SELECT * FROM foo WHERE bar="baz"' """
+- expected_result = 'SELECT * FROM foo WHERE bar="baz"'
+- if salt.utils.platform.is_windows():
+- expected_result = "'SELECT * FROM foo WHERE bar=\"baz\"'"
+- result = self.run_function("cmd.run_stdout", [cmd]).strip()
+- self.assertEqual(result, expected_result)
+-
+- @pytest.mark.skip_if_not_root
+- @pytest.mark.skip_on_windows(reason="Skip on Windows, requires password")
+- def test_quotes_runas(self):
+- """
+- cmd.run with quoted command
+- """
+- cmd = """echo 'SELECT * FROM foo WHERE bar="baz"' """
+- expected_result = 'SELECT * FROM foo WHERE bar="baz"'
+- result = self.run_function(
+- "cmd.run_all", [cmd], runas=RUNTIME_VARS.RUNNING_TESTS_USER
+- )
+- errmsg = "The command returned: {}".format(result)
+- self.assertEqual(result["retcode"], 0, errmsg)
+- self.assertEqual(result["stdout"], expected_result, errmsg)
+-
+- @pytest.mark.destructive_test
+- @pytest.mark.skip_if_not_root
+- @pytest.mark.skip_on_windows(reason="Skip on Windows, uses unix commands")
+- @pytest.mark.slow_test
+- def test_avoid_injecting_shell_code_as_root(self):
+- """
+- cmd.run should execute the whole command as the "runas" user, not
+- running substitutions as root.
+- """
+- cmd = "echo $(id -u)"
+-
+- root_id = self.run_function("cmd.run_stdout", [cmd])
+- runas_root_id = self.run_function(
+- "cmd.run_stdout", [cmd], runas=RUNTIME_VARS.RUNNING_TESTS_USER
+- )
+- with self._ensure_user_exists(self.runas_usr):
+- user_id = self.run_function("cmd.run_stdout", [cmd], runas=self.runas_usr)
+-
+- self.assertNotEqual(user_id, root_id)
+- self.assertNotEqual(user_id, runas_root_id)
+- self.assertEqual(root_id, runas_root_id)
+-
+- @pytest.mark.destructive_test
+- @pytest.mark.skip_if_not_root
+- @pytest.mark.skip_on_windows(reason="Skip on Windows, uses unix commands")
+- @pytest.mark.slow_test
+- def test_cwd_runas(self):
+- """
+- cmd.run should be able to change working directory correctly, whether
+- or not runas is in use.
+- """
+- cmd = "pwd"
+- tmp_cwd = tempfile.mkdtemp(dir=RUNTIME_VARS.TMP)
+- os.chmod(tmp_cwd, 0o711)
+-
+- cwd_normal = self.run_function("cmd.run_stdout", [cmd], cwd=tmp_cwd).rstrip(
+- "\n"
+- )
+- self.assertEqual(tmp_cwd, cwd_normal)
+-
+- with self._ensure_user_exists(self.runas_usr):
+- cwd_runas = self.run_function(
+- "cmd.run_stdout", [cmd], cwd=tmp_cwd, runas=self.runas_usr
+- ).rstrip("\n")
+- self.assertEqual(tmp_cwd, cwd_runas)
+-
+- @pytest.mark.destructive_test
+- @pytest.mark.skip_if_not_root
+- @pytest.mark.skip_unless_on_darwin(reason="Applicable to MacOS only")
+- @pytest.mark.slow_test
+- def test_runas_env(self):
+- """
+- cmd.run should be able to change working directory correctly, whether
+- or not runas is in use.
+- """
+- with self._ensure_user_exists(self.runas_usr):
+- user_path = self.run_function(
+- "cmd.run_stdout", ['printf %s "$PATH"'], runas=self.runas_usr
+- )
+- # XXX: Not sure of a better way. Environment starts out with
+- # /bin:/usr/bin and should be populated by path helper and the bash
+- # profile.
+- self.assertNotEqual("/bin:/usr/bin", user_path)
+-
+- @pytest.mark.destructive_test
+- @pytest.mark.skip_if_not_root
+- @pytest.mark.skip_unless_on_darwin(reason="Applicable to MacOS only")
+- @pytest.mark.slow_test
+- def test_runas_complex_command_bad_cwd(self):
+- """
+- cmd.run should not accidentally run parts of a complex command when
+- given a cwd which cannot be used by the user the command is run as.
+-
+- Due to the need to use `su -l` to login to another user on MacOS, we
+- cannot cd into directories that the target user themselves does not
+- have execute permission for. To an extent, this test is testing that
+- buggy behaviour, but its purpose is to ensure that the greater bug of
+- running commands after failing to cd does not occur.
+- """
+- tmp_cwd = tempfile.mkdtemp(dir=RUNTIME_VARS.TMP)
+- os.chmod(tmp_cwd, 0o700)
+-
+- with self._ensure_user_exists(self.runas_usr):
+- cmd_result = self.run_function(
+- "cmd.run_all",
+- ['pwd; pwd; : $(echo "You have failed the test" >&2)'],
+- cwd=tmp_cwd,
+- runas=self.runas_usr,
+- )
+-
+- self.assertEqual("", cmd_result["stdout"])
+- self.assertNotIn("You have failed the test", cmd_result["stderr"])
+- self.assertNotEqual(0, cmd_result["retcode"])
+-
+- @SKIP_INITIAL_PHOTONOS_FAILURES
+- @pytest.mark.skip_on_windows
+- @pytest.mark.skip_if_not_root
+- @pytest.mark.destructive_test
+- @pytest.mark.slow_test
+- def test_runas(self):
+- """
+- Ensure that the env is the runas user's
+- """
+- with self._ensure_user_exists(self.runas_usr):
+- out = self.run_function(
+- "cmd.run", ["env"], runas=self.runas_usr
+- ).splitlines()
+- self.assertIn("USER={}".format(self.runas_usr), out)
+-
+- @pytest.mark.skip_if_binaries_missing("sleep", reason="sleep cmd not installed")
+- def test_timeout(self):
+- """
+- cmd.run trigger timeout
+- """
+- out = self.run_function(
+- "cmd.run", ["sleep 2 && echo hello"], f_timeout=1, python_shell=True
+- )
+- self.assertTrue("Timed out" in out)
+-
+- @pytest.mark.skip_if_binaries_missing("sleep", reason="sleep cmd not installed")
+- def test_timeout_success(self):
+- """
+- cmd.run sufficient timeout to succeed
+- """
+- out = self.run_function(
+- "cmd.run", ["sleep 1 && echo hello"], f_timeout=2, python_shell=True
+- )
+- self.assertEqual(out, "hello")
+-
+- @pytest.mark.slow_test
+- def test_hide_output(self):
+- """
+- Test the hide_output argument
+- """
+- ls_command = (
+- ["ls", "/"] if not salt.utils.platform.is_windows() else ["dir", "c:\\"]
+- )
+-
+- error_command = ["thiscommanddoesnotexist"]
+-
+- # cmd.run
+- out = self.run_function("cmd.run", ls_command, hide_output=True)
+- self.assertEqual(out, "")
+-
+- # cmd.shell
+- out = self.run_function("cmd.shell", ls_command, hide_output=True)
+- self.assertEqual(out, "")
+-
+- # cmd.run_stdout
+- out = self.run_function("cmd.run_stdout", ls_command, hide_output=True)
+- self.assertEqual(out, "")
+-
+- # cmd.run_stderr
+- out = self.run_function("cmd.shell", error_command, hide_output=True)
+- self.assertEqual(out, "")
+-
+- # cmd.run_all (command should have produced stdout)
+- out = self.run_function("cmd.run_all", ls_command, hide_output=True)
+- self.assertEqual(out["stdout"], "")
+- self.assertEqual(out["stderr"], "")
+-
+- # cmd.run_all (command should have produced stderr)
+- out = self.run_function("cmd.run_all", error_command, hide_output=True)
+- self.assertEqual(out["stdout"], "")
+- self.assertEqual(out["stderr"], "")
+-
+- @pytest.mark.slow_test
+- def test_cmd_run_whoami(self):
+- """
+- test return of whoami
+- """
+- if not salt.utils.platform.is_windows():
+- user = RUNTIME_VARS.RUNTIME_CONFIGS["master"]["user"]
+- else:
+- user = salt.utils.user.get_specific_user()
+- if user.startswith("sudo_"):
+- user = user.replace("sudo_", "")
+- cmd = self.run_function("cmd.run", ["whoami"])
+- try:
+- self.assertEqual(user.lower(), cmd.lower())
+- except AssertionError as exc:
+- if not salt.utils.platform.is_windows():
+- raise exc from None
+- if "\\" in user:
+- user = user.split("\\")[-1]
+- self.assertEqual(user.lower(), cmd.lower())
+-
+- @pytest.mark.skip_unless_on_windows(reason="Minion is not Windows")
+- @pytest.mark.slow_test
+- def test_windows_env_handling(self):
+- """
+- Ensure that nt.environ is used properly with cmd.run*
+- """
+- out = self.run_function(
+- "cmd.run", ["set"], env={"abc": "123", "ABC": "456"}
+- ).splitlines()
+- self.assertIn("abc=123", out)
+- self.assertIn("ABC=456", out)
+-
+- @pytest.mark.slow_test
+- @pytest.mark.skip_unless_on_windows(reason="Minion is not Windows")
+- def test_windows_powershell_script_args(self):
+- """
+- Ensure that powershell processes inline script in args
+- """
+- val = "i like cheese"
+- args = (
+- '-SecureString (ConvertTo-SecureString -String "{}" -AsPlainText -Force)'
+- " -ErrorAction Stop".format(val)
+- )
+- script = "salt://issue-56195/test.ps1"
+- ret = self.run_function(
+- "cmd.script", [script], args=args, shell="powershell", saltenv="base"
+- )
+- self.assertEqual(ret["stdout"], val)
+-
+- @pytest.mark.slow_test
+- @pytest.mark.skip_unless_on_windows(reason="Minion is not Windows")
+- @pytest.mark.skip_if_binaries_missing("pwsh")
+- def test_windows_powershell_script_args_pwsh(self):
+- """
+- Ensure that powershell processes inline script in args with powershell
+- core
+- """
+- val = "i like cheese"
+- args = (
+- '-SecureString (ConvertTo-SecureString -String "{}" -AsPlainText -Force)'
+- " -ErrorAction Stop".format(val)
+- )
+- script = "salt://issue-56195/test.ps1"
+- ret = self.run_function(
+- "cmd.script", [script], args=args, shell="pwsh", saltenv="base"
+- )
+- self.assertEqual(ret["stdout"], val)
+diff --git a/tests/integration/modules/test_cp.py b/tests/integration/modules/test_cp.py
+index ad7538b4ba8..cd3e4c2f5ad 100644
+--- a/tests/integration/modules/test_cp.py
++++ b/tests/integration/modules/test_cp.py
+@@ -234,9 +234,9 @@ class CPModuleTest(ModuleCase):
+ self.run_function("cp.get_url", ["https://repo.saltproject.io/index.html", tgt])
+ with salt.utils.files.fopen(tgt, "r") as instructions:
+ data = salt.utils.stringutils.to_unicode(instructions.read())
+- self.assertIn("Bootstrap", data)
+- self.assertIn("Debian", data)
+- self.assertIn("Windows", data)
++ self.assertIn("Salt Project", data)
++ self.assertIn("Package", data)
++ self.assertIn("Repo", data)
+ self.assertNotIn("AYBABTU", data)
+
+ @pytest.mark.slow_test
+@@ -250,9 +250,9 @@ class CPModuleTest(ModuleCase):
+
+ with salt.utils.files.fopen(ret, "r") as instructions:
+ data = salt.utils.stringutils.to_unicode(instructions.read())
+- self.assertIn("Bootstrap", data)
+- self.assertIn("Debian", data)
+- self.assertIn("Windows", data)
++ self.assertIn("Salt Project", data)
++ self.assertIn("Package", data)
++ self.assertIn("Repo", data)
+ self.assertNotIn("AYBABTU", data)
+
+ @pytest.mark.slow_test
+@@ -273,9 +273,9 @@ class CPModuleTest(ModuleCase):
+ time.sleep(sleep)
+ if ret.find("HTTP 599") != -1:
+ raise Exception("https://repo.saltproject.io/index.html returned 599 error")
+- self.assertIn("Bootstrap", ret)
+- self.assertIn("Debian", ret)
+- self.assertIn("Windows", ret)
++ self.assertIn("Salt Project", ret)
++ self.assertIn("Package", ret)
++ self.assertIn("Repo", ret)
+ self.assertNotIn("AYBABTU", ret)
+
+ @pytest.mark.slow_test
+@@ -346,9 +346,9 @@ class CPModuleTest(ModuleCase):
+ """
+ src = "https://repo.saltproject.io/index.html"
+ ret = self.run_function("cp.get_file_str", [src])
+- self.assertIn("Bootstrap", ret)
+- self.assertIn("Debian", ret)
+- self.assertIn("Windows", ret)
++ self.assertIn("Salt Project", ret)
++ self.assertIn("Package", ret)
++ self.assertIn("Repo", ret)
+ self.assertNotIn("AYBABTU", ret)
+
+ @pytest.mark.slow_test
+diff --git a/tests/integration/modules/test_timezone.py b/tests/integration/modules/test_timezone.py
+index 8d7180cbd13..c1dc8a7b73d 100644
+--- a/tests/integration/modules/test_timezone.py
++++ b/tests/integration/modules/test_timezone.py
+@@ -4,6 +4,7 @@ Integration tests for timezone module
+ Linux and Solaris are supported
+ """
+ import pytest
++import os
+
+ from tests.support.case import ModuleCase
+
+@@ -15,6 +16,8 @@ except ImportError:
+ HAS_TZLOCAL = False
+
+
++INSIDE_CONTAINER = os.getenv("HOSTNAME", "") == "salt-test-container"
++@pytest.mark.skipif(INSIDE_CONTAINER, reason="No hwclock in a container")
+ class TimezoneLinuxModuleTest(ModuleCase):
+ def setUp(self):
+ """
+diff --git a/tests/integration/pillar/test_git_pillar.py b/tests/integration/pillar/test_git_pillar.py
+index 68c14daaa15..5b4cbda95c9 100644
+--- a/tests/integration/pillar/test_git_pillar.py
++++ b/tests/integration/pillar/test_git_pillar.py
+@@ -63,6 +63,7 @@ https://github.com/git/git/commit/6bc0cb5
+ https://github.com/unbit/uwsgi/commit/ac1e354
+ """
+
++import os
+ import random
+ import string
+ import sys
+@@ -100,9 +101,11 @@ try:
+ except Exception: # pylint: disable=broad-except
+ HAS_PYGIT2 = False
+
++INSIDE_CONTAINER = os.getenv("HOSTNAME", "") == "salt-test-container"
+ pytestmark = [
+ SKIP_INITIAL_PHOTONOS_FAILURES,
+ pytest.mark.skip_on_platforms(windows=True, darwin=True),
++ pytest.mark.skipif(INSIDE_CONTAINER, reason="Communication problems between containers."),
+ ]
+
+
+diff --git a/tests/integration/ssh/test_state.py b/tests/integration/ssh/test_state.py
+index a9fd3e7f2d3..69245454e85 100644
+--- a/tests/integration/ssh/test_state.py
++++ b/tests/integration/ssh/test_state.py
+@@ -283,53 +283,6 @@ class SSHStateTest(SSHCase):
+ check_file = self.run_function("file.file_exists", [SSH_SLS_FILE], wipe=False)
+ self.assertTrue(check_file)
+
+- @pytest.mark.slow_test
+- def test_state_running(self):
+- """
+- test state.running with salt-ssh
+- """
+-
+- retval = []
+-
+- def _run_in_background():
+- retval.append(self.run_function("state.sls", ["running"], wipe=False))
+-
+- bg_thread = threading.Thread(target=_run_in_background)
+- bg_thread.start()
+-
+- expected = 'The function "state.pkg" is running as'
+- state_ret = []
+- for _ in range(30):
+- if not bg_thread.is_alive():
+- continue
+- get_sls = self.run_function("state.running", wipe=False)
+- state_ret.append(get_sls)
+- if expected in " ".join(get_sls):
+- # We found the expected return
+- break
+- time.sleep(1)
+- else:
+- if not bg_thread.is_alive():
+- bg_failed_msg = "Failed to return clean data"
+- if retval and bg_failed_msg in retval.pop().get("_error", ""):
+- pytest.skip("Background state run failed, skipping")
+- self.fail(
+- "Did not find '{}' in state.running return: {}".format(
+- expected, state_ret
+- )
+- )
+-
+- # make sure we wait until the earlier state is complete
+- future = time.time() + 120
+- while True:
+- if expected not in " ".join(self.run_function("state.running", wipe=False)):
+- break
+- if time.time() > future:
+- self.fail(
+- "state.pkg is still running overtime. Test did not clean up"
+- " correctly."
+- )
+-
+ def tearDown(self):
+ """
+ make sure to clean up any old ssh directories
+diff --git a/tests/pytests/functional/cache/test_consul.py b/tests/pytests/functional/cache/test_consul.py
+index 3a38e495a93..c6e16d2588e 100644
+--- a/tests/pytests/functional/cache/test_consul.py
++++ b/tests/pytests/functional/cache/test_consul.py
+@@ -1,4 +1,5 @@
+ import logging
++import os
+ import socket
+ import time
+
+@@ -13,9 +14,12 @@ docker = pytest.importorskip("docker")
+
+ log = logging.getLogger(__name__)
+
++INSIDE_CONTAINER = os.getenv("HOSTNAME", "") == "salt-test-container"
++
+ pytestmark = [
+ pytest.mark.slow_test,
+ pytest.mark.skip_if_binaries_missing("dockerd"),
++ pytest.mark.skipif(INSIDE_CONTAINER, reason="Cannot run in a container"),
+ ]
+
+
+diff --git a/tests/pytests/functional/cache/test_mysql.py b/tests/pytests/functional/cache/test_mysql.py
+index c283872c08c..e15fc732a4a 100644
+--- a/tests/pytests/functional/cache/test_mysql.py
++++ b/tests/pytests/functional/cache/test_mysql.py
+@@ -1,4 +1,5 @@
+ import logging
++import os
+
+ import pytest
+
+@@ -11,9 +12,12 @@ docker = pytest.importorskip("docker")
+
+ log = logging.getLogger(__name__)
+
++INSIDE_CONTAINER = os.getenv("HOSTNAME", "") == "salt-test-container"
++
+ pytestmark = [
+ pytest.mark.slow_test,
+ pytest.mark.skip_if_binaries_missing("dockerd"),
++ pytest.mark.skipif(INSIDE_CONTAINER, reason="Cannot run in a container"),
+ ]
+
+
+diff --git a/tests/pytests/functional/fileserver/hgfs/test_hgfs.py b/tests/pytests/functional/fileserver/hgfs/test_hgfs.py
+index 571fe75e403..bfd927fd0fe 100644
+--- a/tests/pytests/functional/fileserver/hgfs/test_hgfs.py
++++ b/tests/pytests/functional/fileserver/hgfs/test_hgfs.py
+@@ -16,6 +16,8 @@ try:
+ except ImportError:
+ HAS_HG = False
+
++pytestmark = [pytest.mark.skipif(not HAS_HG, reason="missing hglib library")]
++
+
+ @pytest.fixture(scope="module")
+ def configure_loader_modules(master_opts):
+diff --git a/tests/pytests/functional/modules/test_cmdmod.py b/tests/pytests/functional/modules/test_cmdmod.py
+new file mode 100644
+index 00000000000..d30b474c6d2
+--- /dev/null
++++ b/tests/pytests/functional/modules/test_cmdmod.py
+@@ -0,0 +1,561 @@
++import os
++import random
++import sys
++from contextlib import contextmanager
++
++import pytest
++
++import salt.config
++import salt.utils.path
++import salt.utils.platform
++import salt.utils.user
++from tests.support.helpers import SKIP_INITIAL_PHOTONOS_FAILURES, dedent
++
++pytestmark = [pytest.mark.windows_whitelisted]
++
++
++@pytest.fixture(scope="module")
++def cmdmod(modules):
++ return modules.cmd
++
++
++@pytest.fixture(scope="module")
++def usermod(modules):
++ return modules.user
++
++
++@pytest.fixture(scope="module")
++def available_python_executable():
++ yield salt.utils.path.which_bin(["python", "python3"])
++
++
++@pytest.fixture
++def runas_usr():
++ runas_usr = "nobody"
++ if salt.utils.platform.is_darwin():
++ runas_usr = "macsalttest"
++ yield runas_usr
++
++
++@pytest.fixture
++def running_username():
++ """
++ Return the username that is running the code.
++ """
++ return salt.utils.user.get_user()
++
++
++@pytest.fixture
++def script_contents(state_tree):
++ _contents = """
++ #!/usr/bin/env python3
++ import sys
++ print(" ".join(sys.argv[1:]))
++ """
++
++ with pytest.helpers.temp_file("script.py", _contents, state_tree):
++ yield
++
++
++@pytest.fixture
++def issue_56195_test_ps1(state_tree):
++ _contents = """
++ [CmdLetBinding()]
++ Param(
++ [SecureString] $SecureString
++ )
++ $Credential = New-Object System.Net.NetworkCredential("DummyId", $SecureString)
++ $Credential.Password
++ """
++
++ with pytest.helpers.temp_file("issue_56195_test.ps1", _contents, state_tree):
++ yield
++
++
++@contextmanager
++def _ensure_user_exists(name, usermod):
++ if name in usermod.info(name).values():
++ # User already exists; don't touch
++ yield
++ else:
++ # Need to create user for test
++ usermod.add(name)
++ try:
++ yield
++ finally:
++ usermod.delete(name, remove=True)
++
++
++@pytest.mark.slow_test
++def test_run(cmdmod):
++ """
++ cmd.run
++ """
++ shell = os.environ.get("SHELL")
++ if shell is None:
++ # Failed to get the SHELL var, don't run
++ pytest.skip("Unable to get the SHELL environment variable")
++
++ assert cmdmod.run("echo $SHELL")
++ assert cmdmod.run("echo $SHELL", shell=shell, python_shell=True).rstrip() == shell
++ assert cmdmod.run("ls / | grep etc", python_shell=True) == "etc"
++ assert (
++ cmdmod.run(
++ 'echo {{grains.id}} | awk "{print $1}"',
++ template="jinja",
++ python_shell=True,
++ )
++ == "func-tests-minion"
++ )
++ assert cmdmod.run("grep f", stdin="one\ntwo\nthree\nfour\nfive\n") == "four\nfive"
++ assert cmdmod.run('echo "a=b" | sed -e s/=/:/g', python_shell=True) == "a:b"
++
++
++@pytest.mark.slow_test
++def test_stdout(cmdmod):
++ """
++ cmd.run_stdout
++ """
++ assert (
++ cmdmod.run_stdout('echo "cheese"').rstrip() == "cheese"
++ if not salt.utils.platform.is_windows()
++ else '"cheese"'
++ )
++
++
++@pytest.mark.slow_test
++def test_stderr(cmdmod):
++ """
++ cmd.run_stderr
++ """
++ if sys.platform.startswith(("freebsd", "openbsd")):
++ shell = "/bin/sh"
++ else:
++ shell = "/bin/bash"
++
++ assert (
++ cmdmod.run_stderr(
++ 'echo "cheese" 1>&2',
++ shell=shell,
++ python_shell=True,
++ ).rstrip()
++ == "cheese"
++ if not salt.utils.platform.is_windows()
++ else '"cheese"'
++ )
++
++
++@pytest.mark.slow_test
++def test_run_all(cmdmod):
++ """
++ cmd.run_all
++ """
++ if sys.platform.startswith(("freebsd", "openbsd")):
++ shell = "/bin/sh"
++ else:
++ shell = "/bin/bash"
++
++ ret = cmdmod.run_all(
++ 'echo "cheese" 1>&2',
++ shell=shell,
++ python_shell=True,
++ )
++ assert "pid" in ret
++ assert "retcode" in ret
++ assert "stdout" in ret
++ assert "stderr" in ret
++ assert isinstance(ret.get("pid"), int)
++ assert isinstance(ret.get("retcode"), int)
++ assert isinstance(ret.get("stdout"), str)
++ assert isinstance(ret.get("stderr"), str)
++ assert (
++ ret.get("stderr").rstrip() == "cheese"
++ if not salt.utils.platform.is_windows()
++ else '"cheese"'
++ )
++
++
++@pytest.mark.slow_test
++def test_retcode(cmdmod):
++ """
++ cmd.retcode
++ """
++ assert cmdmod.retcode("exit 0", python_shell=True) == 0
++ assert cmdmod.retcode("exit 1", python_shell=True) == 1
++
++
++@pytest.mark.slow_test
++def test_run_all_with_success_retcodes(cmdmod):
++ """
++ cmd.run_all with success_retcodes
++ """
++ ret = cmdmod.run_all("exit 42", success_retcodes=[42], python_shell=True)
++
++ assert "retcode" in ret
++ assert ret.get("retcode") == 0
++
++
++@pytest.mark.slow_test
++def test_retcode_with_success_retcodes(cmdmod):
++ """
++ cmd.retcode with success_retcodes
++ """
++ ret = cmdmod.retcode("exit 42", success_retcodes=[42], python_shell=True)
++
++ assert ret == 0
++
++
++@pytest.mark.slow_test
++def test_run_all_with_success_stderr(cmdmod, tmp_path):
++ """
++ cmd.run with success_stderr
++ """
++ random_file = str(tmp_path / f"{random.random()}")
++
++ if salt.utils.platform.is_windows():
++ func = "type"
++ expected_stderr = "cannot find the file specified"
++ else:
++ func = "cat"
++ expected_stderr = "No such file or directory"
++ ret = cmdmod.run_all(
++ f"{func} {random_file}",
++ success_stderr=[expected_stderr],
++ python_shell=True,
++ )
++
++ assert "retcode" in ret
++ assert ret.get("retcode") == 0
++
++
++@pytest.mark.slow_test
++def test_script(cmdmod, script_contents):
++ """
++ cmd.script
++ """
++ args = "saltines crackers biscuits=yes"
++ script = "salt://script.py"
++ ret = cmdmod.script(script, args, saltenv="base")
++ assert ret["stdout"] == args
++
++
++@pytest.mark.slow_test
++def test_script_query_string(cmdmod, script_contents):
++ """
++ cmd.script
++ """
++ args = "saltines crackers biscuits=yes"
++ script = "salt://script.py?saltenv=base"
++ ret = cmdmod.script(script, args, saltenv="base")
++ assert ret["stdout"] == args
++
++
++@pytest.mark.slow_test
++def test_script_retcode(cmdmod, script_contents):
++ """
++ cmd.script_retcode
++ """
++ script = "salt://script.py"
++ ret = cmdmod.script_retcode(script, saltenv="base")
++ assert ret == 0
++
++
++@pytest.mark.slow_test
++def test_script_cwd(cmdmod, script_contents, tmp_path):
++ """
++ cmd.script with cwd
++ """
++ tmp_cwd = str(tmp_path)
++ args = "saltines crackers biscuits=yes"
++ script = "salt://script.py"
++ ret = cmdmod.script(script, args, cwd=tmp_cwd, saltenv="base")
++ assert ret["stdout"] == args
++
++
++@pytest.mark.slow_test
++def test_script_cwd_with_space(cmdmod, script_contents, tmp_path):
++ """
++ cmd.script with cwd
++ """
++ tmp_cwd = str(tmp_path / "test 2")
++ os.mkdir(tmp_cwd)
++
++ args = "saltines crackers biscuits=yes"
++ script = "salt://script.py"
++ ret = cmdmod.script(script, args, cwd=tmp_cwd, saltenv="base")
++ assert ret["stdout"] == args
++
++
++@pytest.mark.destructive_test
++def test_tty(cmdmod):
++ """
++ cmd.tty
++ """
++ for tty in ("tty0", "pts3"):
++ if os.path.exists(os.path.join("/dev", tty)):
++ ret = cmdmod.tty(tty, "apply salt liberally")
++ assert "Success" in ret
++
++
++@pytest.mark.skip_on_windows
++@pytest.mark.skip_if_binaries_missing("which")
++def test_which(cmdmod):
++ """
++ cmd.which
++ """
++ cmd_which = cmdmod.which("cat")
++ assert isinstance(cmd_which, str)
++ cmd_run = cmdmod.run("which cat")
++ assert isinstance(cmd_run, str)
++ assert cmd_which.rstrip() == cmd_run.rstrip()
++
++
++@pytest.mark.skip_on_windows
++@pytest.mark.skip_if_binaries_missing("which")
++def test_which_bin(cmdmod):
++ """
++ cmd.which_bin
++ """
++ cmds = ["pip3", "pip2", "pip", "pip-python"]
++ ret = cmdmod.which_bin(cmds)
++ assert os.path.split(ret)[1] in cmds
++
++
++@pytest.mark.slow_test
++def test_has_exec(cmdmod, available_python_executable):
++ """
++ cmd.has_exec
++ """
++ assert cmdmod.has_exec(available_python_executable)
++ assert not cmdmod.has_exec("alllfsdfnwieulrrh9123857ygf")
++
++
++@pytest.mark.slow_test
++def test_exec_code(cmdmod, available_python_executable):
++ """
++ cmd.exec_code
++ """
++ code = dedent(
++ """
++ import sys
++ sys.stdout.write('cheese')
++ """
++ )
++ assert cmdmod.exec_code(available_python_executable, code).rstrip() == "cheese"
++
++
++@pytest.mark.slow_test
++def test_exec_code_with_single_arg(cmdmod, available_python_executable):
++ """
++ cmd.exec_code
++ """
++ code = dedent(
++ """
++ import sys
++ sys.stdout.write(sys.argv[1])
++ """
++ )
++ arg = "cheese"
++ assert cmdmod.exec_code(available_python_executable, code, args=arg).rstrip() == arg
++
++
++@pytest.mark.slow_test
++def test_exec_code_with_multiple_args(cmdmod, available_python_executable):
++ """
++ cmd.exec_code
++ """
++ code = dedent(
++ """
++ import sys
++ sys.stdout.write(sys.argv[1])
++ """
++ )
++ arg = "cheese"
++ assert (
++ cmdmod.exec_code(available_python_executable, code, args=[arg, "test"]).rstrip()
++ == arg
++ )
++
++
++@pytest.mark.slow_test
++def test_quotes(cmdmod):
++ """
++ cmd.run with quoted command
++ """
++ cmd = """echo 'SELECT * FROM foo WHERE bar="baz"' """
++ expected_result = 'SELECT * FROM foo WHERE bar="baz"'
++ result = cmdmod.run_stdout(cmd).strip()
++ assert result == expected_result
++
++
++@pytest.mark.skip_if_not_root
++@pytest.mark.skip_on_windows(reason="Skip on Windows, requires password")
++def test_quotes_runas(cmdmod, running_username):
++ """
++ cmd.run with quoted command
++ """
++ cmd = """echo 'SELECT * FROM foo WHERE bar="baz"' """
++ expected_result = 'SELECT * FROM foo WHERE bar="baz"'
++ result = cmdmod.run_all(cmd, runas=running_username)
++ errmsg = f"The command returned: {result}"
++ assert result["retcode"] == 0, errmsg
++ assert result["stdout"] == expected_result, errmsg
++
++
++@pytest.mark.destructive_test
++@pytest.mark.skip_if_not_root
++@pytest.mark.skip_on_windows(reason="Skip on Windows, uses unix commands")
++@pytest.mark.slow_test
++def test_cwd_runas(cmdmod, usermod, runas_usr, tmp_path):
++ """
++ cmd.run should be able to change working directory correctly, whether
++ or not runas is in use.
++ """
++ cmd = "pwd"
++ tmp_cwd = str(tmp_path)
++ os.chmod(tmp_cwd, 0o711)
++
++ cwd_normal = cmdmod.run_stdout(cmd, cwd=tmp_cwd).rstrip("\n")
++ assert tmp_cwd == cwd_normal
++
++ with _ensure_user_exists(runas_usr, usermod):
++ cwd_runas = cmdmod.run_stdout(cmd, cwd=tmp_cwd, runas=runas_usr).rstrip("\n")
++ assert tmp_cwd == cwd_runas
++
++
++@pytest.mark.destructive_test
++@pytest.mark.skip_if_not_root
++@pytest.mark.skip_unless_on_darwin(reason="Applicable to MacOS only")
++@pytest.mark.slow_test
++def test_runas_env(cmdmod, usermod, runas_usr):
++ """
++ cmd.run should populate the environment of the runas user (e.g. PATH)
++ when runas is in use.
++ """
++ with _ensure_user_exists(runas_usr, usermod):
++ user_path = cmdmod.run_stdout('printf %s "$PATH"', runas=runas_usr)
++ # XXX: Not sure of a better way. Environment starts out with
++ # /bin:/usr/bin and should be populated by path helper and the bash
++ # profile.
++ assert "/bin:/usr/bin" != user_path
++
++
++@pytest.mark.destructive_test
++@pytest.mark.skip_if_not_root
++@pytest.mark.skip_unless_on_darwin(reason="Applicable to MacOS only")
++@pytest.mark.slow_test
++def test_runas_complex_command_bad_cwd(cmdmod, usermod, runas_usr, tmp_path):
++ """
++ cmd.run should not accidentally run parts of a complex command when
++ given a cwd which cannot be used by the user the command is run as.
++ Due to the need to use `su -l` to login to another user on MacOS, we
++ cannot cd into directories that the target user themselves does not
++ have execute permission for. To an extent, this test is testing that
++ buggy behaviour, but its purpose is to ensure that the greater bug of
++ running commands after failing to cd does not occur.
++ """
++ tmp_cwd = str(tmp_path)
++ os.chmod(tmp_cwd, 0o700)
++
++ with _ensure_user_exists(runas_usr, usermod):
++ cmd_result = cmdmod.run_all(
++ 'pwd; pwd; : $(echo "You have failed the test" >&2)',
++ cwd=tmp_cwd,
++ runas=runas_usr,
++ )
++
++ assert "" == cmd_result["stdout"]
++ assert "You have failed the test" not in cmd_result["stderr"]
++ assert 0 != cmd_result["retcode"]
++
++
++@SKIP_INITIAL_PHOTONOS_FAILURES
++@pytest.mark.skip_on_windows
++@pytest.mark.skip_if_not_root
++@pytest.mark.destructive_test
++@pytest.mark.slow_test
++def test_runas(cmdmod, usermod, runas_usr):
++ """
++ Ensure that the env is the runas user's
++ """
++ with _ensure_user_exists(runas_usr, usermod):
++ out = cmdmod.run("env", runas=runas_usr).splitlines()
++ assert f"USER={runas_usr}" in out
++
++
++@pytest.mark.skip_if_binaries_missing("sleep", reason="sleep cmd not installed")
++def test_timeout(cmdmod):
++ """
++ cmd.run trigger timeout
++ """
++ out = cmdmod.run("sleep 2 && echo hello", timeout=1, python_shell=True)
++ assert "Timed out" in out
++
++
++@pytest.mark.skip_if_binaries_missing("sleep", reason="sleep cmd not installed")
++def test_timeout_success(cmdmod):
++ """
++ cmd.run sufficient timeout to succeed
++ """
++ out = cmdmod.run("sleep 1 && echo hello", timeout=2, python_shell=True)
++ assert out == "hello"
++
++
++@pytest.mark.slow_test
++def test_cmd_run_whoami(cmdmod, running_username):
++ """
++ test return of whoami
++ """
++ if not salt.utils.platform.is_windows():
++ user = running_username
++ else:
++ user = salt.utils.user.get_specific_user()
++ if user.startswith("sudo_"):
++ user = user.replace("sudo_", "")
++ cmd = cmdmod.run("whoami")
++ assert user.lower() == cmd.lower()
++
++
++@pytest.mark.skip_unless_on_windows(reason="Minion is not Windows")
++@pytest.mark.slow_test
++def test_windows_env_handling(cmdmod):
++ """
++ Ensure that nt.environ is used properly with cmd.run*
++ """
++ out = cmdmod.run("set", env={"abc": "123", "ABC": "456"}).splitlines()
++ assert "abc=123" in out
++ assert "ABC=456" in out
++
++
++@pytest.mark.slow_test
++@pytest.mark.skip_unless_on_windows(reason="Minion is not Windows")
++def test_windows_powershell_script_args(cmdmod, issue_56195_test_ps1):
++ """
++ Ensure that powershell processes inline script in args
++ """
++ val = "i like cheese"
++ args = (
++ '-SecureString (ConvertTo-SecureString -String "{}" -AsPlainText -Force)'
++ " -ErrorAction Stop".format(val)
++ )
++ script = "salt://issue_56195_test.ps1"
++ ret = cmdmod.script(script, args=args, shell="powershell", saltenv="base")
++ assert ret["stdout"] == val
++
++
++@pytest.mark.slow_test
++@pytest.mark.skip_unless_on_windows(reason="Minion is not Windows")
++@pytest.mark.skip_if_binaries_missing("pwsh")
++def test_windows_powershell_script_args_pwsh(cmdmod, issue_56195_test_ps1):
++ """
++ Ensure that powershell processes inline script in args with powershell
++ core
++ """
++ val = "i like cheese"
++ args = (
++ '-SecureString (ConvertTo-SecureString -String "{}" -AsPlainText -Force)'
++ " -ErrorAction Stop".format(val)
++ )
++ script = "salt://issue_56195_test.ps1"
++ ret = cmdmod.script(script, args=args, shell="pwsh", saltenv="base")
++ assert ret["stdout"] == val
+diff --git a/tests/pytests/functional/modules/test_dockermod.py b/tests/pytests/functional/modules/test_dockermod.py
+index 3c7bb25e461..a5b40869352 100644
+--- a/tests/pytests/functional/modules/test_dockermod.py
++++ b/tests/pytests/functional/modules/test_dockermod.py
+@@ -2,6 +2,7 @@
+ Integration tests for the docker_container states
+ """
+ import logging
++import os
+
+ import pytest
+ from saltfactories.utils import random_string
+@@ -11,9 +12,12 @@ pytest.importorskip("docker")
+
+ log = logging.getLogger(__name__)
+
++INSIDE_CONTAINER = os.getenv("HOSTNAME", "") == "salt-test-container"
++
+ pytestmark = [
+ pytest.mark.slow_test,
+ pytest.mark.skip_if_binaries_missing("docker", "dockerd", check_all=False),
++ pytest.mark.skipif(INSIDE_CONTAINER, reason="Cannot run inside a container"),
+ ]
+
+
+diff --git a/tests/pytests/functional/modules/test_swarm.py b/tests/pytests/functional/modules/test_swarm.py
+index 8c0ce8cbd93..9dc70f5b3dc 100644
+--- a/tests/pytests/functional/modules/test_swarm.py
++++ b/tests/pytests/functional/modules/test_swarm.py
+@@ -1,10 +1,15 @@
++import os
++
+ import pytest
+
+ import salt.utils.versions
+
++INSIDE_CONTAINER = os.getenv("HOSTNAME", "") == "salt-test-container"
++
+ pytestmark = [
+ pytest.mark.slow_test,
+ pytest.mark.skip_if_binaries_missing("dockerd"),
++ pytest.mark.skipif(INSIDE_CONTAINER, reason="Cannot run in a container"),
+ ]
+
+ # The swarm module need the docker-py library installed
+diff --git a/tests/pytests/functional/modules/test_system.py b/tests/pytests/functional/modules/test_system.py
+index 2dabaaebfad..3b669c46afd 100644
+--- a/tests/pytests/functional/modules/test_system.py
++++ b/tests/pytests/functional/modules/test_system.py
+@@ -9,9 +9,12 @@ import pytest
+
+ import salt.utils.files
+
++INSIDE_CONTAINER = os.getenv("HOSTNAME", "") == "salt-test-container"
++
+ pytestmark = [
+ pytest.mark.skip_unless_on_linux,
+ pytest.mark.slow_test,
++ pytest.mark.skipif(INSIDE_CONTAINER, reason="No systemd in container."),
+ ]
+
+ log = logging.getLogger(__name__)
+diff --git a/tests/pytests/functional/pillar/hg_pillar/test_hg_pillar.py b/tests/pytests/functional/pillar/hg_pillar/test_hg_pillar.py
+index 183b002d8b2..44603d96f1d 100644
+--- a/tests/pytests/functional/pillar/hg_pillar/test_hg_pillar.py
++++ b/tests/pytests/functional/pillar/hg_pillar/test_hg_pillar.py
+@@ -60,6 +60,7 @@ def hg_setup_and_teardown():
+ @pytest.mark.skip_on_windows(
+ reason="just testing if this or hgfs causes the issue with total crash"
+ )
++@pytest.mark.skipif(not HAS_HG, reason="missing hglib library")
+ def test_ext_pillar(hg_setup_and_teardown):
+ data = hg_pillar.ext_pillar("*", None, hg_setup_and_teardown)
+ assert data == {"testinfo": "info", "testinfo2": "info"}
+diff --git a/tests/pytests/functional/states/rabbitmq/test_cluster.py b/tests/pytests/functional/states/rabbitmq/test_cluster.py
+index f8b4bdc225e..210b22a2360 100644
+--- a/tests/pytests/functional/states/rabbitmq/test_cluster.py
++++ b/tests/pytests/functional/states/rabbitmq/test_cluster.py
+@@ -3,6 +3,7 @@ Integration tests for the rabbitmq_cluster states
+ """
+
+ import logging
++import os
+
+ import pytest
+
+@@ -13,11 +14,14 @@ pytest.importorskip("docker")
+
+ log = logging.getLogger(__name__)
+
++INSIDE_CONTAINER = os.getenv("HOSTNAME", "") == "salt-test-container"
++
+ pytestmark = [
+ pytest.mark.slow_test,
+ pytest.mark.skip_if_binaries_missing(
+ "docker", "dockerd", reason="Docker not installed"
+ ),
++ pytest.mark.skipif(INSIDE_CONTAINER, reason="Cannot run in a container"),
+ ]
+
+
+diff --git a/tests/pytests/functional/states/rabbitmq/test_plugin.py b/tests/pytests/functional/states/rabbitmq/test_plugin.py
+index e1b686e3365..f1191490536 100644
+--- a/tests/pytests/functional/states/rabbitmq/test_plugin.py
++++ b/tests/pytests/functional/states/rabbitmq/test_plugin.py
+@@ -3,6 +3,7 @@ Integration tests for the rabbitmq_plugin states
+ """
+
+ import logging
++import os
+
+ import pytest
+
+@@ -14,11 +15,14 @@ log = logging.getLogger(__name__)
+
+ pytest.importorskip("docker")
+
++INSIDE_CONTAINER = os.getenv("HOSTNAME", "") == "salt-test-container"
++
+ pytestmark = [
+ pytest.mark.slow_test,
+ pytest.mark.skip_if_binaries_missing(
+ "docker", "dockerd", reason="Docker not installed"
+ ),
++ pytest.mark.skipif(INSIDE_CONTAINER, reason="Cannot run in a container"),
+ ]
+
+
+diff --git a/tests/pytests/functional/states/rabbitmq/test_policy.py b/tests/pytests/functional/states/rabbitmq/test_policy.py
+index e5cee97cbc8..7ccf6a522e0 100644
+--- a/tests/pytests/functional/states/rabbitmq/test_policy.py
++++ b/tests/pytests/functional/states/rabbitmq/test_policy.py
+@@ -3,6 +3,7 @@ Integration tests for the rabbitmq_policy states
+ """
+
+ import logging
++import os
+
+ import pytest
+
+@@ -14,11 +15,14 @@ log = logging.getLogger(__name__)
+
+ pytest.importorskip("docker")
+
++INSIDE_CONTAINER = os.getenv("HOSTNAME", "") == "salt-test-container"
++
+ pytestmark = [
+ pytest.mark.slow_test,
+ pytest.mark.skip_if_binaries_missing(
+ "docker", "dockerd", reason="Docker not installed"
+ ),
++ pytest.mark.skipif(INSIDE_CONTAINER, reason="Cannot run in a container"),
+ ]
+
+
+diff --git a/tests/pytests/functional/states/rabbitmq/test_upstream.py b/tests/pytests/functional/states/rabbitmq/test_upstream.py
+index cfdad35aba6..c7bcf3b0d44 100644
+--- a/tests/pytests/functional/states/rabbitmq/test_upstream.py
++++ b/tests/pytests/functional/states/rabbitmq/test_upstream.py
+@@ -3,6 +3,7 @@ Integration tests for the rabbitmq_user states
+ """
+
+ import logging
++import os
+
+ import pytest
+
+@@ -13,11 +14,14 @@ log = logging.getLogger(__name__)
+
+ pytest.importorskip("docker")
+
++INSIDE_CONTAINER = os.getenv("HOSTNAME", "") == "salt-test-container"
++
+ pytestmark = [
+ pytest.mark.slow_test,
+ pytest.mark.skip_if_binaries_missing(
+ "docker", "dockerd", reason="Docker not installed"
+ ),
++ pytest.mark.skipif(INSIDE_CONTAINER, reason="Cannot run in a container"),
+ ]
+
+
+diff --git a/tests/pytests/functional/states/rabbitmq/test_user.py b/tests/pytests/functional/states/rabbitmq/test_user.py
+index 2f9b22d28d2..31723df7be8 100644
+--- a/tests/pytests/functional/states/rabbitmq/test_user.py
++++ b/tests/pytests/functional/states/rabbitmq/test_user.py
+@@ -3,6 +3,7 @@ Integration tests for the rabbitmq_user states
+ """
+
+ import logging
++import os
+
+ import pytest
+
+@@ -13,11 +14,14 @@ log = logging.getLogger(__name__)
+
+ pytest.importorskip("docker")
+
++INSIDE_CONTAINER = os.getenv("HOSTNAME", "") == "salt-test-container"
++
+ pytestmark = [
+ pytest.mark.slow_test,
+ pytest.mark.skip_if_binaries_missing(
+ "docker", "dockerd", reason="Docker not installed"
+ ),
++ pytest.mark.skipif(INSIDE_CONTAINER, reason="Cannot run in a container"),
+ ]
+
+
+diff --git a/tests/pytests/functional/states/rabbitmq/test_vhost.py b/tests/pytests/functional/states/rabbitmq/test_vhost.py
+index a648d41854f..d6ac6901a25 100644
+--- a/tests/pytests/functional/states/rabbitmq/test_vhost.py
++++ b/tests/pytests/functional/states/rabbitmq/test_vhost.py
+@@ -3,6 +3,7 @@ Integration tests for the rabbitmq_user states
+ """
+
+ import logging
++import os
+
+ import pytest
+
+@@ -13,11 +14,14 @@ log = logging.getLogger(__name__)
+
+ pytest.importorskip("docker")
+
++INSIDE_CONTAINER = os.getenv("HOSTNAME", "") == "salt-test-container"
++
+ pytestmark = [
+ pytest.mark.slow_test,
+ pytest.mark.skip_if_binaries_missing(
+ "docker", "dockerd", reason="Docker not installed"
+ ),
++ pytest.mark.skipif(INSIDE_CONTAINER, reason="Cannot run in a container"),
+ ]
+
+
+diff --git a/tests/pytests/functional/states/test_docker_network.py b/tests/pytests/functional/states/test_docker_network.py
+index 16a78b13a4a..0da01ed8bac 100644
+--- a/tests/pytests/functional/states/test_docker_network.py
++++ b/tests/pytests/functional/states/test_docker_network.py
+@@ -1,5 +1,6 @@
+ import functools
+ import logging
++import os
+ import random
+
+ import pytest
+@@ -15,9 +16,13 @@ pytest.importorskip("docker")
+
+ log = logging.getLogger(__name__)
+
++INSIDE_CONTAINER = os.getenv("HOSTNAME", "") == "salt-test-container"
++
++
+ pytestmark = [
+ pytest.mark.slow_test,
+ pytest.mark.skip_if_binaries_missing("docker", "dockerd", check_all=False),
++ pytest.mark.skipif(INSIDE_CONTAINER, reason="Cannot run in a container"),
+ ]
+
+
+diff --git a/tests/pytests/functional/states/test_pkg.py b/tests/pytests/functional/states/test_pkg.py
+index 0e82dc608ba..12318c996d1 100644
+--- a/tests/pytests/functional/states/test_pkg.py
++++ b/tests/pytests/functional/states/test_pkg.py
+@@ -64,7 +64,7 @@ def PKG_CAP_TARGETS(grains):
+ _PKG_CAP_TARGETS = []
+ if grains["os_family"] == "Suse":
+ if grains["os"] == "SUSE":
+- _PKG_CAP_TARGETS = [("perl(ZNC)", "znc-perl")]
++ _PKG_CAP_TARGETS = [("perl(Error)", "perl-Error")]
+ if not _PKG_CAP_TARGETS:
+ pytest.skip("Capability not provided")
+ return _PKG_CAP_TARGETS
+@@ -856,8 +856,8 @@ def test_pkg_cap_003_installed_multipkg_with_version(
+ This is a destructive test as it installs and then removes two packages
+ """
+ target, realpkg = PKG_CAP_TARGETS[0]
+- version = latest_version(target)
+- realver = latest_version(realpkg)
++ version = modules.pkg.version(target)
++ realver = modules.pkg.version(realpkg)
+
+ # If this condition is False, we need to find new targets.
+ # This needs to be able to test successful installation of packages.
+diff --git a/tests/pytests/integration/cli/test_syndic_eauth.py b/tests/pytests/integration/cli/test_syndic_eauth.py
+index 57e9c0a467a..218022b9e3c 100644
+--- a/tests/pytests/integration/cli/test_syndic_eauth.py
++++ b/tests/pytests/integration/cli/test_syndic_eauth.py
+@@ -1,4 +1,5 @@
+ import json
++import os
+ import pathlib
+ import tempfile
+ import time
+@@ -7,9 +8,11 @@ import pytest
+
+ docker = pytest.importorskip("docker")
+
++INSIDE_CONTAINER = os.getenv("HOSTNAME", "") == "salt-test-container"
+
+ pytestmark = [
+ pytest.mark.core_test,
++ pytest.mark.skipif(INSIDE_CONTAINER, reason="Cannot run in a container"),
+ ]
+
+
+diff --git a/tests/pytests/integration/daemons/test_memory_leak.py b/tests/pytests/integration/daemons/test_memory_leak.py
+index 1b782760418..8157091c44e 100644
+--- a/tests/pytests/integration/daemons/test_memory_leak.py
++++ b/tests/pytests/integration/daemons/test_memory_leak.py
+@@ -1,3 +1,4 @@
++import os
+ import time
+ from multiprocessing import Manager, Process
+
+@@ -8,6 +9,8 @@ pytestmark = [
+ pytest.mark.slow_test,
+ ]
+
++GITHUB_ACTIONS = bool(os.getenv("GITHUB_ACTIONS", False))
++
+
+ @pytest.fixture
+ def testfile_path(tmp_path):
+@@ -45,6 +48,7 @@ def file_add_delete_sls(testfile_path, base_env_state_tree_root_dir):
+
+
+ @pytest.mark.skip_on_darwin(reason="MacOS is a spawning platform, won't work")
++@pytest.mark.skipif(GITHUB_ACTIONS, reason="Test is failing in GitHub Actions")
+ @pytest.mark.flaky(max_runs=4)
+ def test_memory_leak(salt_cli, salt_minion, file_add_delete_sls):
+ max_usg = None
+diff --git a/tests/pytests/integration/modules/test_cmdmod.py b/tests/pytests/integration/modules/test_cmdmod.py
+index 4e8ce5824ee..d9c326c3f0a 100644
+--- a/tests/pytests/integration/modules/test_cmdmod.py
++++ b/tests/pytests/integration/modules/test_cmdmod.py
+@@ -1,5 +1,11 @@
++import logging
++
+ import pytest
+
++import salt.utils.user
++
++log = logging.getLogger(__name__)
++
+
+ @pytest.fixture(scope="module")
+ def non_root_account():
+@@ -7,6 +13,14 @@ def non_root_account():
+ yield account
+
+
++@pytest.fixture
++def running_username():
++ """
++ Return the username that is running the code.
++ """
++ return salt.utils.user.get_user()
++
++
+ @pytest.mark.skip_if_not_root
+ def test_exec_code_all(salt_call_cli, non_root_account):
+ ret = salt_call_cli.run(
+@@ -22,3 +36,82 @@ def test_long_stdout(salt_cli, salt_minion):
+ )
+ assert ret.returncode == 0
+ assert len(ret.data.strip()) == len(echo_str)
++
++
++@pytest.mark.skip_if_not_root
++@pytest.mark.skip_on_windows(reason="Skip on Windows, uses unix commands")
++def test_avoid_injecting_shell_code_as_root(
++ salt_call_cli, non_root_account, running_username
++):
++ """
++ cmd.run should execute the whole command as the "runas" user, not
++ running substitutions as root.
++ """
++ cmd = "echo $(id -u)"
++
++ ret = salt_call_cli.run("cmd.run_stdout", cmd)
++ root_id = ret.json
++ ret = salt_call_cli.run("cmd.run_stdout", cmd, runas=running_username)
++ runas_root_id = ret.json
++
++ ret = salt_call_cli.run("cmd.run_stdout", cmd, runas=non_root_account.username)
++ user_id = ret.json
++
++ assert user_id != root_id
++ assert user_id != runas_root_id
++ assert root_id == runas_root_id
++
++
++@pytest.mark.slow_test
++def test_blacklist_glob(salt_call_cli):
++ """
++ cmd_blacklist_glob
++ """
++ cmd = "bad_command --foo"
++ ret = salt_call_cli.run(
++ "cmd.run",
++ cmd,
++ )
++
++ assert (
++ ret.stderr.rstrip()
++ == "Error running 'cmd.run': The shell command \"bad_command --foo\" is not permitted"
++ )
++
++
++@pytest.mark.slow_test
++def test_hide_output(salt_call_cli):
++ """
++ Test the hide_output argument
++ """
++ ls_command = (
++ ["ls", "/"] if not salt.utils.platform.is_windows() else ["dir", "c:\\"]
++ )
++
++ error_command = ["thiscommanddoesnotexist"]
++
++ # cmd.run
++ ret = salt_call_cli.run("cmd.run", ls_command, hide_output=True)
++ assert ret.data == ""
++
++ # cmd.shell
++ ret = salt_call_cli.run("cmd.shell", ls_command, hide_output=True)
++ assert ret.data == ""
++
++ # cmd.run_stdout
++ ret = salt_call_cli.run("cmd.run_stdout", ls_command, hide_output=True)
++ assert ret.data == ""
++
++ # cmd.run_stderr
++ ret = salt_call_cli.run("cmd.shell", error_command, hide_output=True)
++ assert ret.data == ""
++
++ # cmd.run_all (command should have produced stdout)
++ ret = salt_call_cli.run("cmd.run_all", ls_command, hide_output=True)
++ assert ret.data["stdout"] == ""
++ assert ret.data["stderr"] == ""
++
++ # cmd.run_all (command should have produced stderr)
++ ret = salt_call_cli.run("cmd.run_all", error_command, hide_output=True)
++ assert ret.data["stdout"] == ""
++ assert ret.data["stderr"] == ""
+diff --git a/tests/pytests/integration/modules/test_virt.py b/tests/pytests/integration/modules/test_virt.py
+index 57ec239c4e9..1b7f30154a7 100644
+--- a/tests/pytests/integration/modules/test_virt.py
++++ b/tests/pytests/integration/modules/test_virt.py
+@@ -2,6 +2,7 @@
+ Validate the virt module
+ """
+ import logging
++import os
+ from numbers import Number
+ from xml.etree import ElementTree
+
+@@ -14,9 +15,12 @@ docker = pytest.importorskip("docker")
+
+ log = logging.getLogger(__name__)
+
++INSIDE_CONTAINER = os.getenv("HOSTNAME", "") == "salt-test-container"
++
+ pytestmark = [
+ pytest.mark.slow_test,
+ pytest.mark.skip_if_binaries_missing("docker"),
++ pytest.mark.skipif(INSIDE_CONTAINER, reason="Cannot run in a container"),
+ ]
+
+
+diff --git a/tests/pytests/integration/ssh/test_log.py b/tests/pytests/integration/ssh/test_log.py
+index e87c4a8581f..683feb8bd91 100644
+--- a/tests/pytests/integration/ssh/test_log.py
++++ b/tests/pytests/integration/ssh/test_log.py
+@@ -2,6 +2,7 @@
+ Integration tests for salt-ssh logging
+ """
+ import logging
++import os
+ import time
+
+ import pytest
+@@ -11,12 +12,14 @@ from tests.support.helpers import Keys
+
+ pytest.importorskip("docker")
+
++INSIDE_CONTAINER = os.getenv("HOSTNAME", "") == "salt-test-container"
+
+ log = logging.getLogger(__name__)
+
+ pytestmark = [
+ pytest.mark.slow_test,
+ pytest.mark.skip_if_binaries_missing("dockerd"),
++ pytest.mark.skipif(INSIDE_CONTAINER, reason="Cannot run in a container"),
+ ]
+
+
+diff --git a/tests/pytests/integration/ssh/test_master.py b/tests/pytests/integration/ssh/test_master.py
+index 31e318870cb..0c2f482cf9f 100644
+--- a/tests/pytests/integration/ssh/test_master.py
++++ b/tests/pytests/integration/ssh/test_master.py
+@@ -2,6 +2,8 @@
+ Simple Smoke Tests for Connected SSH minions
+ """
+
++import os
++
+ import pytest
+ from saltfactories.utils.functional import StateResult
+
+@@ -10,7 +12,10 @@ pytestmark = [
+ pytest.mark.skip_on_windows(reason="salt-ssh not available on Windows"),
+ ]
+
++INSIDE_CONTAINER = os.getenv("HOSTNAME", "") == "salt-test-container"
++
+
++@pytest.mark.skipif(INSIDE_CONTAINER, reason="No systemd in container.")
+ @pytest.mark.skip_if_not_root
+ def test_service(salt_ssh_cli, grains):
+ service = "cron"
+diff --git a/tests/pytests/integration/ssh/test_py_versions.py b/tests/pytests/integration/ssh/test_py_versions.py
+index 52ab819e808..71d4cfaa94e 100644
+--- a/tests/pytests/integration/ssh/test_py_versions.py
++++ b/tests/pytests/integration/ssh/test_py_versions.py
+@@ -2,6 +2,7 @@
+ Integration tests for salt-ssh py_versions
+ """
+ import logging
++import os
+ import socket
+ import time
+
+@@ -12,12 +13,14 @@ from tests.support.helpers import Keys
+
+ pytest.importorskip("docker")
+
++INSIDE_CONTAINER = os.getenv("HOSTNAME", "") == "salt-test-container"
+
+ log = logging.getLogger(__name__)
+
+ pytestmark = [
+ pytest.mark.slow_test,
+ pytest.mark.skip_if_binaries_missing("dockerd"),
++ pytest.mark.skipif(INSIDE_CONTAINER, reason="Cannot run in a container"),
+ ]
+
+
+diff --git a/tests/pytests/integration/ssh/test_ssh_setup.py b/tests/pytests/integration/ssh/test_ssh_setup.py
+index eddf31caccd..79b55ad90a5 100644
+--- a/tests/pytests/integration/ssh/test_ssh_setup.py
++++ b/tests/pytests/integration/ssh/test_ssh_setup.py
+@@ -17,12 +17,14 @@ from tests.support.helpers import Keys
+
+ pytest.importorskip("docker")
+
++INSIDE_CONTAINER = os.getenv("HOSTNAME", "") == "salt-test-container"
+
+ log = logging.getLogger(__name__)
+
+ pytestmark = [
+ pytest.mark.slow_test,
+ pytest.mark.skip_if_binaries_missing("dockerd"),
++ pytest.mark.skipif(INSIDE_CONTAINER, reason="Cannot run in a container"),
+ ]
+
+
+diff --git a/tests/pytests/scenarios/compat/test_with_versions.py b/tests/pytests/scenarios/compat/test_with_versions.py
+index 75a2b87f24c..498dd6a60de 100644
+--- a/tests/pytests/scenarios/compat/test_with_versions.py
++++ b/tests/pytests/scenarios/compat/test_with_versions.py
+@@ -5,6 +5,7 @@
+ Test current salt master with older salt minions
+ """
+ import logging
++import os
+ import pathlib
+
+ import pytest
+@@ -18,6 +19,8 @@ docker = pytest.importorskip("docker")
+
+ log = logging.getLogger(__name__)
+
++INSIDE_CONTAINER = os.getenv("HOSTNAME", "") == "salt-test-container"
++
+
+ pytestmark = [
+ pytest.mark.slow_test,
+@@ -25,6 +28,7 @@ pytestmark = [
+ pytest.mark.skipif(
+ salt.utils.platform.is_photonos() is True, reason="Skip on PhotonOS"
+ ),
++ pytest.mark.skipif(INSIDE_CONTAINER, reason="Cannot run in a container"),
+ ]
+
+
+diff --git a/tests/pytests/scenarios/failover/multimaster/test_failover_master.py b/tests/pytests/scenarios/failover/multimaster/test_failover_master.py
+index 6efecfb8334..9f6251a4d6f 100644
+--- a/tests/pytests/scenarios/failover/multimaster/test_failover_master.py
++++ b/tests/pytests/scenarios/failover/multimaster/test_failover_master.py
+@@ -12,7 +12,10 @@ pytestmark = [
+
+ log = logging.getLogger(__name__)
+
++GITHUB_ACTIONS = bool(os.getenv("GITHUB_ACTIONS", False))
+
++
++@pytest.mark.skipif(GITHUB_ACTIONS, reason="Test is failing in GitHub Actions")
+ def test_pki(salt_mm_failover_master_1, salt_mm_failover_master_2, caplog):
+ """
+ Verify https://docs.saltproject.io/en/latest/topics/tutorials/multimaster_pki.html
+diff --git a/tests/pytests/scenarios/setup/test_install.py b/tests/pytests/scenarios/setup/test_install.py
+index 48f1d5889f6..7664fda804e 100644
+--- a/tests/pytests/scenarios/setup/test_install.py
++++ b/tests/pytests/scenarios/setup/test_install.py
+@@ -3,6 +3,7 @@ Tests for building and installing salt
+ """
+ import json
+ import logging
++import os
+ import pathlib
+ import re
+ import sys
+@@ -16,11 +17,16 @@ from salt.modules.virtualenv_mod import KNOWN_BINARY_NAMES
+
+ log = logging.getLogger(__name__)
+
++INSIDE_CONTAINER = os.getenv("HOSTNAME", "") == "salt-test-container"
++
+ pytestmark = [
+ pytest.mark.core_test,
+ pytest.mark.windows_whitelisted,
+ pytest.mark.skip_initial_onedir_failure,
+ pytest.mark.skip_if_binaries_missing(*KNOWN_BINARY_NAMES, check_all=False),
++ pytest.mark.skipif(
++ INSIDE_CONTAINER, reason="No gcc and python3-devel in container."
++ ),
+ ]
+
+
+diff --git a/tests/pytests/unit/modules/test_aptpkg.py b/tests/pytests/unit/modules/test_aptpkg.py
+index eb72447c3aa..6f0b905ef73 100644
+--- a/tests/pytests/unit/modules/test_aptpkg.py
++++ b/tests/pytests/unit/modules/test_aptpkg.py
+@@ -1360,17 +1360,17 @@ def test_call_apt_dpkg_lock():
+ ]
+
+ cmd_mock = MagicMock(side_effect=cmd_side_effect)
+- cmd_call = (
++ cmd_call = [
+ call(
+ ["dpkg", "-l", "python"],
+- env={},
+- ignore_retcode=False,
+ output_loglevel="quiet",
+ python_shell=True,
++ env={},
++ ignore_retcode=False,
+ username="Darth Vader",
+ ),
+- )
+- expected_calls = [cmd_call * 5]
++ ]
++ expected_calls = cmd_call * 5
+
+ with patch.dict(
+ aptpkg.__salt__,
+@@ -1390,7 +1390,7 @@ def test_call_apt_dpkg_lock():
+
+ # We should attempt to call the cmd 5 times
+ assert cmd_mock.call_count == 5
+- cmd_mock.has_calls(expected_calls)
++ cmd_mock.assert_has_calls(expected_calls)
+
+
+ def test_services_need_restart_checkrestart_missing():
+diff --git a/tests/pytests/unit/modules/test_linux_sysctl.py b/tests/pytests/unit/modules/test_linux_sysctl.py
+index 0bdd24039d7..6b0875bc460 100644
+--- a/tests/pytests/unit/modules/test_linux_sysctl.py
++++ b/tests/pytests/unit/modules/test_linux_sysctl.py
+@@ -215,7 +215,7 @@ def test_persist_no_conf_failure():
+ ):
+ with pytest.raises(CommandExecutionError):
+ linux_sysctl.persist("net.ipv4.ip_forward", 42, config=None)
+- fopen_mock.called_once()
++ fopen_mock.assert_called_once()
+
+
+ def test_persist_no_conf_success():
+@@ -353,7 +353,7 @@ def test_persist_value_with_spaces_already_set(tmp_path):
+ """
+ config = str(tmp_path / "existing_sysctl_with_spaces.conf")
+ value = "|/usr/share/kdump-tools/dump-core %p %s %t %e"
+- config_file_content = "kernel.core_pattern = {}\n".format(value)
++ config_file_content = f"kernel.core_pattern = {value}\n"
+ with fopen(config, "w", encoding="utf-8") as config_file:
+ config_file.write(config_file_content)
+ mock_run = MagicMock(return_value=value)
+@@ -383,7 +383,7 @@ def test_persist_value_with_spaces_already_configured(tmp_path):
+ """
+ config = str(tmp_path / "existing_sysctl_with_spaces.conf")
+ value = "|/usr/share/kdump-tools/dump-core %p %s %t %e"
+- config_file_content = "kernel.core_pattern = {}\n".format(value)
++ config_file_content = f"kernel.core_pattern = {value}\n"
+ with fopen(config, "w", encoding="utf-8") as config_file:
+ config_file.write(config_file_content)
+ mock_run = MagicMock(return_value="")
+@@ -451,7 +451,7 @@ def test_persist_value_with_spaces_update_config(tmp_path):
+ assert os.path.isfile(config)
+ with fopen(config, encoding="utf-8") as config_file:
+ written = config_file.read()
+- assert written == "kernel.core_pattern = {}\n".format(value)
++ assert written == f"kernel.core_pattern = {value}\n"
+
+
+ def test_persist_value_with_spaces_new_file(tmp_path):
+diff --git a/tests/pytests/unit/modules/test_win_ip.py b/tests/pytests/unit/modules/test_win_ip.py
+index 38eb6b1ac5f..94a3fe7ca93 100644
+--- a/tests/pytests/unit/modules/test_win_ip.py
++++ b/tests/pytests/unit/modules/test_win_ip.py
+@@ -151,7 +151,7 @@ def test_enable():
+ ):
+ assert win_ip.enable("Ethernet")
+
+- mock_cmd.called_once_with(
++ mock_cmd.assert_called_once_with(
+ [
+ "netsh",
+ "interface",
+@@ -180,7 +180,7 @@ def test_disable():
+ ):
+ assert win_ip.disable("Ethernet")
+
+- mock_cmd.called_once_with(
++ mock_cmd.assert_called_once_with(
+ [
+ "netsh",
+ "interface",
+diff --git a/tests/pytests/unit/test_master.py b/tests/pytests/unit/test_master.py
+index d338307d1f8..679229066d4 100644
+--- a/tests/pytests/unit/test_master.py
++++ b/tests/pytests/unit/test_master.py
+@@ -61,7 +61,7 @@ def test_fileserver_duration():
+ end = time.time()
+ # Interval is equal to timeout so the _do_update method will be called
+ # one time.
+- update.called_once()
++ update.assert_called_once()
+ # Timeout is 1 second
+ duration = end - start
+ if duration > 2 and salt.utils.platform.spawning_platform():
+diff --git a/tests/pytests/unit/test_minion.py b/tests/pytests/unit/test_minion.py
+index 740743194e4..a9e91742a2d 100644
+--- a/tests/pytests/unit/test_minion.py
++++ b/tests/pytests/unit/test_minion.py
+@@ -655,7 +655,9 @@ def test_gen_modules_executors(minion_opts):
+ with patch("salt.pillar.get_pillar", return_value=MockPillarCompiler()):
+ with patch("salt.loader.executors") as execmock:
+ minion.gen_modules()
+- assert execmock.called_with(minion.opts, minion.functions)
++ execmock.assert_called_with(
++ minion.opts, functions=minion.functions, proxy=minion.proxy, context={}
++ )
+ finally:
+ minion.destroy()
+
+diff --git a/tests/pytests/unit/utils/event/test_event.py b/tests/pytests/unit/utils/event/test_event.py
+index e289e72dad0..f4b6c159996 100644
+--- a/tests/pytests/unit/utils/event/test_event.py
++++ b/tests/pytests/unit/utils/event/test_event.py
+@@ -38,7 +38,7 @@ def sock_dir(tmp_path):
+ def _assert_got_event(evt, data, msg=None, expected_failure=False):
+ assert evt is not None, msg
+ for key in data:
+- assert key in evt, "{}: Key {} missing".format(msg, key)
++ assert key in evt, f"{msg}: Key {key} missing"
+ assertMsg = "{0}: Key {1} value mismatch, {2} != {3}"
+ assertMsg = assertMsg.format(msg, key, data[key], evt[key])
+ if not expected_failure:
+@@ -59,8 +59,8 @@ def test_minion_event(sock_dir):
+ :10
+ ]
+ with salt.utils.event.MinionEvent(opts, listen=False) as me:
+- assert me.puburi == str(sock_dir / "minion_event_{}_pub.ipc".format(id_hash))
+- assert me.pulluri == str(sock_dir / "minion_event_{}_pull.ipc".format(id_hash))
++ assert me.puburi == str(sock_dir / f"minion_event_{id_hash}_pub.ipc")
++ assert me.pulluri == str(sock_dir / f"minion_event_{id_hash}_pull.ipc")
+
+
+ def test_minion_event_tcp_ipc_mode():
+@@ -73,8 +73,8 @@ def test_minion_event_tcp_ipc_mode():
+ def test_minion_event_no_id(sock_dir):
+ with salt.utils.event.MinionEvent(dict(sock_dir=str(sock_dir)), listen=False) as me:
+ id_hash = hashlib.sha256(salt.utils.stringutils.to_bytes("")).hexdigest()[:10]
+- assert me.puburi == str(sock_dir / "minion_event_{}_pub.ipc".format(id_hash))
+- assert me.pulluri == str(sock_dir / "minion_event_{}_pull.ipc".format(id_hash))
++ assert me.puburi == str(sock_dir / f"minion_event_{id_hash}_pub.ipc")
++ assert me.pulluri == str(sock_dir / f"minion_event_{id_hash}_pull.ipc")
+
+
+ @pytest.mark.slow_test
+@@ -256,9 +256,9 @@ def test_event_many(sock_dir):
+ with eventpublisher_process(str(sock_dir)):
+ with salt.utils.event.MasterEvent(str(sock_dir), listen=True) as me:
+ for i in range(500):
+- me.fire_event({"data": "{}".format(i)}, "testevents")
++ me.fire_event({"data": f"{i}"}, "testevents")
+ evt = me.get_event(tag="testevents")
+- _assert_got_event(evt, {"data": "{}".format(i)}, "Event {}".format(i))
++ _assert_got_event(evt, {"data": f"{i}"}, f"Event {i}")
+
+
+ @pytest.mark.slow_test
+@@ -268,10 +268,10 @@ def test_event_many_backlog(sock_dir):
+ with salt.utils.event.MasterEvent(str(sock_dir), listen=True) as me:
+ # Must not exceed zmq HWM
+ for i in range(500):
+- me.fire_event({"data": "{}".format(i)}, "testevents")
++ me.fire_event({"data": f"{i}"}, "testevents")
+ for i in range(500):
+ evt = me.get_event(tag="testevents")
+- _assert_got_event(evt, {"data": "{}".format(i)}, "Event {}".format(i))
++ _assert_got_event(evt, {"data": f"{i}"}, f"Event {i}")
+
+
+ # Test the fire_master function. As it wraps the underlying fire_event,
+@@ -300,7 +300,7 @@ def test_connect_pull_should_debug_log_on_StreamClosedError():
+ event = SaltEvent(node=None)
+ with patch.object(event, "pusher") as mock_pusher:
+ with patch.object(
+- salt.utils.event.log, "debug", auto_spec=True
++ salt.utils.event.log, "debug", autospec=True
+ ) as mock_log_debug:
+ mock_pusher.connect.side_effect = (
+ salt.ext.tornado.iostream.StreamClosedError
+@@ -317,10 +317,10 @@ def test_connect_pull_should_error_log_on_other_errors(error):
+ event = SaltEvent(node=None)
+ with patch.object(event, "pusher") as mock_pusher:
+ with patch.object(
+- salt.utils.event.log, "debug", auto_spec=True
++ salt.utils.event.log, "debug", autospec=True
+ ) as mock_log_debug:
+ with patch.object(
+- salt.utils.event.log, "error", auto_spec=True
++ salt.utils.event.log, "error", autospec=True
+ ) as mock_log_error:
+ mock_pusher.connect.side_effect = error
+ event.connect_pull()
+diff --git a/tests/unit/modules/test_boto_apigateway.py b/tests/unit/modules/test_boto_apigateway.py
+index 5f3d2a49822..ebf50679bd8 100644
+--- a/tests/unit/modules/test_boto_apigateway.py
++++ b/tests/unit/modules/test_boto_apigateway.py
+@@ -15,6 +15,7 @@ from tests.support.unit import TestCase
+
+ # pylint: disable=import-error,no-name-in-module
+ try:
++ import boto
+ import boto3
+ import botocore
+ from botocore.exceptions import ClientError
+diff --git a/tests/unit/modules/test_boto_cognitoidentity.py b/tests/unit/modules/test_boto_cognitoidentity.py
+index 1e213a169ac..974832f9ff9 100644
+--- a/tests/unit/modules/test_boto_cognitoidentity.py
++++ b/tests/unit/modules/test_boto_cognitoidentity.py
+@@ -14,6 +14,7 @@ from tests.support.unit import TestCase
+
+ # pylint: disable=import-error,no-name-in-module
+ try:
++ import boto
+ import boto3
+ from botocore.exceptions import ClientError
+
+diff --git a/tests/unit/modules/test_boto_elasticsearch_domain.py b/tests/unit/modules/test_boto_elasticsearch_domain.py
+index 5c5845aa25b..0578a81e8ef 100644
+--- a/tests/unit/modules/test_boto_elasticsearch_domain.py
++++ b/tests/unit/modules/test_boto_elasticsearch_domain.py
+@@ -14,6 +14,7 @@ from tests.support.unit import TestCase
+
+ # pylint: disable=import-error,no-name-in-module
+ try:
++ import boto
+ import boto3
+ from botocore.exceptions import ClientError
+
+diff --git a/tests/unit/modules/test_boto_lambda.py b/tests/unit/modules/test_boto_lambda.py
+index d32dc9345b6..ecaa532f1ff 100644
+--- a/tests/unit/modules/test_boto_lambda.py
++++ b/tests/unit/modules/test_boto_lambda.py
+@@ -18,6 +18,7 @@ from tests.support.unit import TestCase
+
+ # pylint: disable=import-error,no-name-in-module
+ try:
++ import boto
+ import boto3
+ from botocore import __version__ as found_botocore_version
+ from botocore.exceptions import ClientError
+diff --git a/tests/unit/modules/test_network.py b/tests/unit/modules/test_network.py
+index 34b06250fc6..9eef9a02f58 100644
+--- a/tests/unit/modules/test_network.py
++++ b/tests/unit/modules/test_network.py
+@@ -153,9 +153,11 @@ class NetworkTestCase(TestCase, LoaderModuleMockMixin):
+ """
+ Test for Performs a DNS lookup with dig
+ """
+- with patch("salt.utils.path.which", MagicMock(return_value="dig")), patch.dict(
++ with patch.dict(
+ network.__utils__, {"network.sanitize_host": MagicMock(return_value="A")}
+- ), patch.dict(network.__salt__, {"cmd.run": MagicMock(return_value="A")}):
++ ), patch("salt.utils.path.which", MagicMock(return_value="dig")), patch.dict(
++ network.__salt__, {"cmd.run": MagicMock(return_value="A")}
++ ):
+ self.assertEqual(network.dig("host"), "A")
+
+ def test_arp(self):
+diff --git a/tests/unit/modules/test_nilrt_ip.py b/tests/unit/modules/test_nilrt_ip.py
+index 1261473edb4..50dc13b20b8 100644
+--- a/tests/unit/modules/test_nilrt_ip.py
++++ b/tests/unit/modules/test_nilrt_ip.py
+@@ -28,7 +28,7 @@ class NilrtIPTestCase(TestCase, LoaderModuleMockMixin):
+ "salt.modules.nilrt_ip._change_dhcp_config", return_value=True
+ ) as change_dhcp_config_mock:
+ assert nilrt_ip._change_state("test_interface", "down")
+- assert change_dhcp_config_mock.called_with("test_interface", False)
++ change_dhcp_config_mock.assert_called_with("test_interface", False)
+
+ def test_change_state_up_state(self):
+ """
+@@ -42,7 +42,7 @@ class NilrtIPTestCase(TestCase, LoaderModuleMockMixin):
+ "salt.modules.nilrt_ip._change_dhcp_config", return_value=True
+ ) as change_dhcp_config_mock:
+ assert nilrt_ip._change_state("test_interface", "up")
+- assert change_dhcp_config_mock.called_with("test_interface")
++ change_dhcp_config_mock.assert_called_with("test_interface")
+
+ def test_set_static_all_with_dns(self):
+ """
+diff --git a/tests/unit/modules/test_zcbuildout.py b/tests/unit/modules/test_zcbuildout.py
+index f793e3fc3f8..5a5996e110e 100644
+--- a/tests/unit/modules/test_zcbuildout.py
++++ b/tests/unit/modules/test_zcbuildout.py
+@@ -451,6 +451,7 @@ class BuildoutOnlineTestCase(Base):
+ )
+
+ @pytest.mark.slow_test
++ @pytest.mark.skip(reason="TODO this test should probably be fixed")
+ def test_run_buildout(self):
+ if salt.modules.virtualenv_mod.virtualenv_ver(self.ppy_st) >= (20, 0, 0):
+ self.skipTest(
+@@ -467,6 +468,7 @@ class BuildoutOnlineTestCase(Base):
+ self.assertTrue("Installing b" in out)
+
+ @pytest.mark.slow_test
++ @pytest.mark.skip(reason="TODO this test should probably be fixed")
+ def test_buildout(self):
+ if salt.modules.virtualenv_mod.virtualenv_ver(self.ppy_st) >= (20, 0, 0):
+ self.skipTest(
+diff --git a/tests/unit/netapi/rest_tornado/test_saltnado.py b/tests/unit/netapi/rest_tornado/test_saltnado.py
+index 7b63a65d4f3..c4758e700ab 100644
+--- a/tests/unit/netapi/rest_tornado/test_saltnado.py
++++ b/tests/unit/netapi/rest_tornado/test_saltnado.py
+@@ -647,7 +647,6 @@ class TestDisbatchLocal(salt.ext.tornado.testing.AsyncTestCase):
+ with patch.object(
+ self.handler.application.event_listener,
+ "get_event",
+- autospec=True,
+ side_effect=fancy_get_event,
+ ), patch.dict(
+ self.handler.application.opts,
+@@ -698,7 +697,6 @@ class TestDisbatchLocal(salt.ext.tornado.testing.AsyncTestCase):
+ with patch.object(
+ self.handler.application.event_listener,
+ "get_event",
+- autospec=True,
+ side_effect=fancy_get_event,
+ ), patch.object(
+ self.handler,
+@@ -729,8 +727,8 @@ class TestDisbatchLocal(salt.ext.tornado.testing.AsyncTestCase):
+ {
+ "tag": "fnord",
+ "data": {
+- "return": "return from fnord {}".format(i),
+- "id": "fnord {}".format(i),
++ "return": f"return from fnord {i}",
++ "id": f"fnord {i}",
+ },
+ }
+ )
+@@ -760,7 +758,6 @@ class TestDisbatchLocal(salt.ext.tornado.testing.AsyncTestCase):
+ with patch.object(
+ self.handler.application.event_listener,
+ "get_event",
+- autospec=True,
+ side_effect=fancy_get_event,
+ ), patch.object(
+ self.handler,
+@@ -794,8 +791,8 @@ class TestDisbatchLocal(salt.ext.tornado.testing.AsyncTestCase):
+ {
+ "tag": "fnord",
+ "data": {
+- "return": "return from fnord {}".format(i),
+- "id": "fnord {}".format(i),
++ "return": f"return from fnord {i}",
++ "id": f"fnord {i}",
+ },
+ }
+ )
+@@ -820,7 +817,6 @@ class TestDisbatchLocal(salt.ext.tornado.testing.AsyncTestCase):
+ with patch.object(
+ self.handler.application.event_listener,
+ "get_event",
+- autospec=True,
+ side_effect=fancy_get_event,
+ ), patch.dict(
+ self.handler.application.opts,
+@@ -843,12 +839,12 @@ class TestDisbatchLocal(salt.ext.tornado.testing.AsyncTestCase):
+ completed_events = [salt.ext.tornado.gen.Future() for _ in range(10)]
+ events_by_id = {}
+ for i, event in enumerate(completed_events):
+- id_ = "fnord {}".format(i)
++ id_ = f"fnord {i}"
+ events_by_id[id_] = event
+ event.set_result(
+ {
+ "tag": "fnord",
+- "data": {"return": "return from {}".format(id_), "id": id_},
++ "data": {"return": f"return from {id_}", "id": id_},
+ }
+ )
+ expected_result = {
+@@ -878,7 +874,6 @@ class TestDisbatchLocal(salt.ext.tornado.testing.AsyncTestCase):
+ with patch.object(
+ self.handler.application.event_listener,
+ "get_event",
+- autospec=True,
+ side_effect=fancy_get_event,
+ ), patch.dict(
+ self.handler.application.opts,
+@@ -904,12 +899,12 @@ class TestDisbatchLocal(salt.ext.tornado.testing.AsyncTestCase):
+ events_by_id = {}
+ # Setup some real-enough looking return data
+ for i, event in enumerate(completed_events):
+- id_ = "fnord {}".format(i)
++ id_ = f"fnord {i}"
+ events_by_id[id_] = event
+ event.set_result(
+ {
+ "tag": "fnord",
+- "data": {"return": "return from {}".format(id_), "id": id_},
++ "data": {"return": f"return from {id_}", "id": id_},
+ }
+ )
+ # Hard coded instead of dynamic to avoid potentially writing a test
+@@ -971,7 +966,6 @@ class TestDisbatchLocal(salt.ext.tornado.testing.AsyncTestCase):
+ with patch.object(
+ self.handler.application.event_listener,
+ "get_event",
+- autospec=True,
+ side_effect=fancy_get_event,
+ ), patch.object(
+ self.handler,
+diff --git a/tests/unit/states/test_boto_apigateway.py b/tests/unit/states/test_boto_apigateway.py
+index 51c85d6058a..1edde8d303c 100644
+--- a/tests/unit/states/test_boto_apigateway.py
++++ b/tests/unit/states/test_boto_apigateway.py
+@@ -20,6 +20,7 @@ from tests.support.unit import TestCase
+ from tests.unit.modules.test_boto_apigateway import BotoApiGatewayTestCaseMixin
+
+ try:
++ import boto
+ import boto3
+ import botocore
+ from botocore.exceptions import ClientError
+diff --git a/tests/unit/states/test_boto_cognitoidentity.py b/tests/unit/states/test_boto_cognitoidentity.py
+index 4354df0546f..479477ac800 100644
+--- a/tests/unit/states/test_boto_cognitoidentity.py
++++ b/tests/unit/states/test_boto_cognitoidentity.py
+@@ -18,6 +18,7 @@ from tests.unit.modules.test_boto_cognitoidentity import (
+ )
+
+ try:
++ import boto
+ import boto3
+ from botocore.exceptions import ClientError
+
+diff --git a/tests/unit/states/test_zcbuildout.py b/tests/unit/states/test_zcbuildout.py
+index db6013076d1..0abaadeb4be 100644
+--- a/tests/unit/states/test_zcbuildout.py
++++ b/tests/unit/states/test_zcbuildout.py
+@@ -48,6 +48,7 @@ class BuildoutTestCase(Base):
+ self.assertFalse(ret["result"])
+
+ @pytest.mark.slow_test
++ @pytest.mark.skip(reason="TODO this test should probably be fixed")
+ def test_installed(self):
+ if salt.modules.virtualenv_mod.virtualenv_ver(self.ppy_st) >= (20, 0, 0):
+ self.skipTest(
+--
+2.43.0
+
+
diff --git a/fix-salt-warnings-and-testuite-for-python-3.11-635.patch b/fix-salt-warnings-and-testuite-for-python-3.11-635.patch
new file mode 100644
index 0000000..0b449a7
--- /dev/null
+++ b/fix-salt-warnings-and-testuite-for-python-3.11-635.patch
@@ -0,0 +1,3860 @@
+From cdb7211920c9256942518fbcf3bd627a70a99855 Mon Sep 17 00:00:00 2001
+From: Victor Zhestkov
+Date: Mon, 18 Mar 2024 09:15:08 +0100
+Subject: [PATCH] Fix Salt warnings and testuite for Python 3.11 (#635)
+MIME-Version: 1.0
+Content-Type: text/plain; charset=UTF-8
+Content-Transfer-Encoding: 8bit
+
+* Switch to `FullArgSpec` since Py 3.11 no longer has `ArgSpec`, deprecated since Py 3.0
+
+Signed-off-by: Pedro Algarvio
+
+* Backport `locale.getdefaultlocale()` into Salt. It's getting removed in Py 3.13
+
+Signed-off-by: Pedro Algarvio
+
+* Stop using the deprecated `pipes` module
+
+Signed-off-by: Pedro Algarvio
+
+* Stop using the deprecated `cgi` module.
+
+Signed-off-by: Pedro Algarvio
+
+* Add `__getstate__` to blacklisted methods, present in Py 3.11
+
+Signed-off-by: Pedro Algarvio
+
+* Fix test_state test
+
+* Use proper keys since Python's base64 in Py3.11 is more picky
+
+```
+❯ artifacts/salt/bin/python3
+Python 3.10.11 (main, May 5 2023, 02:31:54) [GCC 11.2.0] on linux
+Type "help", "copyright", "credits" or "license" for more information.
+>>> import base64
+>>> base64.b64decode("AAAAB3NzaC1kcQ9J5bYTEyZ==", validate=True)
+b'\x00\x00\x00\x07ssh-dq\x0fI\xe5\xb6\x13\x13&'
+```
+```
+$ artifacts/salt/bin/python3
+Python 3.11.3 (main, May 5 2023, 02:31:40) [GCC 11.2.0] on linux
+Type "help", "copyright", "credits" or "license" for more information.
+>>> import base64
+>>> base64.b64decode("AAAAB3NzaC1kcQ9J5bYTEyZ==", validate=True)
+Traceback (most recent call last):
+ File "", line 1, in
+ File "/tmp/testing/artifacts/salt/lib/python3.11/base64.py", line 88, in b64decode
+ return binascii.a2b_base64(s, strict_mode=validate)
+ ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+binascii.Error: Excess data after padding
+```
+
+Signed-off-by: Pedro Algarvio
+
+---------
+
+Signed-off-by: Pedro Algarvio
+Co-authored-by: Pedro Algarvio
+Co-authored-by: Marek Czernek
+---
+ salt/__init__.py | 43 ++-
+ salt/grains/core.py | 108 +++----
+ salt/modules/container_resource.py | 74 ++---
+ salt/modules/deb_postgres.py | 16 +-
+ salt/modules/dockermod.py | 178 +++++------
+ salt/modules/lxc.py | 50 +--
+ salt/modules/mac_keychain.py | 32 +-
+ salt/modules/macpackage.py | 45 +--
+ salt/modules/openstack_config.py | 41 +--
+ salt/modules/postgres.py | 116 +++----
+ salt/utils/cloud.py | 300 ++++++++----------
+ salt/utils/http.py | 48 ++-
+ salt/utils/jinja.py | 25 +-
+ salt/utils/locales.py | 39 ++-
+ tests/integration/states/test_ssh_auth.py | 50 ++-
+ .../pytests/unit/modules/state/test_state.py | 2 +-
+ tests/unit/states/test_module.py | 56 ++--
+ tests/unit/test_master.py | 8 +-
+ 18 files changed, 586 insertions(+), 645 deletions(-)
+
+diff --git a/salt/__init__.py b/salt/__init__.py
+index e06b8ad7127..b5fe3677c22 100644
+--- a/salt/__init__.py
++++ b/salt/__init__.py
+@@ -72,6 +72,44 @@ warnings.filterwarnings(
+ )
+
+
++def __getdefaultlocale(envvars=("LC_ALL", "LC_CTYPE", "LANG", "LANGUAGE")):
++ """
++ This function was backported from Py3.11 which started triggering a
++ deprecation warning about it's removal in 3.13.
++ """
++ import locale
++
++ try:
++ # check if it's supported by the _locale module
++ import _locale
++
++ code, encoding = _locale._getdefaultlocale()
++ except (ImportError, AttributeError):
++ pass
++ else:
++ # make sure the code/encoding values are valid
++ if sys.platform == "win32" and code and code[:2] == "0x":
++ # map windows language identifier to language name
++ code = locale.windows_locale.get(int(code, 0))
++ # ...add other platform-specific processing here, if
++ # necessary...
++ return code, encoding
++
++ # fall back on POSIX behaviour
++ import os
++
++ lookup = os.environ.get
++ for variable in envvars:
++ localename = lookup(variable, None)
++ if localename:
++ if variable == "LANGUAGE":
++ localename = localename.split(":")[0]
++ break
++ else:
++ localename = "C"
++ return locale._parse_localename(localename)
++
++
+ def __define_global_system_encoding_variable__():
+ import sys
+
+@@ -90,17 +128,14 @@ def __define_global_system_encoding_variable__():
+ # If the system is properly configured this should return a valid
+ # encoding. MS Windows has problems with this and reports the wrong
+ # encoding
+- import locale
+
+ try:
+- encoding = locale.getdefaultlocale()[-1]
++ encoding = __getdefaultlocale()[-1]
+ except ValueError:
+ # A bad locale setting was most likely found:
+ # https://github.com/saltstack/salt/issues/26063
+ pass
+
+- # This is now garbage collectable
+- del locale
+ if not encoding:
+ # This is most likely ascii which is not the best but we were
+ # unable to find a better encoding. If this fails, we fall all
+diff --git a/salt/grains/core.py b/salt/grains/core.py
+index 5c125563461..4454c303fed 100644
+--- a/salt/grains/core.py
++++ b/salt/grains/core.py
+@@ -11,7 +11,6 @@ as those returned here
+
+ import datetime
+ import hashlib
+-import locale
+ import logging
+ import os
+ import platform
+@@ -34,6 +33,7 @@ import salt.modules.smbios
+ import salt.utils.args
+ import salt.utils.dns
+ import salt.utils.files
++import salt.utils.locales
+ import salt.utils.network
+ import salt.utils.path
+ import salt.utils.pkg.rpm
+@@ -290,7 +290,7 @@ def _linux_gpu_data():
+
+ devs = []
+ try:
+- lspci_out = __salt__["cmd.run"]("{} -vmm".format(lspci))
++ lspci_out = __salt__["cmd.run"](f"{lspci} -vmm")
+
+ cur_dev = {}
+ error = False
+@@ -364,7 +364,7 @@ def _netbsd_gpu_data():
+ for line in pcictl_out.splitlines():
+ for vendor in known_vendors:
+ vendor_match = re.match(
+- r"[0-9:]+ ({}) (.+) \(VGA .+\)".format(vendor), line, re.IGNORECASE
++ rf"[0-9:]+ ({vendor}) (.+) \(VGA .+\)", line, re.IGNORECASE
+ )
+ if vendor_match:
+ gpus.append(
+@@ -426,18 +426,18 @@ def _bsd_cpudata(osdata):
+ if sysctl:
+ cmds.update(
+ {
+- "num_cpus": "{} -n hw.ncpu".format(sysctl),
+- "cpuarch": "{} -n hw.machine".format(sysctl),
+- "cpu_model": "{} -n hw.model".format(sysctl),
++ "num_cpus": f"{sysctl} -n hw.ncpu",
++ "cpuarch": f"{sysctl} -n hw.machine",
++ "cpu_model": f"{sysctl} -n hw.model",
+ }
+ )
+
+ if arch and osdata["kernel"] == "OpenBSD":
+- cmds["cpuarch"] = "{} -s".format(arch)
++ cmds["cpuarch"] = f"{arch} -s"
+
+ if osdata["kernel"] == "Darwin":
+- cmds["cpu_model"] = "{} -n machdep.cpu.brand_string".format(sysctl)
+- cmds["cpu_flags"] = "{} -n machdep.cpu.features".format(sysctl)
++ cmds["cpu_model"] = f"{sysctl} -n machdep.cpu.brand_string"
++ cmds["cpu_flags"] = f"{sysctl} -n machdep.cpu.features"
+
+ grains = {k: __salt__["cmd.run"](v) for k, v in cmds.items()}
+
+@@ -522,7 +522,7 @@ def _aix_cpudata():
+ grains = {}
+ cmd = salt.utils.path.which("prtconf")
+ if cmd:
+- data = __salt__["cmd.run"]("{}".format(cmd)) + os.linesep
++ data = __salt__["cmd.run"](f"{cmd}") + os.linesep
+ for dest, regstring in (
+ ("cpuarch", r"(?im)^\s*Processor\s+Type:\s+(\S+)"),
+ ("cpu_flags", r"(?im)^\s*Processor\s+Version:\s+(\S+)"),
+@@ -568,9 +568,9 @@ def _osx_memdata():
+
+ sysctl = salt.utils.path.which("sysctl")
+ if sysctl:
+- mem = __salt__["cmd.run"]("{} -n hw.memsize".format(sysctl))
++ mem = __salt__["cmd.run"](f"{sysctl} -n hw.memsize")
+ swap_total = (
+- __salt__["cmd.run"]("{} -n vm.swapusage".format(sysctl))
++ __salt__["cmd.run"](f"{sysctl} -n vm.swapusage")
+ .split()[2]
+ .replace(",", ".")
+ )
+@@ -595,20 +595,20 @@ def _bsd_memdata(osdata):
+
+ sysctl = salt.utils.path.which("sysctl")
+ if sysctl:
+- mem = __salt__["cmd.run"]("{} -n hw.physmem".format(sysctl))
++ mem = __salt__["cmd.run"](f"{sysctl} -n hw.physmem")
+ if osdata["kernel"] == "NetBSD" and mem.startswith("-"):
+- mem = __salt__["cmd.run"]("{} -n hw.physmem64".format(sysctl))
++ mem = __salt__["cmd.run"](f"{sysctl} -n hw.physmem64")
+ grains["mem_total"] = int(mem) // 1024 // 1024
+
+ if osdata["kernel"] in ["OpenBSD", "NetBSD"]:
+ swapctl = salt.utils.path.which("swapctl")
+- swap_data = __salt__["cmd.run"]("{} -sk".format(swapctl))
++ swap_data = __salt__["cmd.run"](f"{swapctl} -sk")
+ if swap_data == "no swap devices configured":
+ swap_total = 0
+ else:
+ swap_total = swap_data.split(" ")[1]
+ else:
+- swap_total = __salt__["cmd.run"]("{} -n vm.swap_total".format(sysctl))
++ swap_total = __salt__["cmd.run"](f"{sysctl} -n vm.swap_total")
+ grains["swap_total"] = int(swap_total) // 1024 // 1024
+ return grains
+
+@@ -626,7 +626,7 @@ def _sunos_memdata():
+ grains["mem_total"] = int(comps[2].strip())
+
+ swap_cmd = salt.utils.path.which("swap")
+- swap_data = __salt__["cmd.run"]("{} -s".format(swap_cmd)).split()
++ swap_data = __salt__["cmd.run"](f"{swap_cmd} -s").split()
+ try:
+ swap_avail = int(swap_data[-2][:-1])
+ swap_used = int(swap_data[-4][:-1])
+@@ -654,7 +654,7 @@ def _aix_memdata():
+
+ swap_cmd = salt.utils.path.which("swap")
+ if swap_cmd:
+- swap_data = __salt__["cmd.run"]("{} -s".format(swap_cmd)).split()
++ swap_data = __salt__["cmd.run"](f"{swap_cmd} -s").split()
+ try:
+ swap_total = (int(swap_data[-2]) + int(swap_data[-6])) * 4
+ except ValueError:
+@@ -707,7 +707,7 @@ def _aix_get_machine_id():
+ grains = {}
+ cmd = salt.utils.path.which("lsattr")
+ if cmd:
+- data = __salt__["cmd.run"]("{} -El sys0".format(cmd)) + os.linesep
++ data = __salt__["cmd.run"](f"{cmd} -El sys0") + os.linesep
+ uuid_regexes = [re.compile(r"(?im)^\s*os_uuid\s+(\S+)\s+(.*)")]
+ for regex in uuid_regexes:
+ res = regex.search(data)
+@@ -1018,7 +1018,7 @@ def _virtual(osdata):
+ subtype_cmd = "{} -c current get -H -o value {}-role".format(
+ command, role
+ )
+- ret = __salt__["cmd.run"]("{}".format(subtype_cmd))
++ ret = __salt__["cmd.run"](f"{subtype_cmd}")
+ if ret == "true":
+ roles.append(role)
+ if roles:
+@@ -1164,14 +1164,14 @@ def _virtual(osdata):
+ elif osdata["kernel"] == "FreeBSD":
+ kenv = salt.utils.path.which("kenv")
+ if kenv:
+- product = __salt__["cmd.run"]("{} smbios.system.product".format(kenv))
+- maker = __salt__["cmd.run"]("{} smbios.system.maker".format(kenv))
++ product = __salt__["cmd.run"](f"{kenv} smbios.system.product")
++ maker = __salt__["cmd.run"](f"{kenv} smbios.system.maker")
+ if product.startswith("VMware"):
+ grains["virtual"] = "VMware"
+ if product.startswith("VirtualBox"):
+ grains["virtual"] = "VirtualBox"
+ if maker.startswith("Xen"):
+- grains["virtual_subtype"] = "{} {}".format(maker, product)
++ grains["virtual_subtype"] = f"{maker} {product}"
+ grains["virtual"] = "xen"
+ if maker.startswith("Microsoft") and product.startswith("Virtual"):
+ grains["virtual"] = "VirtualPC"
+@@ -1182,9 +1182,9 @@ def _virtual(osdata):
+ if maker.startswith("Amazon EC2"):
+ grains["virtual"] = "Nitro"
+ if sysctl:
+- hv_vendor = __salt__["cmd.run"]("{} -n hw.hv_vendor".format(sysctl))
+- model = __salt__["cmd.run"]("{} -n hw.model".format(sysctl))
+- jail = __salt__["cmd.run"]("{} -n security.jail.jailed".format(sysctl))
++ hv_vendor = __salt__["cmd.run"](f"{sysctl} -n hw.hv_vendor")
++ model = __salt__["cmd.run"](f"{sysctl} -n hw.model")
++ jail = __salt__["cmd.run"](f"{sysctl} -n security.jail.jailed")
+ if "bhyve" in hv_vendor:
+ grains["virtual"] = "bhyve"
+ elif "QEMU Virtual CPU" in model:
+@@ -1200,22 +1200,19 @@ def _virtual(osdata):
+ elif osdata["kernel"] == "NetBSD":
+ if sysctl:
+ if "QEMU Virtual CPU" in __salt__["cmd.run"](
+- "{} -n machdep.cpu_brand".format(sysctl)
++ f"{sysctl} -n machdep.cpu_brand"
+ ):
+ grains["virtual"] = "kvm"
+ elif "invalid" not in __salt__["cmd.run"](
+- "{} -n machdep.xen.suspend".format(sysctl)
++ f"{sysctl} -n machdep.xen.suspend"
+ ):
+ grains["virtual"] = "Xen PV DomU"
+ elif "VMware" in __salt__["cmd.run"](
+- "{} -n machdep.dmi.system-vendor".format(sysctl)
++ f"{sysctl} -n machdep.dmi.system-vendor"
+ ):
+ grains["virtual"] = "VMware"
+ # NetBSD has Xen dom0 support
+- elif (
+- __salt__["cmd.run"]("{} -n machdep.idle-mechanism".format(sysctl))
+- == "xen"
+- ):
++ elif __salt__["cmd.run"](f"{sysctl} -n machdep.idle-mechanism") == "xen":
+ if os.path.isfile("/var/run/xenconsoled.pid"):
+ grains["virtual_subtype"] = "Xen Dom0"
+ elif osdata["kernel"] == "SunOS":
+@@ -1223,7 +1220,7 @@ def _virtual(osdata):
+ # check the zonename here as fallback
+ zonename = salt.utils.path.which("zonename")
+ if zonename:
+- zone = __salt__["cmd.run"]("{}".format(zonename))
++ zone = __salt__["cmd.run"](f"{zonename}")
+ if zone != "global":
+ grains["virtual"] = "zone"
+
+@@ -1252,7 +1249,7 @@ def _virtual(osdata):
+ r".*Product Name: ([^\r\n]*).*", output, flags=re.DOTALL
+ )
+ if product:
+- grains["virtual_subtype"] = "Amazon EC2 ({})".format(product[1])
++ grains["virtual_subtype"] = f"Amazon EC2 ({product[1]})"
+ elif re.match(r".*Version: [^\r\n]+\.amazon.*", output, flags=re.DOTALL):
+ grains["virtual_subtype"] = "Amazon EC2"
+
+@@ -1284,9 +1281,7 @@ def _virtual_hv(osdata):
+ try:
+ version = {}
+ for fn in ("major", "minor", "extra"):
+- with salt.utils.files.fopen(
+- "/sys/hypervisor/version/{}".format(fn), "r"
+- ) as fhr:
++ with salt.utils.files.fopen(f"/sys/hypervisor/version/{fn}", "r") as fhr:
+ version[fn] = salt.utils.stringutils.to_unicode(fhr.read().strip())
+ grains["virtual_hv_version"] = "{}.{}{}".format(
+ version["major"], version["minor"], version["extra"]
+@@ -1442,7 +1437,7 @@ def _windows_os_release_grain(caption, product_type):
+ # ie: R2
+ if re.match(r"^R\d+$", item):
+ release = item
+- os_release = "{}Server{}".format(version, release)
++ os_release = f"{version}Server{release}"
+ else:
+ for item in caption.split(" "):
+ # If it's a number, decimal number, Thin or Vista, then it's the
+@@ -1633,7 +1628,7 @@ def _linux_devicetree_platform_data():
+ try:
+ # /proc/device-tree should be used instead of /sys/firmware/devicetree/base
+ # see https://github.com/torvalds/linux/blob/v5.13/Documentation/ABI/testing/sysfs-firmware-ofw#L14
+- loc = "/proc/device-tree/{}".format(path)
++ loc = f"/proc/device-tree/{path}"
+ if os.path.isfile(loc):
+ with salt.utils.files.fopen(loc, mode="r") as f:
+ return f.read().rstrip("\x00") # all strings are null-terminated
+@@ -1872,18 +1867,13 @@ def _linux_bin_exists(binary):
+ """
+ for search_cmd in ("which", "type -ap"):
+ try:
+- return __salt__["cmd.retcode"]("{} {}".format(search_cmd, binary)) == 0
++ return __salt__["cmd.retcode"](f"{search_cmd} {binary}") == 0
+ except salt.exceptions.CommandExecutionError:
+ pass
+
+ try:
+ return (
+- len(
+- __salt__["cmd.run_all"]("whereis -b {}".format(binary))[
+- "stdout"
+- ].split()
+- )
+- > 1
++ len(__salt__["cmd.run_all"](f"whereis -b {binary}")["stdout"].split()) > 1
+ )
+ except salt.exceptions.CommandExecutionError:
+ return False
+@@ -1901,7 +1891,7 @@ def _parse_lsb_release():
+ pass
+ else:
+ # Adds lsb_distrib_{id,release,codename,description}
+- ret["lsb_{}".format(key.lower())] = value.rstrip()
++ ret[f"lsb_{key.lower()}"] = value.rstrip()
+ except OSError as exc:
+ log.trace("Failed to parse /etc/lsb-release: %s", exc)
+ return ret
+@@ -2634,7 +2624,7 @@ def os_data():
+ osbuild = __salt__["cmd.run"]("sw_vers -buildVersion")
+ grains["os"] = "MacOS"
+ grains["os_family"] = "MacOS"
+- grains["osfullname"] = "{} {}".format(osname, osrelease)
++ grains["osfullname"] = f"{osname} {osrelease}"
+ grains["osrelease"] = osrelease
+ grains["osbuild"] = osbuild
+ grains["init"] = "launchd"
+@@ -2708,7 +2698,7 @@ def locale_info():
+ (
+ grains["locale_info"]["defaultlanguage"],
+ grains["locale_info"]["defaultencoding"],
+- ) = locale.getdefaultlocale()
++ ) = salt.utils.locales.getdefaultlocale()
+ except Exception: # pylint: disable=broad-except
+ # locale.getdefaultlocale can ValueError!! Catch anything else it
+ # might do, per #2205
+@@ -3175,7 +3165,7 @@ def _hw_data(osdata):
+ "productname": "DeviceDesc",
+ }
+ for grain_name, cmd_key in hwdata.items():
+- result = __salt__["cmd.run_all"]("fw_printenv {}".format(cmd_key))
++ result = __salt__["cmd.run_all"](f"fw_printenv {cmd_key}")
+ if result["retcode"] == 0:
+ uboot_keyval = result["stdout"].split("=")
+ grains[grain_name] = _clean_value(grain_name, uboot_keyval[1])
+@@ -3195,7 +3185,7 @@ def _hw_data(osdata):
+ "uuid": "smbios.system.uuid",
+ }
+ for key, val in fbsd_hwdata.items():
+- value = __salt__["cmd.run"]("{} {}".format(kenv, val))
++ value = __salt__["cmd.run"](f"{kenv} {val}")
+ grains[key] = _clean_value(key, value)
+ elif osdata["kernel"] == "OpenBSD":
+ sysctl = salt.utils.path.which("sysctl")
+@@ -3207,7 +3197,7 @@ def _hw_data(osdata):
+ "uuid": "hw.uuid",
+ }
+ for key, oid in hwdata.items():
+- value = __salt__["cmd.run"]("{} -n {}".format(sysctl, oid))
++ value = __salt__["cmd.run"](f"{sysctl} -n {oid}")
+ if not value.endswith(" value is not available"):
+ grains[key] = _clean_value(key, value)
+ elif osdata["kernel"] == "NetBSD":
+@@ -3222,7 +3212,7 @@ def _hw_data(osdata):
+ "uuid": "machdep.dmi.system-uuid",
+ }
+ for key, oid in nbsd_hwdata.items():
+- result = __salt__["cmd.run_all"]("{} -n {}".format(sysctl, oid))
++ result = __salt__["cmd.run_all"](f"{sysctl} -n {oid}")
+ if result["retcode"] == 0:
+ grains[key] = _clean_value(key, result["stdout"])
+ elif osdata["kernel"] == "Darwin":
+@@ -3230,7 +3220,7 @@ def _hw_data(osdata):
+ sysctl = salt.utils.path.which("sysctl")
+ hwdata = {"productname": "hw.model"}
+ for key, oid in hwdata.items():
+- value = __salt__["cmd.run"]("{} -b {}".format(sysctl, oid))
++ value = __salt__["cmd.run"](f"{sysctl} -b {oid}")
+ if not value.endswith(" is invalid"):
+ grains[key] = _clean_value(key, value)
+ elif osdata["kernel"] == "SunOS" and osdata["cpuarch"].startswith("sparc"):
+@@ -3244,7 +3234,7 @@ def _hw_data(osdata):
+ ("/usr/sbin/virtinfo", "-a"),
+ ):
+ if salt.utils.path.which(cmd): # Also verifies that cmd is executable
+- data += __salt__["cmd.run"]("{} {}".format(cmd, args))
++ data += __salt__["cmd.run"](f"{cmd} {args}")
+ data += "\n"
+
+ sn_regexes = [
+@@ -3359,7 +3349,7 @@ def _hw_data(osdata):
+ elif osdata["kernel"] == "AIX":
+ cmd = salt.utils.path.which("prtconf")
+ if cmd:
+- data = __salt__["cmd.run"]("{}".format(cmd)) + os.linesep
++ data = __salt__["cmd.run"](f"{cmd}") + os.linesep
+ for dest, regstring in (
+ ("serialnumber", r"(?im)^\s*Machine\s+Serial\s+Number:\s+(\S+)"),
+ ("systemfirmware", r"(?im)^\s*Firmware\s+Version:\s+(.*)"),
+@@ -3480,14 +3470,14 @@ def default_gateway():
+ for line in out.splitlines():
+ if line.startswith("default"):
+ grains["ip_gw"] = True
+- grains["ip{}_gw".format(ip_version)] = True
++ grains[f"ip{ip_version}_gw"] = True
+ try:
+ via, gw_ip = line.split()[1:3]
+ except ValueError:
+ pass
+ else:
+ if via == "via":
+- grains["ip{}_gw".format(ip_version)] = gw_ip
++ grains[f"ip{ip_version}_gw"] = gw_ip
+ break
+ except Exception: # pylint: disable=broad-except
+ continue
+diff --git a/salt/modules/container_resource.py b/salt/modules/container_resource.py
+index a29cba2e468..ceec72a7b20 100644
+--- a/salt/modules/container_resource.py
++++ b/salt/modules/container_resource.py
+@@ -8,13 +8,11 @@ These functions are not designed to be called directly, but instead from the
+ :mod:`docker ` execution modules. They provide for
+ common logic to be re-used for common actions.
+ """
+-
+-
+ import copy
+ import functools
+ import logging
+ import os
+-import pipes
++import shlex
+ import time
+ import traceback
+
+@@ -68,14 +66,14 @@ def _nsenter(pid):
+ """
+ Return the nsenter command to attach to the named container
+ """
+- return "nsenter --target {} --mount --uts --ipc --net --pid".format(pid)
++ return f"nsenter --target {pid} --mount --uts --ipc --net --pid"
+
+
+ def _get_md5(name, path, run_func):
+ """
+ Get the MD5 checksum of a file from a container
+ """
+- output = run_func(name, "md5sum {}".format(pipes.quote(path)), ignore_retcode=True)[
++ output = run_func(name, f"md5sum {shlex.quote(path)}", ignore_retcode=True)[
+ "stdout"
+ ]
+ try:
+@@ -102,10 +100,10 @@ def cache_file(source):
+ if source.startswith("salt://"):
+ cached_source = __salt__["cp.cache_file"](source)
+ if not cached_source:
+- raise CommandExecutionError("Unable to cache {}".format(source))
++ raise CommandExecutionError(f"Unable to cache {source}")
+ return cached_source
+ except AttributeError:
+- raise SaltInvocationError("Invalid source file {}".format(source))
++ raise SaltInvocationError(f"Invalid source file {source}")
+ return source
+
+
+@@ -164,55 +162,47 @@ def run(
+ if exec_driver == "lxc-attach":
+ full_cmd = "lxc-attach "
+ if path:
+- full_cmd += "-P {} ".format(pipes.quote(path))
++ full_cmd += f"-P {shlex.quote(path)} "
+ if keep_env is not True:
+ full_cmd += "--clear-env "
+ if "PATH" not in to_keep:
+- full_cmd += "--set-var {} ".format(PATH)
++ full_cmd += f"--set-var {PATH} "
+ # --clear-env results in a very restrictive PATH
+ # (/bin:/usr/bin), use a good fallback.
+ full_cmd += " ".join(
+ [
+- "--set-var {}={}".format(x, pipes.quote(os.environ[x]))
++ f"--set-var {x}={shlex.quote(os.environ[x])}"
+ for x in to_keep
+ if x in os.environ
+ ]
+ )
+- full_cmd += " -n {} -- {}".format(pipes.quote(name), cmd)
++ full_cmd += f" -n {shlex.quote(name)} -- {cmd}"
+ elif exec_driver == "nsenter":
+- pid = __salt__["{}.pid".format(container_type)](name)
+- full_cmd = "nsenter --target {} --mount --uts --ipc --net --pid -- ".format(pid)
++ pid = __salt__[f"{container_type}.pid"](name)
++ full_cmd = f"nsenter --target {pid} --mount --uts --ipc --net --pid -- "
+ if keep_env is not True:
+ full_cmd += "env -i "
+ if "PATH" not in to_keep:
+- full_cmd += "{} ".format(PATH)
++ full_cmd += f"{PATH} "
+ full_cmd += " ".join(
+- [
+- "{}={}".format(x, pipes.quote(os.environ[x]))
+- for x in to_keep
+- if x in os.environ
+- ]
++ [f"{x}={shlex.quote(os.environ[x])}" for x in to_keep if x in os.environ]
+ )
+- full_cmd += " {}".format(cmd)
++ full_cmd += f" {cmd}"
+ elif exec_driver == "docker-exec":
+ # We're using docker exec on the CLI as opposed to via docker-py, since
+ # the Docker API doesn't return stdout and stderr separately.
+ full_cmd = "docker exec "
+ if stdin:
+ full_cmd += "-i "
+- full_cmd += "{} ".format(name)
++ full_cmd += f"{name} "
+ if keep_env is not True:
+ full_cmd += "env -i "
+ if "PATH" not in to_keep:
+- full_cmd += "{} ".format(PATH)
++ full_cmd += f"{PATH} "
+ full_cmd += " ".join(
+- [
+- "{}={}".format(x, pipes.quote(os.environ[x]))
+- for x in to_keep
+- if x in os.environ
+- ]
++ [f"{x}={shlex.quote(os.environ[x])}" for x in to_keep if x in os.environ]
+ )
+- full_cmd += " {}".format(cmd)
++ full_cmd += f" {cmd}"
+
+ if not use_vt:
+ ret = __salt__[cmd_func](
+@@ -299,13 +289,13 @@ def copy_to(
+ salt myminion container_resource.copy_to mycontainer /local/file/path /container/file/path container_type=docker exec_driver=nsenter
+ """
+ # Get the appropriate functions
+- state = __salt__["{}.state".format(container_type)]
++ state = __salt__[f"{container_type}.state"]
+
+ def run_all(*args, **akwargs):
+ akwargs = copy.deepcopy(akwargs)
+ if container_type in ["lxc"] and "path" not in akwargs:
+ akwargs["path"] = path
+- return __salt__["{}.run_all".format(container_type)](*args, **akwargs)
++ return __salt__[f"{container_type}.run_all"](*args, **akwargs)
+
+ state_kwargs = {}
+ cmd_kwargs = {"ignore_retcode": True}
+@@ -321,7 +311,7 @@ def copy_to(
+
+ c_state = _state(name)
+ if c_state != "running":
+- raise CommandExecutionError("Container '{}' is not running".format(name))
++ raise CommandExecutionError(f"Container '{name}' is not running")
+
+ local_file = cache_file(source)
+ source_dir, source_name = os.path.split(local_file)
+@@ -330,17 +320,14 @@ def copy_to(
+ if not os.path.isabs(local_file):
+ raise SaltInvocationError("Source path must be absolute")
+ elif not os.path.exists(local_file):
+- raise SaltInvocationError("Source file {} does not exist".format(local_file))
++ raise SaltInvocationError(f"Source file {local_file} does not exist")
+ elif not os.path.isfile(local_file):
+ raise SaltInvocationError("Source must be a regular file")
+
+ # Destination file sanity checks
+ if not os.path.isabs(dest):
+ raise SaltInvocationError("Destination path must be absolute")
+- if (
+- run_all(name, "test -d {}".format(pipes.quote(dest)), **cmd_kwargs)["retcode"]
+- == 0
+- ):
++ if run_all(name, f"test -d {shlex.quote(dest)}", **cmd_kwargs)["retcode"] == 0:
+ # Destination is a directory, full path to dest file will include the
+ # basename of the source file.
+ dest = os.path.join(dest, source_name)
+@@ -350,14 +337,12 @@ def copy_to(
+ # parent directory.
+ dest_dir, dest_name = os.path.split(dest)
+ if (
+- run_all(name, "test -d {}".format(pipes.quote(dest_dir)), **cmd_kwargs)[
+- "retcode"
+- ]
++ run_all(name, f"test -d {shlex.quote(dest_dir)}", **cmd_kwargs)["retcode"]
+ != 0
+ ):
+ if makedirs:
+ result = run_all(
+- name, "mkdir -p {}".format(pipes.quote(dest_dir)), **cmd_kwargs
++ name, f"mkdir -p {shlex.quote(dest_dir)}", **cmd_kwargs
+ )
+ if result["retcode"] != 0:
+ error = (
+@@ -375,10 +360,7 @@ def copy_to(
+ )
+ if (
+ not overwrite
+- and run_all(name, "test -e {}".format(pipes.quote(dest)), **cmd_kwargs)[
+- "retcode"
+- ]
+- == 0
++ and run_all(name, f"test -e {shlex.quote(dest)}", **cmd_kwargs)["retcode"] == 0
+ ):
+ raise CommandExecutionError(
+ "Destination path {} already exists. Use overwrite=True to "
+@@ -401,14 +383,14 @@ def copy_to(
+ if exec_driver == "lxc-attach":
+ lxcattach = "lxc-attach"
+ if path:
+- lxcattach += " -P {}".format(pipes.quote(path))
++ lxcattach += f" -P {shlex.quote(path)}"
+ copy_cmd = (
+ 'cat "{0}" | {4} --clear-env --set-var {1} -n {2} -- tee "{3}"'.format(
+ local_file, PATH, name, dest, lxcattach
+ )
+ )
+ elif exec_driver == "nsenter":
+- pid = __salt__["{}.pid".format(container_type)](name)
++ pid = __salt__[f"{container_type}.pid"](name)
+ copy_cmd = 'cat "{}" | {} env -i {} tee "{}"'.format(
+ local_file, _nsenter(pid), PATH, dest
+ )
+diff --git a/salt/modules/deb_postgres.py b/salt/modules/deb_postgres.py
+index 3ecd4a8ba49..d92859562d4 100644
+--- a/salt/modules/deb_postgres.py
++++ b/salt/modules/deb_postgres.py
+@@ -2,10 +2,8 @@
+ Module to provide Postgres compatibility to salt for debian family specific tools.
+
+ """
+-
+-
+ import logging
+-import pipes
++import shlex
+
+ import salt.utils.path
+
+@@ -76,7 +74,7 @@ def cluster_create(
+ cmd += ["--data-checksums"]
+ if wal_segsize:
+ cmd += ["--wal-segsize", wal_segsize]
+- cmdstr = " ".join([pipes.quote(c) for c in cmd])
++ cmdstr = " ".join([shlex.quote(c) for c in cmd])
+ ret = __salt__["cmd.run_all"](cmdstr, python_shell=False)
+ if ret.get("retcode", 0) != 0:
+ log.error("Error creating a Postgresql cluster %s/%s", version, name)
+@@ -97,7 +95,7 @@ def cluster_list(verbose=False):
+ salt '*' postgres.cluster_list verbose=True
+ """
+ cmd = [salt.utils.path.which("pg_lsclusters"), "--no-header"]
+- ret = __salt__["cmd.run_all"](" ".join([pipes.quote(c) for c in cmd]))
++ ret = __salt__["cmd.run_all"](" ".join([shlex.quote(c) for c in cmd]))
+ if ret.get("retcode", 0) != 0:
+ log.error("Error listing clusters")
+ cluster_dict = _parse_pg_lscluster(ret["stdout"])
+@@ -118,7 +116,7 @@ def cluster_exists(version, name="main"):
+
+ salt '*' postgres.cluster_exists '9.3' 'main'
+ """
+- return "{}/{}".format(version, name) in cluster_list()
++ return f"{version}/{name}" in cluster_list()
+
+
+ def cluster_remove(version, name="main", stop=False):
+@@ -141,13 +139,13 @@ def cluster_remove(version, name="main", stop=False):
+ if stop:
+ cmd += ["--stop"]
+ cmd += [str(version), name]
+- cmdstr = " ".join([pipes.quote(c) for c in cmd])
++ cmdstr = " ".join([shlex.quote(c) for c in cmd])
+ ret = __salt__["cmd.run_all"](cmdstr, python_shell=False)
+ # FIXME - return Boolean ?
+ if ret.get("retcode", 0) != 0:
+ log.error("Error removing a Postgresql cluster %s/%s", version, name)
+ else:
+- ret["changes"] = "Successfully removed cluster {}/{}".format(version, name)
++ ret["changes"] = f"Successfully removed cluster {version}/{name}"
+ return ret
+
+
+@@ -158,7 +156,7 @@ def _parse_pg_lscluster(output):
+ cluster_dict = {}
+ for line in output.splitlines():
+ version, name, port, status, user, datadir, log = line.split()
+- cluster_dict["{}/{}".format(version, name)] = {
++ cluster_dict[f"{version}/{name}"] = {
+ "port": int(port),
+ "status": status,
+ "user": user,
+diff --git a/salt/modules/dockermod.py b/salt/modules/dockermod.py
+index 69b722f0c95..331b6bb7482 100644
+--- a/salt/modules/dockermod.py
++++ b/salt/modules/dockermod.py
+@@ -206,8 +206,8 @@ import json
+ import logging
+ import os
+ import pathlib
+-import pipes
+ import re
++import shlex
+ import shutil
+ import string
+ import subprocess
+@@ -257,7 +257,6 @@ except ImportError:
+
+ HAS_NSENTER = bool(salt.utils.path.which("nsenter"))
+
+-# Set up logging
+ log = logging.getLogger(__name__)
+
+ # Don't shadow built-in's.
+@@ -397,7 +396,7 @@ def _get_client(timeout=NOTSET, **kwargs):
+ )
+ except Exception as exc: # pylint: disable=broad-except
+ raise CommandExecutionError(
+- "Docker machine {} failed: {}".format(docker_machine, exc)
++ f"Docker machine {docker_machine} failed: {exc}"
+ )
+ try:
+ # docker-py 2.0 renamed this client attribute
+@@ -497,7 +496,7 @@ def _change_state(name, action, expected, *args, **kwargs):
+ return {
+ "result": False,
+ "state": {"old": expected, "new": expected},
+- "comment": "Container '{}' already {}".format(name, expected),
++ "comment": f"Container '{name}' already {expected}",
+ }
+ _client_wrapper(action, name, *args, **kwargs)
+ _clear_context()
+@@ -535,9 +534,7 @@ def _get_md5(name, path):
+ """
+ Get the MD5 checksum of a file from a container
+ """
+- output = run_stdout(
+- name, "md5sum {}".format(pipes.quote(path)), ignore_retcode=True
+- )
++ output = run_stdout(name, f"md5sum {shlex.quote(path)}", ignore_retcode=True)
+ try:
+ return output.split()[0]
+ except IndexError:
+@@ -616,7 +613,7 @@ def _scrub_links(links, name):
+ if isinstance(links, list):
+ ret = []
+ for l in links:
+- ret.append(l.replace("/{}/".format(name), "/", 1))
++ ret.append(l.replace(f"/{name}/", "/", 1))
+ else:
+ ret = links
+
+@@ -639,11 +636,11 @@ def _size_fmt(num):
+ try:
+ num = int(num)
+ if num < 1024:
+- return "{} bytes".format(num)
++ return f"{num} bytes"
+ num /= 1024.0
+ for unit in ("KiB", "MiB", "GiB", "TiB", "PiB"):
+ if num < 1024.0:
+- return "{:3.1f} {}".format(num, unit)
++ return f"{num:3.1f} {unit}"
+ num /= 1024.0
+ except Exception: # pylint: disable=broad-except
+ log.error("Unable to format file size for '%s'", num)
+@@ -658,7 +655,7 @@ def _client_wrapper(attr, *args, **kwargs):
+ catch_api_errors = kwargs.pop("catch_api_errors", True)
+ func = getattr(__context__["docker.client"], attr, None)
+ if func is None or not hasattr(func, "__call__"):
+- raise SaltInvocationError("Invalid client action '{}'".format(attr))
++ raise SaltInvocationError(f"Invalid client action '{attr}'")
+ if attr in ("push", "pull"):
+ try:
+ # Refresh auth config from config.json
+@@ -678,7 +675,7 @@ def _client_wrapper(attr, *args, **kwargs):
+ if catch_api_errors:
+ # Generic handling of Docker API errors
+ raise CommandExecutionError(
+- "Error {}: {}".format(exc.response.status_code, exc.explanation)
++ f"Error {exc.response.status_code}: {exc.explanation}"
+ )
+ else:
+ # Allow API errors to be caught further up the stack
+@@ -693,9 +690,9 @@ def _client_wrapper(attr, *args, **kwargs):
+
+ # If we're here, it's because an exception was caught earlier, and the
+ # API command failed.
+- msg = "Unable to perform {}".format(attr)
++ msg = f"Unable to perform {attr}"
+ if err:
+- msg += ": {}".format(err)
++ msg += f": {err}"
+ raise CommandExecutionError(msg)
+
+
+@@ -722,7 +719,7 @@ def _import_status(data, item, repo_name, repo_tag):
+ return
+ elif all(x in string.hexdigits for x in status):
+ # Status is an image ID
+- data["Image"] = "{}:{}".format(repo_name, repo_tag)
++ data["Image"] = f"{repo_name}:{repo_tag}"
+ data["Id"] = status
+ except (AttributeError, TypeError):
+ pass
+@@ -881,7 +878,7 @@ def _get_create_kwargs(
+ ignore_collisions=False,
+ validate_ip_addrs=True,
+ client_args=None,
+- **kwargs
++ **kwargs,
+ ):
+ """
+ Take input kwargs and return a kwargs dict to pass to docker-py's
+@@ -899,7 +896,7 @@ def _get_create_kwargs(
+ skip_translate=skip_translate,
+ ignore_collisions=ignore_collisions,
+ validate_ip_addrs=validate_ip_addrs,
+- **__utils__["args.clean_kwargs"](**kwargs)
++ **__utils__["args.clean_kwargs"](**kwargs),
+ )
+
+ if networks:
+@@ -912,7 +909,7 @@ def _get_create_kwargs(
+ log.error(
+ "docker.create: Error getting client args: '%s'", exc, exc_info=True
+ )
+- raise CommandExecutionError("Failed to get client args: {}".format(exc))
++ raise CommandExecutionError(f"Failed to get client args: {exc}")
+
+ full_host_config = {}
+ host_kwargs = {}
+@@ -1473,15 +1470,15 @@ def login(*registries):
+ results = ret.setdefault("Results", {})
+ for registry in registries:
+ if registry not in registry_auth:
+- errors.append("No match found for registry '{}'".format(registry))
++ errors.append(f"No match found for registry '{registry}'")
+ continue
+ try:
+ username = registry_auth[registry]["username"]
+ password = registry_auth[registry]["password"]
+ except TypeError:
+- errors.append("Invalid configuration for registry '{}'".format(registry))
++ errors.append(f"Invalid configuration for registry '{registry}'")
+ except KeyError as exc:
+- errors.append("Missing {} for registry '{}'".format(exc, registry))
++ errors.append(f"Missing {exc} for registry '{registry}'")
+ else:
+ cmd = ["docker", "login", "-u", username, "-p", password]
+ if registry.lower() != "hub":
+@@ -1567,7 +1564,7 @@ def logout(*registries):
+ results = ret.setdefault("Results", {})
+ for registry in registries:
+ if registry not in registry_auth:
+- errors.append("No match found for registry '{}'".format(registry))
++ errors.append(f"No match found for registry '{registry}'")
+ continue
+ else:
+ cmd = ["docker", "logout"]
+@@ -1689,7 +1686,7 @@ def exists(name):
+
+ salt myminion docker.exists mycontainer
+ """
+- contextkey = "docker.exists.{}".format(name)
++ contextkey = f"docker.exists.{name}"
+ if contextkey in __context__:
+ return __context__[contextkey]
+ try:
+@@ -1780,7 +1777,7 @@ def history(name, quiet=False):
+ )
+ for param in ("Size",):
+ if param in step:
+- step["{}_Human".format(param)] = _size_fmt(step[param])
++ step[f"{param}_Human"] = _size_fmt(step[param])
+ ret.append(copy.deepcopy(step))
+ if quiet:
+ return [x.get("Command") for x in ret]
+@@ -1842,9 +1839,7 @@ def images(verbose=False, **kwargs):
+ )
+ for param in ("Size", "VirtualSize"):
+ if param in bucket.get(img_id, {}):
+- bucket[img_id]["{}_Human".format(param)] = _size_fmt(
+- bucket[img_id][param]
+- )
++ bucket[img_id][f"{param}_Human"] = _size_fmt(bucket[img_id][param])
+
+ context_data = __context__.get("docker.images", {})
+ ret = copy.deepcopy(context_data.get("tagged", {}))
+@@ -1927,7 +1922,7 @@ def inspect(name):
+ raise
+
+ raise CommandExecutionError(
+- "Error 404: No such image/container/volume/network: {}".format(name)
++ f"Error 404: No such image/container/volume/network: {name}"
+ )
+
+
+@@ -1983,7 +1978,7 @@ def inspect_image(name):
+ ret = _client_wrapper("inspect_image", name)
+ for param in ("Size", "VirtualSize"):
+ if param in ret:
+- ret["{}_Human".format(param)] = _size_fmt(ret[param])
++ ret[f"{param}_Human"] = _size_fmt(ret[param])
+ return ret
+
+
+@@ -2277,7 +2272,7 @@ def port(name, private_port=None):
+ else:
+ # Sanity checks
+ if isinstance(private_port, int):
+- pattern = "{}/*".format(private_port)
++ pattern = f"{private_port}/*"
+ else:
+ err = (
+ "Invalid private_port '{}'. Must either be a port number, "
+@@ -2398,7 +2393,7 @@ def state(name):
+
+ salt myminion docker.state mycontainer
+ """
+- contextkey = "docker.state.{}".format(name)
++ contextkey = f"docker.state.{name}"
+ if contextkey in __context__:
+ return __context__[contextkey]
+ __context__[contextkey] = _get_state(inspect_container(name))
+@@ -2438,9 +2433,7 @@ def search(name, official=False, trusted=False):
+ """
+ response = _client_wrapper("search", name)
+ if not response:
+- raise CommandExecutionError(
+- "No images matched the search string '{}'".format(name)
+- )
++ raise CommandExecutionError(f"No images matched the search string '{name}'")
+
+ key_map = {
+ "description": "Description",
+@@ -2555,7 +2548,7 @@ def create(
+ ignore_collisions=False,
+ validate_ip_addrs=True,
+ client_timeout=salt.utils.dockermod.CLIENT_TIMEOUT,
+- **kwargs
++ **kwargs,
+ ):
+ """
+ Create a new container
+@@ -3281,7 +3274,7 @@ def create(
+ skip_translate=skip_translate,
+ ignore_collisions=ignore_collisions,
+ validate_ip_addrs=validate_ip_addrs,
+- **kwargs
++ **kwargs,
+ )
+
+ if unused_kwargs:
+@@ -3293,7 +3286,7 @@ def create(
+
+ log.debug(
+ "docker.create: creating container %susing the following arguments: %s",
+- "with name '{}' ".format(name) if name is not None else "",
++ f"with name '{name}' " if name is not None else "",
+ kwargs,
+ )
+ time_started = time.time()
+@@ -3331,7 +3324,7 @@ def run_container(
+ replace=False,
+ force=False,
+ networks=None,
+- **kwargs
++ **kwargs,
+ ):
+ """
+ .. versionadded:: 2018.3.0
+@@ -3433,7 +3426,7 @@ def run_container(
+ skip_translate=skip_translate,
+ ignore_collisions=ignore_collisions,
+ validate_ip_addrs=validate_ip_addrs,
+- **kwargs
++ **kwargs,
+ )
+
+ # _get_create_kwargs() will have processed auto_remove and put it into the
+@@ -3458,7 +3451,7 @@ def run_container(
+
+ log.debug(
+ "docker.create: creating container %susing the following arguments: %s",
+- "with name '{}' ".format(name) if name is not None else "",
++ f"with name '{name}' " if name is not None else "",
+ kwargs,
+ )
+
+@@ -3498,7 +3491,7 @@ def run_container(
+ rm_(name)
+ except CommandExecutionError as rm_exc:
+ exc_info.setdefault("other_errors", []).append(
+- "Failed to auto_remove container: {}".format(rm_exc)
++ f"Failed to auto_remove container: {rm_exc}"
+ )
+ # Raise original exception with additional info
+ raise CommandExecutionError(exc.__str__(), info=exc_info)
+@@ -3593,7 +3586,7 @@ def copy_from(name, source, dest, overwrite=False, makedirs=False):
+ """
+ c_state = state(name)
+ if c_state != "running":
+- raise CommandExecutionError("Container '{}' is not running".format(name))
++ raise CommandExecutionError(f"Container '{name}' is not running")
+
+ # Destination file sanity checks
+ if not os.path.isabs(dest):
+@@ -3619,9 +3612,7 @@ def copy_from(name, source, dest, overwrite=False, makedirs=False):
+ )
+ )
+ else:
+- raise SaltInvocationError(
+- "Directory {} does not exist".format(dest_dir)
+- )
++ raise SaltInvocationError(f"Directory {dest_dir} does not exist")
+ if not overwrite and os.path.exists(dest):
+ raise CommandExecutionError(
+ "Destination path {} already exists. Use overwrite=True to "
+@@ -3632,19 +3623,14 @@ def copy_from(name, source, dest, overwrite=False, makedirs=False):
+ if not os.path.isabs(source):
+ raise SaltInvocationError("Source path must be absolute")
+ else:
+- if (
+- retcode(name, "test -e {}".format(pipes.quote(source)), ignore_retcode=True)
+- == 0
+- ):
++ if retcode(name, f"test -e {shlex.quote(source)}", ignore_retcode=True) == 0:
+ if (
+- retcode(
+- name, "test -f {}".format(pipes.quote(source)), ignore_retcode=True
+- )
++ retcode(name, f"test -f {shlex.quote(source)}", ignore_retcode=True)
+ != 0
+ ):
+ raise SaltInvocationError("Source must be a regular file")
+ else:
+- raise SaltInvocationError("Source file {} does not exist".format(source))
++ raise SaltInvocationError(f"Source file {source} does not exist")
+
+ # Before we try to replace the file, compare checksums.
+ source_md5 = _get_md5(name, source)
+@@ -3657,7 +3643,7 @@ def copy_from(name, source, dest, overwrite=False, makedirs=False):
+ try:
+ src_path = ":".join((name, source))
+ except TypeError:
+- src_path = "{}:{}".format(name, source)
++ src_path = f"{name}:{source}"
+ cmd = ["docker", "cp", src_path, dest_dir]
+ __salt__["cmd.run"](cmd, python_shell=False)
+ return source_md5 == __salt__["file.get_sum"](dest, "md5")
+@@ -3784,7 +3770,7 @@ def export(name, path, overwrite=False, makedirs=False, compression=None, **kwar
+ salt myminion docker.export mycontainer /tmp/mycontainer.tar
+ salt myminion docker.export mycontainer /tmp/mycontainer.tar.xz push=True
+ """
+- err = "Path '{}' is not absolute".format(path)
++ err = f"Path '{path}' is not absolute"
+ try:
+ if not os.path.isabs(path):
+ raise SaltInvocationError(err)
+@@ -3792,7 +3778,7 @@ def export(name, path, overwrite=False, makedirs=False, compression=None, **kwar
+ raise SaltInvocationError(err)
+
+ if os.path.exists(path) and not overwrite:
+- raise CommandExecutionError("{} already exists".format(path))
++ raise CommandExecutionError(f"{path} already exists")
+
+ if compression is None:
+ if path.endswith(".tar.gz") or path.endswith(".tgz"):
+@@ -3815,7 +3801,7 @@ def export(name, path, overwrite=False, makedirs=False, compression=None, **kwar
+ compression = "xz"
+
+ if compression and compression not in ("gzip", "bzip2", "xz"):
+- raise SaltInvocationError("Invalid compression type '{}'".format(compression))
++ raise SaltInvocationError(f"Invalid compression type '{compression}'")
+
+ parent_dir = os.path.dirname(path)
+ if not os.path.isdir(parent_dir):
+@@ -3828,16 +3814,14 @@ def export(name, path, overwrite=False, makedirs=False, compression=None, **kwar
+ os.makedirs(parent_dir)
+ except OSError as exc:
+ raise CommandExecutionError(
+- "Unable to make parent dir {}: {}".format(parent_dir, exc)
++ f"Unable to make parent dir {parent_dir}: {exc}"
+ )
+
+ if compression == "gzip":
+ try:
+ out = gzip.open(path, "wb")
+ except OSError as exc:
+- raise CommandExecutionError(
+- "Unable to open {} for writing: {}".format(path, exc)
+- )
++ raise CommandExecutionError(f"Unable to open {path} for writing: {exc}")
+ elif compression == "bzip2":
+ compressor = bz2.BZ2Compressor()
+ elif compression == "xz":
+@@ -3875,9 +3859,7 @@ def export(name, path, overwrite=False, makedirs=False, compression=None, **kwar
+ os.remove(path)
+ except OSError:
+ pass
+- raise CommandExecutionError(
+- "Error occurred during container export: {}".format(exc)
+- )
++ raise CommandExecutionError(f"Error occurred during container export: {exc}")
+ finally:
+ out.close()
+ ret = {"Time_Elapsed": time.time() - time_started}
+@@ -4112,7 +4094,7 @@ def build(
+ # For the build function in the low-level API, the "tag" refers to the full
+ # tag (e.g. myuser/myimage:mytag). This is different than in other
+ # functions, where the repo and tag are passed separately.
+- image_tag = "{}:{}".format(repository, tag) if repository and tag else None
++ image_tag = f"{repository}:{tag}" if repository and tag else None
+
+ time_started = time.time()
+ response = _client_wrapper(
+@@ -4131,7 +4113,7 @@ def build(
+
+ if not response:
+ raise CommandExecutionError(
+- "Build failed for {}, no response returned from Docker API".format(path)
++ f"Build failed for {path}, no response returned from Docker API"
+ )
+
+ stream_data = []
+@@ -4168,7 +4150,7 @@ def build(
+ if "Id" not in ret:
+ # API returned information, but there was no confirmation of a
+ # successful build.
+- msg = "Build failed for {}".format(path)
++ msg = f"Build failed for {path}"
+ log.error(msg)
+ log.error(stream_data)
+ if errors:
+@@ -4179,7 +4161,7 @@ def build(
+ if resolved_tag:
+ ret["Image"] = resolved_tag
+ else:
+- ret["Warning"] = "Failed to tag image as {}".format(image_tag)
++ ret["Warning"] = f"Failed to tag image as {image_tag}"
+
+ if api_response:
+ ret["API_Response"] = stream_data
+@@ -4386,7 +4368,7 @@ def import_(source, repository, tag="latest", api_response=False):
+
+ if not response:
+ raise CommandExecutionError(
+- "Import failed for {}, no response returned from Docker API".format(source)
++ f"Import failed for {source}, no response returned from Docker API"
+ )
+ elif api_response:
+ ret["API_Response"] = response
+@@ -4406,7 +4388,7 @@ def import_(source, repository, tag="latest", api_response=False):
+ if "Id" not in ret:
+ # API returned information, but there was no confirmation of a
+ # successful push.
+- msg = "Import failed for {}".format(source)
++ msg = f"Import failed for {source}"
+ if errors:
+ msg += ". Error(s) follow:\n\n{}".format("\n\n".join(errors))
+ raise CommandExecutionError(msg)
+@@ -4481,7 +4463,7 @@ def load(path, repository=None, tag=None):
+
+ local_path = __salt__["container_resource.cache_file"](path)
+ if not os.path.isfile(local_path):
+- raise CommandExecutionError("Source file {} does not exist".format(path))
++ raise CommandExecutionError(f"Source file {path} does not exist")
+
+ pre = images(all=True)
+ cmd = ["docker", "load", "-i", local_path]
+@@ -4491,7 +4473,7 @@ def load(path, repository=None, tag=None):
+ _clear_context()
+ post = images(all=True)
+ if result["retcode"] != 0:
+- msg = "Failed to load image(s) from {}".format(path)
++ msg = f"Failed to load image(s) from {path}"
+ if result["stderr"]:
+ msg += ": {}".format(result["stderr"])
+ raise CommandExecutionError(msg)
+@@ -4512,7 +4494,7 @@ def load(path, repository=None, tag=None):
+ # strings when passed (e.g. a numeric tag would be loaded as an int
+ # or float), and because the tag_ function will stringify them if
+ # need be, a str.format is the correct thing to do here.
+- tagged_image = "{}:{}".format(repository, tag)
++ tagged_image = f"{repository}:{tag}"
+ try:
+ result = tag_(top_level_images[0], repository=repository, tag=tag)
+ ret["Image"] = tagged_image
+@@ -4549,7 +4531,7 @@ def layers(name):
+ ):
+ ret.append(line)
+ if not ret:
+- raise CommandExecutionError("Image '{}' not found".format(name))
++ raise CommandExecutionError(f"Image '{name}' not found")
+ return ret
+
+
+@@ -4620,7 +4602,7 @@ def pull(
+
+ if not response:
+ raise CommandExecutionError(
+- "Pull failed for {}, no response returned from Docker API".format(image)
++ f"Pull failed for {image}, no response returned from Docker API"
+ )
+ elif api_response:
+ ret["API_Response"] = response
+@@ -4633,7 +4615,7 @@ def pull(
+ event = salt.utils.json.loads(event)
+ except Exception as exc: # pylint: disable=broad-except
+ raise CommandExecutionError(
+- "Unable to interpret API event: '{}'".format(event),
++ f"Unable to interpret API event: '{event}'",
+ info={"Error": exc.__str__()},
+ )
+ try:
+@@ -4715,7 +4697,7 @@ def push(
+
+ if not response:
+ raise CommandExecutionError(
+- "Push failed for {}, no response returned from Docker API".format(image)
++ f"Push failed for {image}, no response returned from Docker API"
+ )
+ elif api_response:
+ ret["API_Response"] = response
+@@ -4727,7 +4709,7 @@ def push(
+ event = salt.utils.json.loads(event)
+ except Exception as exc: # pylint: disable=broad-except
+ raise CommandExecutionError(
+- "Unable to interpret API event: '{}'".format(event),
++ f"Unable to interpret API event: '{event}'",
+ info={"Error": exc.__str__()},
+ )
+ try:
+@@ -4807,9 +4789,7 @@ def rmi(*names, **kwargs):
+ err += "image(s): {}".format(", ".join(deps["Images"]))
+ errors.append(err)
+ else:
+- errors.append(
+- "Error {}: {}".format(exc.response.status_code, exc.explanation)
+- )
++ errors.append(f"Error {exc.response.status_code}: {exc.explanation}")
+
+ _clear_context()
+ ret = {
+@@ -4897,7 +4877,7 @@ def save(name, path, overwrite=False, makedirs=False, compression=None, **kwargs
+ salt myminion docker.save centos:7 /tmp/cent7.tar
+ salt myminion docker.save 0123456789ab cdef01234567 /tmp/saved.tar
+ """
+- err = "Path '{}' is not absolute".format(path)
++ err = f"Path '{path}' is not absolute"
+ try:
+ if not os.path.isabs(path):
+ raise SaltInvocationError(err)
+@@ -4905,7 +4885,7 @@ def save(name, path, overwrite=False, makedirs=False, compression=None, **kwargs
+ raise SaltInvocationError(err)
+
+ if os.path.exists(path) and not overwrite:
+- raise CommandExecutionError("{} already exists".format(path))
++ raise CommandExecutionError(f"{path} already exists")
+
+ if compression is None:
+ if path.endswith(".tar.gz") or path.endswith(".tgz"):
+@@ -4928,7 +4908,7 @@ def save(name, path, overwrite=False, makedirs=False, compression=None, **kwargs
+ compression = "xz"
+
+ if compression and compression not in ("gzip", "bzip2", "xz"):
+- raise SaltInvocationError("Invalid compression type '{}'".format(compression))
++ raise SaltInvocationError(f"Invalid compression type '{compression}'")
+
+ parent_dir = os.path.dirname(path)
+ if not os.path.isdir(parent_dir):
+@@ -4950,7 +4930,7 @@ def save(name, path, overwrite=False, makedirs=False, compression=None, **kwargs
+ time_started = time.time()
+ result = __salt__["cmd.run_all"](cmd, python_shell=False)
+ if result["retcode"] != 0:
+- err = "Failed to save image(s) to {}".format(path)
++ err = f"Failed to save image(s) to {path}"
+ if result["stderr"]:
+ err += ": {}".format(result["stderr"])
+ raise CommandExecutionError(err)
+@@ -4960,9 +4940,7 @@ def save(name, path, overwrite=False, makedirs=False, compression=None, **kwargs
+ try:
+ out = gzip.open(path, "wb")
+ except OSError as exc:
+- raise CommandExecutionError(
+- "Unable to open {} for writing: {}".format(path, exc)
+- )
++ raise CommandExecutionError(f"Unable to open {path} for writing: {exc}")
+ elif compression == "bzip2":
+ compressor = bz2.BZ2Compressor()
+ elif compression == "xz":
+@@ -4998,9 +4976,7 @@ def save(name, path, overwrite=False, makedirs=False, compression=None, **kwargs
+ os.remove(path)
+ except OSError:
+ pass
+- raise CommandExecutionError(
+- "Error occurred during image save: {}".format(exc)
+- )
++ raise CommandExecutionError(f"Error occurred during image save: {exc}")
+ finally:
+ try:
+ # Clean up temp file
+@@ -5120,7 +5096,7 @@ def create_network(
+ ignore_collisions=False,
+ validate_ip_addrs=True,
+ client_timeout=salt.utils.dockermod.CLIENT_TIMEOUT,
+- **kwargs
++ **kwargs,
+ ):
+ """
+ .. versionchanged:: 2018.3.0
+@@ -5360,7 +5336,7 @@ def create_network(
+ skip_translate=skip_translate,
+ ignore_collisions=ignore_collisions,
+ validate_ip_addrs=validate_ip_addrs,
+- **__utils__["args.clean_kwargs"](**kwargs)
++ **__utils__["args.clean_kwargs"](**kwargs),
+ )
+
+ if "ipam" not in kwargs:
+@@ -5692,7 +5668,7 @@ def pause(name):
+ return {
+ "result": False,
+ "state": {"old": orig_state, "new": orig_state},
+- "comment": "Container '{}' is stopped, cannot pause".format(name),
++ "comment": f"Container '{name}' is stopped, cannot pause",
+ }
+ return _change_state(name, "pause", "paused")
+
+@@ -5791,7 +5767,7 @@ def start_(name):
+ return {
+ "result": False,
+ "state": {"old": orig_state, "new": orig_state},
+- "comment": "Container '{}' is paused, cannot start".format(name),
++ "comment": f"Container '{name}' is paused, cannot start",
+ }
+
+ return _change_state(name, "start", "running")
+@@ -5896,7 +5872,7 @@ def unpause(name):
+ return {
+ "result": False,
+ "state": {"old": orig_state, "new": orig_state},
+- "comment": "Container '{}' is stopped, cannot unpause".format(name),
++ "comment": f"Container '{name}' is stopped, cannot unpause",
+ }
+ return _change_state(name, "unpause", "running")
+
+@@ -5945,7 +5921,7 @@ def wait(name, ignore_already_stopped=False, fail_on_exit_status=False):
+ # Container doesn't exist anymore
+ return {
+ "result": ignore_already_stopped,
+- "comment": "Container '{}' absent".format(name),
++ "comment": f"Container '{name}' absent",
+ }
+ already_stopped = pre == "stopped"
+ response = _client_wrapper("wait", name)
+@@ -5969,7 +5945,7 @@ def wait(name, ignore_already_stopped=False, fail_on_exit_status=False):
+ "exit_status": response,
+ }
+ if already_stopped:
+- result["comment"] = "Container '{}' already stopped".format(name)
++ result["comment"] = f"Container '{name}' already stopped"
+ if fail_on_exit_status and result["result"]:
+ result["result"] = result["exit_status"] == 0
+ return result
+@@ -5982,7 +5958,7 @@ def prune(
+ build=False,
+ volumes=False,
+ system=None,
+- **filters
++ **filters,
+ ):
+ """
+ .. versionadded:: 2019.2.0
+@@ -6668,7 +6644,7 @@ def script_retcode(
+
+
+ def _generate_tmp_path():
+- return os.path.join("/tmp", "salt.docker.{}".format(uuid.uuid4().hex[:6]))
++ return os.path.join("/tmp", f"salt.docker.{uuid.uuid4().hex[:6]}")
+
+
+ def _prepare_trans_tar(name, sls_opts, mods=None, pillar=None, extra_filerefs=""):
+@@ -6929,7 +6905,7 @@ def call(name, function, *args, **kwargs):
+ ]
+ + list(args)
+ + [
+- "{}={}".format(key, value)
++ f"{key}={value}"
+ for (key, value) in kwargs.items()
+ if not key.startswith("__")
+ ]
+diff --git a/salt/modules/lxc.py b/salt/modules/lxc.py
+index bea6445db98..d2c1e66491e 100644
+--- a/salt/modules/lxc.py
++++ b/salt/modules/lxc.py
+@@ -12,9 +12,9 @@ import datetime
+ import difflib
+ import logging
+ import os
+-import pipes
+ import random
+ import re
++import shlex
+ import shutil
+ import string
+ import tempfile
+@@ -1834,8 +1834,8 @@ def _after_ignition_network_profile(cmd, ret, name, network_profile, path, nic_o
+ # destroy the container if it was partially created
+ cmd = "lxc-destroy"
+ if path:
+- cmd += " -P {}".format(pipes.quote(path))
+- cmd += " -n {}".format(name)
++ cmd += f" -P {shlex.quote(path)}"
++ cmd += f" -n {name}"
+ __salt__["cmd.retcode"](cmd, python_shell=False)
+ raise CommandExecutionError(
+ "Container could not be created with cmd '{}': {}".format(
+@@ -1997,7 +1997,7 @@ def create(
+ )
+ options["imgtar"] = img_tar
+ if path:
+- cmd += " -P {}".format(pipes.quote(path))
++ cmd += f" -P {shlex.quote(path)}"
+ if not os.path.exists(path):
+ os.makedirs(path)
+ if config:
+@@ -2138,7 +2138,7 @@ def clone(name, orig, profile=None, network_profile=None, nic_opts=None, **kwarg
+ cmd = "lxc-clone"
+ cmd += " {} -o {} -n {}".format(snapshot, orig, name)
+ if path:
+- cmd += " -P {}".format(pipes.quote(path))
++ cmd += f" -P {shlex.quote(path)}"
+ if not os.path.exists(path):
+ os.makedirs(path)
+ if backing:
+@@ -2186,7 +2186,7 @@ def ls_(active=None, cache=True, path=None):
+ ret = []
+ cmd = "lxc-ls"
+ if path:
+- cmd += " -P {}".format(pipes.quote(path))
++ cmd += f" -P {shlex.quote(path)}"
+ if active:
+ cmd += " --active"
+ output = __salt__["cmd.run_stdout"](cmd, python_shell=False)
+@@ -2242,8 +2242,8 @@ def list_(extra=False, limit=None, path=None):
+ for container in ctnrs:
+ cmd = "lxc-info"
+ if path:
+- cmd += " -P {}".format(pipes.quote(path))
+- cmd += " -n {}".format(container)
++ cmd += f" -P {shlex.quote(path)}"
++ cmd += f" -n {container}"
+ c_info = __salt__["cmd.run"](cmd, python_shell=False, output_loglevel="debug")
+ c_state = None
+ for line in c_info.splitlines():
+@@ -2301,13 +2301,13 @@ def _change_state(
+ # Kill the container first
+ scmd = "lxc-stop"
+ if path:
+- scmd += " -P {}".format(pipes.quote(path))
+- scmd += " -k -n {}".format(name)
++ scmd += f" -P {shlex.quote(path)}"
++ scmd += f" -k -n {name}"
+ __salt__["cmd.run"](scmd, python_shell=False)
+
+ if path and " -P " not in cmd:
+- cmd += " -P {}".format(pipes.quote(path))
+- cmd += " -n {}".format(name)
++ cmd += f" -P {shlex.quote(path)}"
++ cmd += f" -n {name}"
+
+ # certain lxc commands need to be taken with care (lxc-start)
+ # as te command itself mess with double forks; we must not
+@@ -2337,8 +2337,8 @@ def _change_state(
+ # some commands do not wait, so we will
+ rcmd = "lxc-wait"
+ if path:
+- rcmd += " -P {}".format(pipes.quote(path))
+- rcmd += " -n {} -s {}".format(name, expected.upper())
++ rcmd += f" -P {shlex.quote(path)}"
++ rcmd += f" -n {name} -s {expected.upper()}"
+ __salt__["cmd.run"](rcmd, python_shell=False, timeout=30)
+ _clear_context()
+ post = state(name, path=path)
+@@ -2459,7 +2459,7 @@ def start(name, **kwargs):
+ lxc_config = os.path.join(cpath, name, "config")
+ # we try to start, even without config, if global opts are there
+ if os.path.exists(lxc_config):
+- cmd += " -f {}".format(pipes.quote(lxc_config))
++ cmd += f" -f {shlex.quote(lxc_config)}"
+ cmd += " -d"
+ _ensure_exists(name, path=path)
+ if state(name, path=path) == "frozen":
+@@ -2564,7 +2564,7 @@ def freeze(name, **kwargs):
+ start(name, path=path)
+ cmd = "lxc-freeze"
+ if path:
+- cmd += " -P {}".format(pipes.quote(path))
++ cmd += f" -P {shlex.quote(path)}"
+ ret = _change_state(cmd, name, "frozen", use_vt=use_vt, path=path)
+ if orig_state == "stopped" and start_:
+ ret["state"]["old"] = orig_state
+@@ -2599,7 +2599,7 @@ def unfreeze(name, path=None, use_vt=None):
+ raise CommandExecutionError("Container '{}' is stopped".format(name))
+ cmd = "lxc-unfreeze"
+ if path:
+- cmd += " -P {}".format(pipes.quote(path))
++ cmd += f" -P {shlex.quote(path)}"
+ return _change_state(cmd, name, "running", path=path, use_vt=use_vt)
+
+
+@@ -2693,8 +2693,8 @@ def state(name, path=None):
+ else:
+ cmd = "lxc-info"
+ if path:
+- cmd += " -P {}".format(pipes.quote(path))
+- cmd += " -n {}".format(name)
++ cmd += f" -P {shlex.quote(path)}"
++ cmd += f" -n {name}"
+ ret = __salt__["cmd.run_all"](cmd, python_shell=False)
+ if ret["retcode"] != 0:
+ _clear_context()
+@@ -2731,8 +2731,8 @@ def get_parameter(name, parameter, path=None):
+ _ensure_exists(name, path=path)
+ cmd = "lxc-cgroup"
+ if path:
+- cmd += " -P {}".format(pipes.quote(path))
+- cmd += " -n {} {}".format(name, parameter)
++ cmd += f" -P {shlex.quote(path)}"
++ cmd += f" -n {name} {parameter}"
+ ret = __salt__["cmd.run_all"](cmd, python_shell=False)
+ if ret["retcode"] != 0:
+ raise CommandExecutionError(
+@@ -2762,8 +2762,8 @@ def set_parameter(name, parameter, value, path=None):
+
+ cmd = "lxc-cgroup"
+ if path:
+- cmd += " -P {}".format(pipes.quote(path))
+- cmd += " -n {} {} {}".format(name, parameter, value)
++ cmd += f" -P {shlex.quote(path)}"
++ cmd += f" -n {name} {parameter} {value}"
+ ret = __salt__["cmd.run_all"](cmd, python_shell=False)
+ if ret["retcode"] != 0:
+ return False
+@@ -3662,8 +3662,8 @@ def attachable(name, path=None):
+ log.debug("Checking if LXC container %s is attachable", name)
+ cmd = "lxc-attach"
+ if path:
+- cmd += " -P {}".format(pipes.quote(path))
+- cmd += " --clear-env -n {} -- /usr/bin/env".format(name)
++ cmd += f" -P {shlex.quote(path)}"
++ cmd += f" --clear-env -n {name} -- /usr/bin/env"
+ result = (
+ __salt__["cmd.retcode"](
+ cmd, python_shell=False, output_loglevel="quiet", ignore_retcode=True
+diff --git a/salt/modules/mac_keychain.py b/salt/modules/mac_keychain.py
+index a823c428b76..7fdc162b9aa 100644
+--- a/salt/modules/mac_keychain.py
++++ b/salt/modules/mac_keychain.py
+@@ -11,20 +11,6 @@ import shlex
+
+ import salt.utils.platform
+
+-try:
+- import pipes
+-
+- HAS_DEPS = True
+-except ImportError:
+- HAS_DEPS = False
+-
+-if hasattr(shlex, "quote"):
+- _quote = shlex.quote
+-elif HAS_DEPS and hasattr(pipes, "quote"):
+- _quote = pipes.quote
+-else:
+- _quote = None
+-
+ log = logging.getLogger(__name__)
+
+ __virtualname__ = "keychain"
+@@ -34,7 +20,7 @@ def __virtual__():
+ """
+ Only work on Mac OS
+ """
+- if salt.utils.platform.is_darwin() and _quote is not None:
++ if salt.utils.platform.is_darwin():
+ return __virtualname__
+ return (False, "Only available on Mac OS systems with pipes")
+
+@@ -82,7 +68,7 @@ def install(
+ if keychain_password is not None:
+ unlock_keychain(keychain, keychain_password)
+
+- cmd = "security import {} -P {} -k {}".format(cert, password, keychain)
++ cmd = f"security import {cert} -P {password} -k {keychain}"
+ if allow_any:
+ cmd += " -A"
+ return __salt__["cmd.run"](cmd)
+@@ -117,7 +103,7 @@ def uninstall(
+ if keychain_password is not None:
+ unlock_keychain(keychain, keychain_password)
+
+- cmd = 'security delete-certificate -c "{}" {}'.format(cert_name, keychain)
++ cmd = f'security delete-certificate -c "{cert_name}" {keychain}'
+ return __salt__["cmd.run"](cmd)
+
+
+@@ -137,7 +123,7 @@ def list_certs(keychain="/Library/Keychains/System.keychain"):
+ """
+ cmd = (
+ 'security find-certificate -a {} | grep -o "alis".*\\" | '
+- "grep -o '\\\"[-A-Za-z0-9.:() ]*\\\"'".format(_quote(keychain))
++ "grep -o '\\\"[-A-Za-z0-9.:() ]*\\\"'".format(shlex.quote(keychain))
+ )
+ out = __salt__["cmd.run"](cmd, python_shell=True)
+ return out.replace('"', "").split("\n")
+@@ -165,7 +151,7 @@ def get_friendly_name(cert, password):
+ """
+ cmd = (
+ "openssl pkcs12 -in {} -passin pass:{} -info -nodes -nokeys 2> /dev/null | "
+- "grep friendlyName:".format(_quote(cert), _quote(password))
++ "grep friendlyName:".format(shlex.quote(cert), shlex.quote(password))
+ )
+ out = __salt__["cmd.run"](cmd, python_shell=True)
+ return out.replace("friendlyName: ", "").strip()
+@@ -187,7 +173,7 @@ def get_default_keychain(user=None, domain="user"):
+
+ salt '*' keychain.get_default_keychain
+ """
+- cmd = "security default-keychain -d {}".format(domain)
++ cmd = f"security default-keychain -d {domain}"
+ return __salt__["cmd.run"](cmd, runas=user)
+
+
+@@ -210,7 +196,7 @@ def set_default_keychain(keychain, domain="user", user=None):
+
+ salt '*' keychain.set_keychain /Users/fred/Library/Keychains/login.keychain
+ """
+- cmd = "security default-keychain -d {} -s {}".format(domain, keychain)
++ cmd = f"security default-keychain -d {domain} -s {keychain}"
+ return __salt__["cmd.run"](cmd, runas=user)
+
+
+@@ -233,7 +219,7 @@ def unlock_keychain(keychain, password):
+
+ salt '*' keychain.unlock_keychain /tmp/test.p12 test123
+ """
+- cmd = "security unlock-keychain -p {} {}".format(password, keychain)
++ cmd = f"security unlock-keychain -p {password} {keychain}"
+ __salt__["cmd.run"](cmd)
+
+
+@@ -261,7 +247,7 @@ def get_hash(name, password=None):
+ name, password
+ )
+ else:
+- cmd = 'security find-certificate -c "{}" -m -p'.format(name)
++ cmd = f'security find-certificate -c "{name}" -m -p'
+
+ out = __salt__["cmd.run"](cmd)
+ matches = re.search(
+diff --git a/salt/modules/macpackage.py b/salt/modules/macpackage.py
+index faf5810d4fc..f9a6b7bb95c 100644
+--- a/salt/modules/macpackage.py
++++ b/salt/modules/macpackage.py
+@@ -9,31 +9,16 @@ import shlex
+
+ import salt.utils.platform
+
+-try:
+- import pipes
+-
+- HAS_DEPS = True
+-except ImportError:
+- HAS_DEPS = False
+-
+-
+ log = logging.getLogger(__name__)
+-__virtualname__ = "macpackage"
+-
+
+-if hasattr(shlex, "quote"):
+- _quote = shlex.quote
+-elif HAS_DEPS and hasattr(pipes, "quote"):
+- _quote = pipes.quote
+-else:
+- _quote = None
++__virtualname__ = "macpackage"
+
+
+ def __virtual__():
+ """
+ Only work on Mac OS
+ """
+- if salt.utils.platform.is_darwin() and _quote is not None:
++ if salt.utils.platform.is_darwin():
+ return __virtualname__
+ return (False, "Only available on Mac OS systems with pipes")
+
+@@ -60,11 +45,11 @@ def install(pkg, target="LocalSystem", store=False, allow_untrusted=False):
+ """
+ if "*." not in pkg:
+ # If we use wildcards, we cannot use quotes
+- pkg = _quote(pkg)
++ pkg = shlex.quote(pkg)
+
+- target = _quote(target)
++ target = shlex.quote(target)
+
+- cmd = "installer -pkg {} -target {}".format(pkg, target)
++ cmd = f"installer -pkg {pkg} -target {target}"
+ if store:
+ cmd += " -store"
+ if allow_untrusted:
+@@ -109,7 +94,7 @@ def install_app(app, target="/Applications/"):
+ if not app[-1] == "/":
+ app += "/"
+
+- cmd = 'rsync -a --delete "{}" "{}"'.format(app, target)
++ cmd = f'rsync -a --delete "{app}" "{target}"'
+ return __salt__["cmd.run"](cmd)
+
+
+@@ -154,7 +139,7 @@ def mount(dmg):
+
+ temp_dir = __salt__["temp.dir"](prefix="dmg-")
+
+- cmd = 'hdiutil attach -readonly -nobrowse -mountpoint {} "{}"'.format(temp_dir, dmg)
++ cmd = f'hdiutil attach -readonly -nobrowse -mountpoint {temp_dir} "{dmg}"'
+
+ return __salt__["cmd.run"](cmd), temp_dir
+
+@@ -176,7 +161,7 @@ def unmount(mountpoint):
+ salt '*' macpackage.unmount /dev/disk2
+ """
+
+- cmd = 'hdiutil detach "{}"'.format(mountpoint)
++ cmd = f'hdiutil detach "{mountpoint}"'
+
+ return __salt__["cmd.run"](cmd)
+
+@@ -216,7 +201,7 @@ def get_pkg_id(pkg):
+
+ salt '*' macpackage.get_pkg_id /tmp/test.pkg
+ """
+- pkg = _quote(pkg)
++ pkg = shlex.quote(pkg)
+ package_ids = []
+
+ # Create temp directory
+@@ -224,7 +209,7 @@ def get_pkg_id(pkg):
+
+ try:
+ # List all of the PackageInfo files
+- cmd = "xar -t -f {} | grep PackageInfo".format(pkg)
++ cmd = f"xar -t -f {pkg} | grep PackageInfo"
+ out = __salt__["cmd.run"](cmd, python_shell=True, output_loglevel="quiet")
+ files = out.split("\n")
+
+@@ -264,12 +249,12 @@ def get_mpkg_ids(mpkg):
+
+ salt '*' macpackage.get_mpkg_ids /dev/disk2
+ """
+- mpkg = _quote(mpkg)
++ mpkg = shlex.quote(mpkg)
+ package_infos = []
+ base_path = os.path.dirname(mpkg)
+
+ # List all of the .pkg files
+- cmd = "find {} -name *.pkg".format(base_path)
++ cmd = f"find {base_path} -name *.pkg"
+ out = __salt__["cmd.run"](cmd, python_shell=True)
+
+ pkg_files = out.split("\n")
+@@ -281,7 +266,7 @@ def get_mpkg_ids(mpkg):
+
+ def _get_pkg_id_from_pkginfo(pkginfo):
+ # Find our identifiers
+- pkginfo = _quote(pkginfo)
++ pkginfo = shlex.quote(pkginfo)
+ cmd = "cat {} | grep -Eo 'identifier=\"[a-zA-Z.0-9\\-]*\"' | cut -c 13- | tr -d '\"'".format(
+ pkginfo
+ )
+@@ -294,8 +279,8 @@ def _get_pkg_id_from_pkginfo(pkginfo):
+
+
+ def _get_pkg_id_dir(path):
+- path = _quote(os.path.join(path, "Contents/Info.plist"))
+- cmd = '/usr/libexec/PlistBuddy -c "print :CFBundleIdentifier" {}'.format(path)
++ path = shlex.quote(os.path.join(path, "Contents/Info.plist"))
++ cmd = f'/usr/libexec/PlistBuddy -c "print :CFBundleIdentifier" {path}'
+
+ # We can only use wildcards in python_shell which is
+ # sent by the macpackage state
+diff --git a/salt/modules/openstack_config.py b/salt/modules/openstack_config.py
+index 823afbf1c60..937c10da61a 100644
+--- a/salt/modules/openstack_config.py
++++ b/salt/modules/openstack_config.py
+@@ -13,28 +13,11 @@ import shlex
+ import salt.exceptions
+ import salt.utils.decorators.path
+
+-try:
+- import pipes
+-
+- HAS_DEPS = True
+-except ImportError:
+- HAS_DEPS = False
+-
+-if hasattr(shlex, "quote"):
+- _quote = shlex.quote
+-elif HAS_DEPS and hasattr(pipes, "quote"):
+- _quote = pipes.quote
+-else:
+- _quote = None
+-
+-
+ # Don't shadow built-in's.
+ __func_alias__ = {"set_": "set"}
+
+
+ def __virtual__():
+- if _quote is None and not HAS_DEPS:
+- return (False, "Missing dependencies")
+ return True
+
+
+@@ -69,10 +52,10 @@ def set_(filename, section, parameter, value):
+ salt-call openstack_config.set /etc/keystone/keystone.conf sql connection foo
+ """
+
+- filename = _quote(filename)
+- section = _quote(section)
+- parameter = _quote(parameter)
+- value = _quote(str(value))
++ filename = shlex.quote(filename)
++ section = shlex.quote(section)
++ parameter = shlex.quote(parameter)
++ value = shlex.quote(str(value))
+
+ result = __salt__["cmd.run_all"](
+ "openstack-config --set {} {} {} {}".format(
+@@ -109,12 +92,12 @@ def get(filename, section, parameter):
+
+ """
+
+- filename = _quote(filename)
+- section = _quote(section)
+- parameter = _quote(parameter)
++ filename = shlex.quote(filename)
++ section = shlex.quote(section)
++ parameter = shlex.quote(parameter)
+
+ result = __salt__["cmd.run_all"](
+- "openstack-config --get {} {} {}".format(filename, section, parameter),
++        f"openstack-config --get {filename} {section} {parameter}",
+ python_shell=False,
+ )
+
+@@ -145,12 +128,12 @@ def delete(filename, section, parameter):
+ salt-call openstack_config.delete /etc/keystone/keystone.conf sql connection
+ """
+
+- filename = _quote(filename)
+- section = _quote(section)
+- parameter = _quote(parameter)
++ filename = shlex.quote(filename)
++ section = shlex.quote(section)
++ parameter = shlex.quote(parameter)
+
+ result = __salt__["cmd.run_all"](
+- "openstack-config --del {} {} {}".format(filename, section, parameter),
++        f"openstack-config --del {filename} {section} {parameter}",
+ python_shell=False,
+ )
+
+diff --git a/salt/modules/postgres.py b/salt/modules/postgres.py
+index 25a72f1063c..f73959a92ed 100644
+--- a/salt/modules/postgres.py
++++ b/salt/modules/postgres.py
+@@ -46,8 +46,8 @@ import hmac
+ import io
+ import logging
+ import os
+-import pipes
+ import re
++import shlex
+ import tempfile
+
+ import salt.utils.files
+@@ -136,7 +136,7 @@ def __virtual__():
+ for util in utils:
+ if not salt.utils.path.which(util):
+ if not _find_pg_binary(util):
+- return (False, "{} was not found".format(util))
++ return (False, f"{util} was not found")
+ return True
+
+
+@@ -241,14 +241,14 @@ def _run_initdb(
+ raise CommandExecutionError("initdb executable not found.")
+ cmd = [
+ _INITDB_BIN,
+- "--pgdata={}".format(name),
+- "--username={}".format(user),
+- "--auth={}".format(auth),
+- "--encoding={}".format(encoding),
++ f"--pgdata={name}",
++ f"--username={user}",
++ f"--auth={auth}",
++ f"--encoding={encoding}",
+ ]
+
+ if locale is not None:
+- cmd.append("--locale={}".format(locale))
++ cmd.append(f"--locale={locale}")
+
+ # intentionally use short option, as the long option name has been
+ # renamed from "xlogdir" to "waldir" in PostgreSQL 10
+@@ -262,9 +262,9 @@ def _run_initdb(
+ if password is not None:
+ pgpassfile = salt.utils.files.mkstemp(text=True)
+ with salt.utils.files.fopen(pgpassfile, "w") as fp_:
+- fp_.write(salt.utils.stringutils.to_str("{}".format(password)))
++ fp_.write(salt.utils.stringutils.to_str(f"{password}"))
+ __salt__["file.chown"](pgpassfile, runas, "")
+- cmd.extend(["--pwfile={}".format(pgpassfile)])
++ cmd.extend([f"--pwfile={pgpassfile}"])
+
+ kwargs = dict(
+ runas=runas,
+@@ -273,7 +273,7 @@ def _run_initdb(
+ "postgres.timeout", default=_DEFAULT_COMMAND_TIMEOUT_SECS
+ ),
+ )
+- cmdstr = " ".join([pipes.quote(c) for c in cmd])
++ cmdstr = " ".join([shlex.quote(c) for c in cmd])
+ ret = __salt__["cmd.run_all"](cmdstr, python_shell=False, **kwargs)
+
+ if ret.get("retcode", 0) != 0:
+@@ -582,9 +582,7 @@ def _quote_ddl_value(value, quote="'"):
+ if value is None:
+ return None
+ if quote in value: # detect trivial sqli
+- raise SaltInvocationError(
+- "Unsupported character {} in value: {}".format(quote, value)
+- )
++ raise SaltInvocationError(f"Unsupported character {quote} in value: {value}")
+ return "{quote}{value}{quote}".format(quote=quote, value=value)
+
+
+@@ -617,7 +615,7 @@ def db_create(
+ """
+
+ # Base query to create a database
+- query = 'CREATE DATABASE "{}"'.format(name)
++ query = f'CREATE DATABASE "{name}"'
+
+ # "With"-options to create a database
+ with_args = salt.utils.odict.OrderedDict(
+@@ -685,11 +683,9 @@ def db_alter(
+ else:
+ queries = []
+ if owner:
+- queries.append('ALTER DATABASE "{}" OWNER TO "{}"'.format(name, owner))
++ queries.append(f'ALTER DATABASE "{name}" OWNER TO "{owner}"')
+ if tablespace:
+- queries.append(
+- 'ALTER DATABASE "{}" SET TABLESPACE "{}"'.format(name, tablespace)
+- )
++ queries.append(f'ALTER DATABASE "{name}" SET TABLESPACE "{tablespace}"')
+ for query in queries:
+ ret = _psql_prepare_and_run(
+ ["-c", query],
+@@ -726,10 +722,10 @@ def db_remove(
+ salt '*' postgres.db_remove 'dbname'
+ """
+ for query in [
+- 'REVOKE CONNECT ON DATABASE "{db}" FROM public;'.format(db=name),
++ f'REVOKE CONNECT ON DATABASE "{name}" FROM public;',
+ "SELECT pid, pg_terminate_backend(pid) FROM pg_stat_activity WHERE datname ="
+ " '{db}' AND pid <> pg_backend_pid();".format(db=name),
+- 'DROP DATABASE "{db}";'.format(db=name),
++ f'DROP DATABASE "{name}";',
+ ]:
+ ret = _psql_prepare_and_run(
+ ["-c", query],
+@@ -741,7 +737,7 @@ def db_remove(
+ password=password,
+ )
+ if ret["retcode"] != 0:
+- raise Exception("Failed: ret={}".format(ret))
++ raise Exception(f"Failed: ret={ret}")
+ return True
+
+
+@@ -846,10 +842,10 @@ def tablespace_create(
+ owner_query = ""
+ options_query = ""
+ if owner:
+- owner_query = 'OWNER "{}"'.format(owner)
++ owner_query = f'OWNER "{owner}"'
+ # should come out looking like: 'OWNER postgres'
+ if options:
+- optionstext = ["{} = {}".format(k, v) for k, v in options.items()]
++ optionstext = [f"{k} = {v}" for k, v in options.items()]
+ options_query = "WITH ( {} )".format(", ".join(optionstext))
+ # should come out looking like: 'WITH ( opt1 = 1.0, opt2 = 4.0 )'
+ query = "CREATE TABLESPACE \"{}\" {} LOCATION '{}' {}".format(
+@@ -902,9 +898,9 @@ def tablespace_alter(
+ queries = []
+
+ if new_name:
+- queries.append('ALTER TABLESPACE "{}" RENAME TO "{}"'.format(name, new_name))
++ queries.append(f'ALTER TABLESPACE "{name}" RENAME TO "{new_name}"')
+ if new_owner:
+- queries.append('ALTER TABLESPACE "{}" OWNER TO "{}"'.format(name, new_owner))
++ queries.append(f'ALTER TABLESPACE "{name}" OWNER TO "{new_owner}"')
+ if set_option:
+ queries.append(
+ 'ALTER TABLESPACE "{}" SET ({} = {})'.format(
+@@ -912,7 +908,7 @@ def tablespace_alter(
+ )
+ )
+ if reset_option:
+- queries.append('ALTER TABLESPACE "{}" RESET ({})'.format(name, reset_option))
++ queries.append(f'ALTER TABLESPACE "{name}" RESET ({reset_option})')
+
+ for query in queries:
+ ret = _psql_prepare_and_run(
+@@ -950,7 +946,7 @@ def tablespace_remove(
+
+ .. versionadded:: 2015.8.0
+ """
+- query = 'DROP TABLESPACE "{}"'.format(name)
++ query = f'DROP TABLESPACE "{name}"'
+ ret = _psql_prepare_and_run(
+ ["-c", query],
+ user=user,
+@@ -1158,11 +1154,11 @@ def _add_role_flag(string, test, flag, cond=None, prefix="NO", addtxt="", skip=F
+ cond = test
+ if test is not None:
+ if cond:
+- string = "{} {}".format(string, flag)
++ string = f"{string} {flag}"
+ else:
+- string = "{0} {2}{1}".format(string, flag, prefix)
++ string = f"{string} {prefix}{flag}"
+ if addtxt:
+- string = "{} {}".format(string, addtxt)
++ string = f"{string} {addtxt}"
+ return string
+
+
+@@ -1224,7 +1220,7 @@ def _verify_password(role, password, verifier, method):
+ def _md5_password(role, password):
+ return "md5{}".format(
+ hashlib.md5( # nosec
+- salt.utils.stringutils.to_bytes("{}{}".format(password, role))
++ salt.utils.stringutils.to_bytes(f"{password}{role}")
+ ).hexdigest()
+ )
+
+@@ -1343,7 +1339,7 @@ def _role_cmd_args(
+ if isinstance(groups, list):
+ groups = ",".join(groups)
+ for group in groups.split(","):
+- sub_cmd = '{}; GRANT "{}" TO "{}"'.format(sub_cmd, group, name)
++ sub_cmd = f'{sub_cmd}; GRANT "{group}" TO "{name}"'
+ return sub_cmd
+
+
+@@ -1380,7 +1376,7 @@ def _role_create(
+ log.info("%s '%s' already exists", typ_.capitalize(), name)
+ return False
+
+- sub_cmd = 'CREATE ROLE "{}" WITH'.format(name)
++ sub_cmd = f'CREATE ROLE "{name}" WITH'
+ sub_cmd = "{} {}".format(
+ sub_cmd,
+ _role_cmd_args(
+@@ -1506,7 +1502,7 @@ def _role_update(
+ log.info("%s '%s' could not be found", typ_.capitalize(), name)
+ return False
+
+- sub_cmd = 'ALTER ROLE "{}" WITH'.format(name)
++ sub_cmd = f'ALTER ROLE "{name}" WITH'
+ sub_cmd = "{} {}".format(
+ sub_cmd,
+ _role_cmd_args(
+@@ -1613,7 +1609,7 @@ def _role_remove(
+ return False
+
+ # user exists, proceed
+- sub_cmd = 'DROP ROLE "{}"'.format(name)
++ sub_cmd = f'DROP ROLE "{name}"'
+ _psql_prepare_and_run(
+ ["-c", sub_cmd],
+ runas=runas,
+@@ -1995,14 +1991,14 @@ def create_extension(
+ args = ["CREATE EXTENSION"]
+ if if_not_exists:
+ args.append("IF NOT EXISTS")
+- args.append('"{}"'.format(name))
++ args.append(f'"{name}"')
+ sargs = []
+ if schema:
+- sargs.append('SCHEMA "{}"'.format(schema))
++ sargs.append(f'SCHEMA "{schema}"')
+ if ext_version:
+- sargs.append("VERSION {}".format(ext_version))
++ sargs.append(f"VERSION {ext_version}")
+ if from_version:
+- sargs.append("FROM {}".format(from_version))
++ sargs.append(f"FROM {from_version}")
+ if sargs:
+ args.append("WITH")
+ args.extend(sargs)
+@@ -2011,13 +2007,9 @@ def create_extension(
+ else:
+ args = []
+ if schema and _EXTENSION_TO_MOVE in mtdata:
+- args.append(
+- 'ALTER EXTENSION "{}" SET SCHEMA "{}";'.format(name, schema)
+- )
++ args.append(f'ALTER EXTENSION "{name}" SET SCHEMA "{schema}";')
+ if ext_version and _EXTENSION_TO_UPGRADE in mtdata:
+- args.append(
+- 'ALTER EXTENSION "{}" UPDATE TO {};'.format(name, ext_version)
+- )
++ args.append(f'ALTER EXTENSION "{name}" UPDATE TO {ext_version};')
+ cmd = " ".join(args).strip()
+ if cmd:
+ _psql_prepare_and_run(
+@@ -2227,7 +2219,7 @@ def owner_to(
+
+ sqlfile = tempfile.NamedTemporaryFile()
+ sqlfile.write("begin;\n")
+- sqlfile.write('alter database "{}" owner to "{}";\n'.format(dbname, ownername))
++ sqlfile.write(f'alter database "{dbname}" owner to "{ownername}";\n')
+
+ queries = (
+ # schemas
+@@ -2335,9 +2327,9 @@ def schema_create(
+ log.info("'%s' already exists in '%s'", name, dbname)
+ return False
+
+- sub_cmd = 'CREATE SCHEMA "{}"'.format(name)
++ sub_cmd = f'CREATE SCHEMA "{name}"'
+ if owner is not None:
+- sub_cmd = '{} AUTHORIZATION "{}"'.format(sub_cmd, owner)
++ sub_cmd = f'{sub_cmd} AUTHORIZATION "{owner}"'
+
+ ret = _psql_prepare_and_run(
+ ["-c", sub_cmd],
+@@ -2401,7 +2393,7 @@ def schema_remove(
+ return False
+
+ # schema exists, proceed
+- sub_cmd = 'DROP SCHEMA "{}"'.format(name)
++ sub_cmd = f'DROP SCHEMA "{name}"'
+ _psql_prepare_and_run(
+ ["-c", sub_cmd],
+ runas=user,
+@@ -2721,7 +2713,7 @@ def language_create(
+ log.info("Language %s already exists in %s", name, maintenance_db)
+ return False
+
+- query = "CREATE LANGUAGE {}".format(name)
++ query = f"CREATE LANGUAGE {name}"
+
+ ret = _psql_prepare_and_run(
+ ["-c", query],
+@@ -2776,7 +2768,7 @@ def language_remove(
+ log.info("Language %s does not exist in %s", name, maintenance_db)
+ return False
+
+- query = "DROP LANGUAGE {}".format(name)
++ query = f"DROP LANGUAGE {name}"
+
+ ret = _psql_prepare_and_run(
+ ["-c", query],
+@@ -3035,9 +3027,7 @@ def _validate_privileges(object_type, privs, privileges):
+ _perms.append("ALL")
+
+ if object_type not in _PRIVILEGES_OBJECTS:
+- raise SaltInvocationError(
+- "Invalid object_type: {} provided".format(object_type)
+- )
++ raise SaltInvocationError(f"Invalid object_type: {object_type} provided")
+
+ if not set(privs).issubset(set(_perms)):
+ raise SaltInvocationError(
+@@ -3145,9 +3135,7 @@ def privileges_list(
+ query = _make_privileges_list_query(name, object_type, prepend)
+
+ if object_type not in _PRIVILEGES_OBJECTS:
+- raise SaltInvocationError(
+- "Invalid object_type: {} provided".format(object_type)
+- )
++ raise SaltInvocationError(f"Invalid object_type: {object_type} provided")
+
+ rows = psql_query(
+ query,
+@@ -3439,15 +3427,15 @@ def privileges_grant(
+ _grants = ",".join(_privs)
+
+ if object_type in ["table", "sequence"]:
+- on_part = '{}."{}"'.format(prepend, object_name)
++ on_part = f'{prepend}."{object_name}"'
+ elif object_type == "function":
+- on_part = "{}".format(object_name)
++ on_part = f"{object_name}"
+ else:
+- on_part = '"{}"'.format(object_name)
++ on_part = f'"{object_name}"'
+
+ if grant_option:
+ if object_type == "group":
+- query = 'GRANT {} TO "{}" WITH ADMIN OPTION'.format(object_name, name)
++ query = f'GRANT {object_name} TO "{name}" WITH ADMIN OPTION'
+ elif object_type in ("table", "sequence") and object_name.upper() == "ALL":
+ query = 'GRANT {} ON ALL {}S IN SCHEMA {} TO "{}" WITH GRANT OPTION'.format(
+ _grants, object_type.upper(), prepend, name
+@@ -3458,7 +3446,7 @@ def privileges_grant(
+ )
+ else:
+ if object_type == "group":
+- query = 'GRANT {} TO "{}"'.format(object_name, name)
++ query = f'GRANT {object_name} TO "{name}"'
+ elif object_type in ("table", "sequence") and object_name.upper() == "ALL":
+ query = 'GRANT {} ON ALL {}S IN SCHEMA {} TO "{}"'.format(
+ _grants, object_type.upper(), prepend, name
+@@ -3587,12 +3575,12 @@ def privileges_revoke(
+ _grants = ",".join(_privs)
+
+ if object_type in ["table", "sequence"]:
+- on_part = "{}.{}".format(prepend, object_name)
++ on_part = f"{prepend}.{object_name}"
+ else:
+ on_part = object_name
+
+ if object_type == "group":
+- query = "REVOKE {} FROM {}".format(object_name, name)
++ query = f"REVOKE {object_name} FROM {name}"
+ else:
+ query = "REVOKE {} ON {} {} FROM {}".format(
+ _grants, object_type.upper(), on_part, name
+diff --git a/salt/utils/cloud.py b/salt/utils/cloud.py
+index 9edf006c299..b7208dc4a64 100644
+--- a/salt/utils/cloud.py
++++ b/salt/utils/cloud.py
+@@ -10,8 +10,8 @@ import hashlib
+ import logging
+ import multiprocessing
+ import os
+-import pipes
+ import re
++import shlex
+ import shutil
+ import socket
+ import stat
+@@ -199,7 +199,7 @@ def __ssh_gateway_arguments(kwargs):
+ "-oUserKnownHostsFile=/dev/null",
+ "-oControlPath=none",
+ str(ssh_gateway_key),
+- "{}@{}".format(ssh_gateway_user, ssh_gateway),
++ f"{ssh_gateway_user}@{ssh_gateway}",
+ "-p",
+ str(ssh_gateway_port),
+ str(ssh_gateway_command),
+@@ -228,18 +228,18 @@ def os_script(os_, vm_=None, opts=None, minion=""):
+ # The user provided an absolute path to the deploy script, let's use it
+ return __render_script(os_, vm_, opts, minion)
+
+- if os.path.isabs("{}.sh".format(os_)):
++ if os.path.isabs(f"{os_}.sh"):
+ # The user provided an absolute path to the deploy script, although no
+ # extension was provided. Let's use it anyway.
+- return __render_script("{}.sh".format(os_), vm_, opts, minion)
++ return __render_script(f"{os_}.sh", vm_, opts, minion)
+
+ for search_path in opts["deploy_scripts_search_path"]:
+ if os.path.isfile(os.path.join(search_path, os_)):
+ return __render_script(os.path.join(search_path, os_), vm_, opts, minion)
+
+- if os.path.isfile(os.path.join(search_path, "{}.sh".format(os_))):
++ if os.path.isfile(os.path.join(search_path, f"{os_}.sh")):
+ return __render_script(
+- os.path.join(search_path, "{}.sh".format(os_)), vm_, opts, minion
++ os.path.join(search_path, f"{os_}.sh"), vm_, opts, minion
+ )
+ # No deploy script was found, return an empty string
+ return ""
+@@ -416,7 +416,7 @@ def bootstrap(vm_, opts=None):
+ )
+ if key_filename is not None and not os.path.isfile(key_filename):
+ raise SaltCloudConfigError(
+- "The defined ssh_keyfile '{}' does not exist".format(key_filename)
++ f"The defined ssh_keyfile '{key_filename}' does not exist"
+ )
+ has_ssh_agent = False
+ if (
+@@ -782,8 +782,8 @@ def wait_for_port(
+ # Don't add new hosts to the host key database
+ "-oStrictHostKeyChecking=no",
+ # make sure ssh can time out on connection lose
+- "-oServerAliveInterval={}".format(server_alive_interval),
+- "-oServerAliveCountMax={}".format(server_alive_count_max),
++ f"-oServerAliveInterval={server_alive_interval}",
++ f"-oServerAliveCountMax={server_alive_count_max}",
+ # Set hosts key database path to /dev/null, i.e., non-existing
+ "-oUserKnownHostsFile=/dev/null",
+ # Don't re-use the SSH connection. Less failures.
+@@ -808,21 +808,21 @@ def wait_for_port(
+ ]
+ )
+ # Netcat command testing remote port
+- command = "nc -z -w5 -q0 {} {}".format(host, port)
++ command = f"nc -z -w5 -q0 {host} {port}"
+ # SSH command
+ pcmd = "ssh {} {}@{} -p {} {}".format(
+ " ".join(ssh_args),
+ gateway["ssh_gateway_user"],
+ ssh_gateway,
+ ssh_gateway_port,
+- pipes.quote("date"),
++ shlex.quote("date"),
+ )
+ cmd = "ssh {} {}@{} -p {} {}".format(
+ " ".join(ssh_args),
+ gateway["ssh_gateway_user"],
+ ssh_gateway,
+ ssh_gateway_port,
+- pipes.quote(command),
++ shlex.quote(command),
+ )
+ log.debug("SSH command: '%s'", cmd)
+
+@@ -893,7 +893,7 @@ class Client:
+ service_name=None,
+ ):
+ self.service_name = service_name
+- self._exe_file = "{}.exe".format(self.service_name)
++ self._exe_file = f"{self.service_name}.exe"
+ self._client = PsExecClient(server, username, password, port, encrypt)
+ self._client._service = ScmrService(self.service_name, self._client.session)
+
+@@ -943,7 +943,7 @@ class Client:
+ # delete the PAExec executable
+ smb_tree = TreeConnect(
+ self._client.session,
+- r"\\{}\ADMIN$".format(self._client.connection.server_name),
++ rf"\\{self._client.connection.server_name}\ADMIN$",
+ )
+ log.info("Connecting to SMB Tree %s", smb_tree.share_name)
+ smb_tree.connect()
+@@ -968,10 +968,10 @@ def run_winexe_command(cmd, args, host, username, password, port=445):
+ """
+ Run a command remotely via the winexe executable
+ """
+- creds = "-U '{}%{}' //{}".format(username, password, host)
+- logging_creds = "-U '{}%XXX-REDACTED-XXX' //{}".format(username, host)
+- cmd = "winexe {} {} {}".format(creds, cmd, args)
+- logging_cmd = "winexe {} {} {}".format(logging_creds, cmd, args)
++ creds = f"-U '{username}%{password}' //{host}"
++ logging_creds = f"-U '{username}%XXX-REDACTED-XXX' //{host}"
++ cmd = f"winexe {creds} {cmd} {args}"
++ logging_cmd = f"winexe {logging_creds} {cmd} {args}"
+ return win_cmd(cmd, logging_command=logging_cmd)
+
+
+@@ -979,7 +979,7 @@ def run_psexec_command(cmd, args, host, username, password, port=445):
+ """
+ Run a command remotely using the psexec protocol
+ """
+- service_name = "PS-Exec-{}".format(uuid.uuid4())
++ service_name = f"PS-Exec-{uuid.uuid4()}"
+ with Client(
+ host, username, password, port=port, encrypt=False, service_name=service_name
+ ) as client:
+@@ -1098,7 +1098,7 @@ def validate_windows_cred_winexe(
+ """
+ Check if the windows credentials are valid
+ """
+- cmd = "winexe -U '{}%{}' //{} \"hostname\"".format(username, password, host)
++ cmd = f"winexe -U '{username}%{password}' //{host} \"hostname\""
+ logging_cmd = "winexe -U '{}%XXX-REDACTED-XXX' //{} \"hostname\"".format(
+ username, host
+ )
+@@ -1230,7 +1230,7 @@ def deploy_windows(
+ winrm_port=5986,
+ winrm_use_ssl=True,
+ winrm_verify_ssl=True,
+- **kwargs
++ **kwargs,
+ ):
+ """
+ Copy the install files to a remote Windows box, and execute them
+@@ -1289,20 +1289,20 @@ def deploy_windows(
+
+ salt.utils.smb.mkdirs("salttemp", conn=smb_conn)
+ root_dir = "ProgramData/Salt Project/Salt"
+- salt.utils.smb.mkdirs("{}/conf/pki/minion".format(root_dir), conn=smb_conn)
++ salt.utils.smb.mkdirs(f"{root_dir}/conf/pki/minion", conn=smb_conn)
+ root_dir = "ProgramData\\Salt Project\\Salt"
+
+ if minion_pub:
+ salt.utils.smb.put_str(
+ minion_pub,
+- "{}\\conf\\pki\\minion\\minion.pub".format(root_dir),
++ f"{root_dir}\\conf\\pki\\minion\\minion.pub",
+ conn=smb_conn,
+ )
+
+ if minion_pem:
+ salt.utils.smb.put_str(
+ minion_pem,
+- "{}\\conf\\pki\\minion\\minion.pem".format(root_dir),
++ f"{root_dir}\\conf\\pki\\minion\\minion.pem",
+ conn=smb_conn,
+ )
+
+@@ -1314,7 +1314,7 @@ def deploy_windows(
+ try:
+ salt.utils.smb.put_file(
+ master_sign_pub_file,
+- "{}\\conf\\pki\\minion\\master_sign.pub".format(root_dir),
++ f"{root_dir}\\conf\\pki\\minion\\master_sign.pub",
+ conn=smb_conn,
+ )
+ except Exception as e: # pylint: disable=broad-except
+@@ -1332,26 +1332,27 @@ def deploy_windows(
+ installer = comps[-1]
+ salt.utils.smb.put_file(
+ win_installer,
+- "salttemp\\{}".format(installer),
++ f"salttemp\\{installer}",
+ "C$",
+ conn=smb_conn,
+ )
+
++ cmd = f"c:\\salttemp\\{installer}"
++ args = [
++ "/S",
++ f"/master={_format_master_param(master)}",
++ f"/minion-name={name}",
++ ]
++
+ if use_winrm:
+- winrm_cmd(
+- winrm_session,
+- "c:\\salttemp\\{}".format(installer),
+- ["/S", "/master={}".format(master), "/minion-name={}".format(name)],
+- )
++ winrm_cmd(winrm_session, cmd, args)
+ else:
+- cmd = "c:\\salttemp\\{}".format(installer)
+- args = "/S /master={} /minion-name={}".format(master, name)
+ stdout, stderr, ret_code = run_psexec_command(
+- cmd, args, host, username, password
++ cmd, " ".join(args), host, username, password
+ )
+
+ if ret_code != 0:
+- raise Exception("Fail installer {}".format(ret_code))
++ raise Exception(f"Fail installer {ret_code}")
+
+ # Copy over minion_conf
+ if minion_conf:
+@@ -1367,7 +1368,7 @@ def deploy_windows(
+ if minion_grains:
+ salt.utils.smb.put_str(
+ salt_config_to_yaml(minion_grains, line_break="\r\n"),
+- "{}\\conf\\grains".format(root_dir),
++ f"{root_dir}\\conf\\grains",
+ conn=smb_conn,
+ )
+ # Add special windows minion configuration
+@@ -1384,7 +1385,7 @@ def deploy_windows(
+ minion_conf = dict(minion_conf, **windows_minion_conf)
+ salt.utils.smb.put_str(
+ salt_config_to_yaml(minion_conf, line_break="\r\n"),
+- "{}\\conf\\minion".format(root_dir),
++ f"{root_dir}\\conf\\minion",
+ conn=smb_conn,
+ )
+ # Delete C:\salttmp\ and installer file
+@@ -1394,7 +1395,7 @@ def deploy_windows(
+ winrm_cmd(winrm_session, "rmdir", ["/Q", "/S", "C:\\salttemp\\"])
+ else:
+ salt.utils.smb.delete_file(
+- "salttemp\\{}".format(installer), "C$", conn=smb_conn
++ f"salttemp\\{installer}", "C$", conn=smb_conn
+ )
+ salt.utils.smb.delete_directory("salttemp", "C$", conn=smb_conn)
+ # Shell out to psexec to ensure salt-minion service started
+@@ -1418,8 +1419,8 @@ def deploy_windows(
+ # Fire deploy action
+ fire_event(
+ "event",
+- "{} has been deployed at {}".format(name, host),
+- "salt/cloud/{}/deploy_windows".format(name),
++ f"{name} has been deployed at {host}",
++ f"salt/cloud/{name}/deploy_windows",
+ args={"name": name},
+ sock_dir=opts.get("sock_dir", os.path.join(__opts__["sock_dir"], "master")),
+ transport=opts.get("transport", "zeromq"),
+@@ -1469,7 +1470,7 @@ def deploy_script(
+ master_sign_pub_file=None,
+ cloud_grains=None,
+ force_minion_config=False,
+- **kwargs
++ **kwargs,
+ ):
+ """
+ Copy a deploy script to a remote server, execute it, and remove it
+@@ -1485,7 +1486,7 @@ def deploy_script(
+ )
+ if key_filename is not None and not os.path.isfile(key_filename):
+ raise SaltCloudConfigError(
+- "The defined key_filename '{}' does not exist".format(key_filename)
++ f"The defined key_filename '{key_filename}' does not exist"
+ )
+
+ gateway = None
+@@ -1532,35 +1533,28 @@ def deploy_script(
+ ssh_kwargs["password"] = password
+
+ if root_cmd(
+- "test -e '{}'".format(tmp_dir),
+- tty,
+- sudo,
+- allow_failure=True,
+- **ssh_kwargs
++ f"test -e '{tmp_dir}'", tty, sudo, allow_failure=True, **ssh_kwargs
+ ):
+ ret = root_cmd(
+- "sh -c \"( mkdir -p -m 700 '{}' )\"".format(tmp_dir),
++ f"sh -c \"( mkdir -p -m 700 '{tmp_dir}' )\"",
+ tty,
+ sudo,
+- **ssh_kwargs
++ **ssh_kwargs,
+ )
+ if ret:
+ raise SaltCloudSystemExit(
+- "Can't create temporary directory in {} !".format(tmp_dir)
++ f"Can't create temporary directory in {tmp_dir} !"
+ )
+ if sudo:
+ comps = tmp_dir.lstrip("/").rstrip("/").split("/")
+ if comps:
+ if len(comps) > 1 or comps[0] != "tmp":
+ ret = root_cmd(
+- 'chown {} "{}"'.format(username, tmp_dir),
+- tty,
+- sudo,
+- **ssh_kwargs
++ f'chown {username} "{tmp_dir}"', tty, sudo, **ssh_kwargs
+ )
+ if ret:
+ raise SaltCloudSystemExit(
+- "Cant set {} ownership on {}".format(username, tmp_dir)
++ f"Cant set {username} ownership on {tmp_dir}"
+ )
+
+ if not isinstance(file_map, dict):
+@@ -1590,15 +1584,13 @@ def deploy_script(
+ remote_dir = os.path.dirname(remote_file)
+
+ if remote_dir not in remote_dirs:
+- root_cmd(
+- "mkdir -p '{}'".format(remote_dir), tty, sudo, **ssh_kwargs
+- )
++ root_cmd(f"mkdir -p '{remote_dir}'", tty, sudo, **ssh_kwargs)
+ if ssh_kwargs["username"] != "root":
+ root_cmd(
+ "chown {} '{}'".format(ssh_kwargs["username"], remote_dir),
+ tty,
+ sudo,
+- **ssh_kwargs
++ **ssh_kwargs,
+ )
+ remote_dirs.append(remote_dir)
+ ssh_file(opts, remote_file, kwargs=ssh_kwargs, local_file=local_file)
+@@ -1606,21 +1598,21 @@ def deploy_script(
+
+ # Minion configuration
+ if minion_pem:
+- ssh_file(opts, "{}/minion.pem".format(tmp_dir), minion_pem, ssh_kwargs)
++ ssh_file(opts, f"{tmp_dir}/minion.pem", minion_pem, ssh_kwargs)
+ ret = root_cmd(
+- "chmod 600 '{}/minion.pem'".format(tmp_dir), tty, sudo, **ssh_kwargs
++ f"chmod 600 '{tmp_dir}/minion.pem'", tty, sudo, **ssh_kwargs
+ )
+ if ret:
+ raise SaltCloudSystemExit(
+- "Can't set perms on {}/minion.pem".format(tmp_dir)
++ f"Can't set perms on {tmp_dir}/minion.pem"
+ )
+ if minion_pub:
+- ssh_file(opts, "{}/minion.pub".format(tmp_dir), minion_pub, ssh_kwargs)
++ ssh_file(opts, f"{tmp_dir}/minion.pub", minion_pub, ssh_kwargs)
+
+ if master_sign_pub_file:
+ ssh_file(
+ opts,
+- "{}/master_sign.pub".format(tmp_dir),
++ f"{tmp_dir}/master_sign.pub",
+ kwargs=ssh_kwargs,
+ local_file=master_sign_pub_file,
+ )
+@@ -1638,7 +1630,7 @@ def deploy_script(
+ if minion_grains:
+ ssh_file(
+ opts,
+- "{}/grains".format(tmp_dir),
++ f"{tmp_dir}/grains",
+ salt_config_to_yaml(minion_grains),
+ ssh_kwargs,
+ )
+@@ -1646,24 +1638,22 @@ def deploy_script(
+ minion_conf["grains"] = {"salt-cloud": cloud_grains}
+ ssh_file(
+ opts,
+- "{}/minion".format(tmp_dir),
++ f"{tmp_dir}/minion",
+ salt_config_to_yaml(minion_conf),
+ ssh_kwargs,
+ )
+
+ # Master configuration
+ if master_pem:
+- ssh_file(opts, "{}/master.pem".format(tmp_dir), master_pem, ssh_kwargs)
++ ssh_file(opts, f"{tmp_dir}/master.pem", master_pem, ssh_kwargs)
+ ret = root_cmd(
+- "chmod 600 '{}/master.pem'".format(tmp_dir), tty, sudo, **ssh_kwargs
++ f"chmod 600 '{tmp_dir}/master.pem'", tty, sudo, **ssh_kwargs
+ )
+ if ret:
+- raise SaltCloudSystemExit(
+- "Cant set perms on {}/master.pem".format(tmp_dir)
+- )
++ raise SaltCloudSystemExit(f"Cant set perms on {tmp_dir}/master.pem")
+
+ if master_pub:
+- ssh_file(opts, "{}/master.pub".format(tmp_dir), master_pub, ssh_kwargs)
++ ssh_file(opts, f"{tmp_dir}/master.pub", master_pub, ssh_kwargs)
+
+ if master_conf:
+ if not isinstance(master_conf, dict):
+@@ -1677,34 +1667,31 @@ def deploy_script(
+
+ ssh_file(
+ opts,
+- "{}/master".format(tmp_dir),
++ f"{tmp_dir}/master",
+ salt_config_to_yaml(master_conf),
+ ssh_kwargs,
+ )
+
+ # XXX: We need to make these paths configurable
+- preseed_minion_keys_tempdir = "{}/preseed-minion-keys".format(tmp_dir)
++ preseed_minion_keys_tempdir = f"{tmp_dir}/preseed-minion-keys"
+ if preseed_minion_keys is not None:
+ # Create remote temp dir
+ ret = root_cmd(
+- "mkdir '{}'".format(preseed_minion_keys_tempdir),
+- tty,
+- sudo,
+- **ssh_kwargs
++ f"mkdir '{preseed_minion_keys_tempdir}'", tty, sudo, **ssh_kwargs
+ )
+ if ret:
+ raise SaltCloudSystemExit(
+- "Cant create {}".format(preseed_minion_keys_tempdir)
++ f"Cant create {preseed_minion_keys_tempdir}"
+ )
+ ret = root_cmd(
+- "chmod 700 '{}'".format(preseed_minion_keys_tempdir),
++ f"chmod 700 '{preseed_minion_keys_tempdir}'",
+ tty,
+ sudo,
+- **ssh_kwargs
++ **ssh_kwargs,
+ )
+ if ret:
+ raise SaltCloudSystemExit(
+- "Can't set perms on {}".format(preseed_minion_keys_tempdir)
++ f"Can't set perms on {preseed_minion_keys_tempdir}"
+ )
+ if ssh_kwargs["username"] != "root":
+ root_cmd(
+@@ -1713,7 +1700,7 @@ def deploy_script(
+ ),
+ tty,
+ sudo,
+- **ssh_kwargs
++ **ssh_kwargs,
+ )
+
+ # Copy pre-seed minion keys
+@@ -1723,10 +1710,10 @@ def deploy_script(
+
+ if ssh_kwargs["username"] != "root":
+ root_cmd(
+- "chown -R root '{}'".format(preseed_minion_keys_tempdir),
++ f"chown -R root '{preseed_minion_keys_tempdir}'",
+ tty,
+ sudo,
+- **ssh_kwargs
++ **ssh_kwargs,
+ )
+ if ret:
+ raise SaltCloudSystemExit(
+@@ -1740,25 +1727,21 @@ def deploy_script(
+ for command in preflight_cmds:
+ cmd_ret = root_cmd(command, tty, sudo, **ssh_kwargs)
+ if cmd_ret:
+- raise SaltCloudSystemExit(
+- "Pre-flight command failed: '{}'".format(command)
+- )
++ raise SaltCloudSystemExit(f"Pre-flight command failed: '{command}'")
+
+ # The actual deploy script
+ if script:
+ # got strange escaping issues with sudoer, going onto a
+ # subshell fixes that
+- ssh_file(opts, "{}/deploy.sh".format(tmp_dir), script, ssh_kwargs)
++ ssh_file(opts, f"{tmp_dir}/deploy.sh", script, ssh_kwargs)
+ ret = root_cmd(
+- "sh -c \"( chmod +x '{}/deploy.sh' )\";exit $?".format(tmp_dir),
++ f"sh -c \"( chmod +x '{tmp_dir}/deploy.sh' )\";exit $?",
+ tty,
+ sudo,
+- **ssh_kwargs
++ **ssh_kwargs,
+ )
+ if ret:
+- raise SaltCloudSystemExit(
+- "Can't set perms on {}/deploy.sh".format(tmp_dir)
+- )
++ raise SaltCloudSystemExit(f"Can't set perms on {tmp_dir}/deploy.sh")
+
+ time_used = time.mktime(time.localtime()) - time.mktime(starttime)
+ newtimeout = timeout - time_used
+@@ -1774,7 +1757,7 @@ def deploy_script(
+ kwargs=dict(
+ name=name, sock_dir=sock_dir, timeout=newtimeout, queue=queue
+ ),
+- name="DeployScriptCheckAuth({})".format(name),
++ name=f"DeployScriptCheckAuth({name})",
+ )
+ log.debug("Starting new process to wait for salt-minion")
+ process.start()
+@@ -1782,7 +1765,7 @@ def deploy_script(
+ # Run the deploy script
+ if script:
+ if "bootstrap-salt" in script:
+- deploy_command += " -c '{}'".format(tmp_dir)
++ deploy_command += f" -c '{tmp_dir}'"
+ if force_minion_config:
+ deploy_command += " -F"
+ if make_syndic is True:
+@@ -1794,9 +1777,9 @@ def deploy_script(
+ if keep_tmp is True:
+ deploy_command += " -K"
+ if preseed_minion_keys is not None:
+- deploy_command += " -k '{}'".format(preseed_minion_keys_tempdir)
++ deploy_command += f" -k '{preseed_minion_keys_tempdir}'"
+ if script_args:
+- deploy_command += " {}".format(script_args)
++ deploy_command += f" {script_args}"
+
+ if script_env:
+ if not isinstance(script_env, dict):
+@@ -1815,15 +1798,15 @@ def deploy_script(
+ # Upload our environ setter wrapper
+ ssh_file(
+ opts,
+- "{}/environ-deploy-wrapper.sh".format(tmp_dir),
++ f"{tmp_dir}/environ-deploy-wrapper.sh",
+ "\n".join(environ_script_contents),
+ ssh_kwargs,
+ )
+ root_cmd(
+- "chmod +x '{}/environ-deploy-wrapper.sh'".format(tmp_dir),
++ f"chmod +x '{tmp_dir}/environ-deploy-wrapper.sh'",
+ tty,
+ sudo,
+- **ssh_kwargs
++ **ssh_kwargs,
+ )
+ # The deploy command is now our wrapper
+ deploy_command = "'{}/environ-deploy-wrapper.sh'".format(
+@@ -1831,22 +1814,20 @@ def deploy_script(
+ )
+ if root_cmd(deploy_command, tty, sudo, **ssh_kwargs) != 0:
+ raise SaltCloudSystemExit(
+- "Executing the command '{}' failed".format(deploy_command)
++ f"Executing the command '{deploy_command}' failed"
+ )
+ log.debug("Executed command '%s'", deploy_command)
+
+ # Remove the deploy script
+ if not keep_tmp:
+- root_cmd(
+- "rm -f '{}/deploy.sh'".format(tmp_dir), tty, sudo, **ssh_kwargs
+- )
++ root_cmd(f"rm -f '{tmp_dir}/deploy.sh'", tty, sudo, **ssh_kwargs)
+ log.debug("Removed %s/deploy.sh", tmp_dir)
+ if script_env:
+ root_cmd(
+- "rm -f '{}/environ-deploy-wrapper.sh'".format(tmp_dir),
++ f"rm -f '{tmp_dir}/environ-deploy-wrapper.sh'",
+ tty,
+ sudo,
+- **ssh_kwargs
++ **ssh_kwargs,
+ )
+ log.debug("Removed %s/environ-deploy-wrapper.sh", tmp_dir)
+
+@@ -1855,57 +1836,40 @@ def deploy_script(
+ else:
+ # Remove minion configuration
+ if minion_pub:
+- root_cmd(
+- "rm -f '{}/minion.pub'".format(tmp_dir), tty, sudo, **ssh_kwargs
+- )
++ root_cmd(f"rm -f '{tmp_dir}/minion.pub'", tty, sudo, **ssh_kwargs)
+ log.debug("Removed %s/minion.pub", tmp_dir)
+ if minion_pem:
+- root_cmd(
+- "rm -f '{}/minion.pem'".format(tmp_dir), tty, sudo, **ssh_kwargs
+- )
++ root_cmd(f"rm -f '{tmp_dir}/minion.pem'", tty, sudo, **ssh_kwargs)
+ log.debug("Removed %s/minion.pem", tmp_dir)
+ if minion_conf:
+- root_cmd(
+- "rm -f '{}/grains'".format(tmp_dir), tty, sudo, **ssh_kwargs
+- )
++ root_cmd(f"rm -f '{tmp_dir}/grains'", tty, sudo, **ssh_kwargs)
+ log.debug("Removed %s/grains", tmp_dir)
+- root_cmd(
+- "rm -f '{}/minion'".format(tmp_dir), tty, sudo, **ssh_kwargs
+- )
++ root_cmd(f"rm -f '{tmp_dir}/minion'", tty, sudo, **ssh_kwargs)
+ log.debug("Removed %s/minion", tmp_dir)
+ if master_sign_pub_file:
+ root_cmd(
+- "rm -f {}/master_sign.pub".format(tmp_dir),
+- tty,
+- sudo,
+- **ssh_kwargs
++ f"rm -f {tmp_dir}/master_sign.pub", tty, sudo, **ssh_kwargs
+ )
+ log.debug("Removed %s/master_sign.pub", tmp_dir)
+
+ # Remove master configuration
+ if master_pub:
+- root_cmd(
+- "rm -f '{}/master.pub'".format(tmp_dir), tty, sudo, **ssh_kwargs
+- )
++ root_cmd(f"rm -f '{tmp_dir}/master.pub'", tty, sudo, **ssh_kwargs)
+ log.debug("Removed %s/master.pub", tmp_dir)
+ if master_pem:
+- root_cmd(
+- "rm -f '{}/master.pem'".format(tmp_dir), tty, sudo, **ssh_kwargs
+- )
++ root_cmd(f"rm -f '{tmp_dir}/master.pem'", tty, sudo, **ssh_kwargs)
+ log.debug("Removed %s/master.pem", tmp_dir)
+ if master_conf:
+- root_cmd(
+- "rm -f '{}/master'".format(tmp_dir), tty, sudo, **ssh_kwargs
+- )
++ root_cmd(f"rm -f '{tmp_dir}/master'", tty, sudo, **ssh_kwargs)
+ log.debug("Removed %s/master", tmp_dir)
+
+ # Remove pre-seed keys directory
+ if preseed_minion_keys is not None:
+ root_cmd(
+- "rm -rf '{}'".format(preseed_minion_keys_tempdir),
++ f"rm -rf '{preseed_minion_keys_tempdir}'",
+ tty,
+ sudo,
+- **ssh_kwargs
++ **ssh_kwargs,
+ )
+ log.debug("Removed %s", preseed_minion_keys_tempdir)
+
+@@ -1920,15 +1884,13 @@ def deploy_script(
+ # for line in output:
+ # print(line)
+ log.info("Executing %s on the salt-minion", start_action)
+- root_cmd(
+- "salt-call {}".format(start_action), tty, sudo, **ssh_kwargs
+- )
++ root_cmd(f"salt-call {start_action}", tty, sudo, **ssh_kwargs)
+ log.info("Finished executing %s on the salt-minion", start_action)
+ # Fire deploy action
+ fire_event(
+ "event",
+- "{} has been deployed at {}".format(name, host),
+- "salt/cloud/{}/deploy_script".format(name),
++ f"{name} has been deployed at {host}",
++ f"salt/cloud/{name}/deploy_script",
+ args={"name": name, "host": host},
+ sock_dir=opts.get(
+ "sock_dir", os.path.join(__opts__["sock_dir"], "master")
+@@ -1961,7 +1923,7 @@ def run_inline_script(
+ tty=None,
+ opts=None,
+ tmp_dir="/tmp/.saltcloud-inline_script",
+- **kwargs
++ **kwargs,
+ ):
+ """
+ Run the inline script commands, one by one
+@@ -2018,11 +1980,11 @@ def run_inline_script(
+ # TODO: check edge cases (e.g. ssh gateways, salt deploy disabled, etc.)
+ if (
+ root_cmd(
+- 'test -e \\"{}\\"'.format(tmp_dir),
++ f'test -e \\"{tmp_dir}\\"',
+ tty,
+ sudo,
+ allow_failure=True,
+- **ssh_kwargs
++ **ssh_kwargs,
+ )
+ and inline_script
+ ):
+@@ -2030,11 +1992,11 @@ def run_inline_script(
+ for cmd_line in inline_script:
+ log.info("Executing inline command: %s", cmd_line)
+ ret = root_cmd(
+- 'sh -c "( {} )"'.format(cmd_line),
++ f'sh -c "( {cmd_line} )"',
+ tty,
+ sudo,
+ allow_failure=True,
+- **ssh_kwargs
++ **ssh_kwargs,
+ )
+ if ret:
+ log.info("[%s] Output: %s", cmd_line, ret)
+@@ -2138,7 +2100,7 @@ def _exec_ssh_cmd(cmd, error_msg=None, allow_failure=False, **kwargs):
+ time.sleep(0.5)
+ if proc.exitstatus != 0 and allow_failure is False:
+ raise SaltCloudSystemExit(
+- "Command '{}' failed. Exit code: {}".format(cmd, proc.exitstatus)
++ f"Command '{cmd}' failed. Exit code: {proc.exitstatus}"
+ )
+ return proc.exitstatus
+ except salt.utils.vt.TerminalException as err:
+@@ -2241,7 +2203,7 @@ def scp_file(dest_path, contents=None, kwargs=None, local_file=None):
+ cmd,
+ error_msg="Failed to upload file '{0}': {1}\n{2}",
+ password_retries=3,
+- **kwargs
++ **kwargs,
+ )
+ finally:
+ if contents is not None:
+@@ -2359,7 +2321,7 @@ def sftp_file(dest_path, contents=None, kwargs=None, local_file=None):
+ cmd,
+ error_msg="Failed to upload file '{0}': {1}\n{2}",
+ password_retries=3,
+- **kwargs
++ **kwargs,
+ )
+ finally:
+ if contents is not None:
+@@ -2419,11 +2381,11 @@ def root_cmd(command, tty, sudo, allow_failure=False, **kwargs):
+
+ if sudo:
+ if sudo_password is None:
+- command = "sudo {}".format(command)
++ command = f"sudo {command}"
+ logging_command = command
+ else:
+- logging_command = 'sudo -S "XXX-REDACTED-XXX" {}'.format(command)
+- command = "sudo -S {}".format(command)
++ logging_command = f'sudo -S "XXX-REDACTED-XXX" {command}'
++ command = f"sudo -S {command}"
+
+ log.debug("Using sudo to run command %s", logging_command)
+
+@@ -2442,9 +2404,9 @@ def root_cmd(command, tty, sudo, allow_failure=False, **kwargs):
+ ssh_args.extend(
+ [
+ # Don't add new hosts to the host key database
+- "-oStrictHostKeyChecking={}".format(host_key_checking),
++ f"-oStrictHostKeyChecking={host_key_checking}",
+ # Set hosts key database path to /dev/null, i.e., non-existing
+- "-oUserKnownHostsFile={}".format(known_hosts_file),
++ f"-oUserKnownHostsFile={known_hosts_file}",
+ # Don't re-use the SSH connection. Less failures.
+ "-oControlPath=none",
+ ]
+@@ -2477,12 +2439,12 @@ def root_cmd(command, tty, sudo, allow_failure=False, **kwargs):
+
+ cmd = "ssh {0} {1[username]}@{1[hostname]} ".format(" ".join(ssh_args), kwargs)
+ logging_command = cmd + logging_command
+- cmd = cmd + pipes.quote(command)
++ cmd = cmd + shlex.quote(command)
+
+ hard_timeout = kwargs.get("hard_timeout")
+ if hard_timeout is not None:
+- logging_command = "timeout {} {}".format(hard_timeout, logging_command)
+- cmd = "timeout {} {}".format(hard_timeout, cmd)
++ logging_command = f"timeout {hard_timeout} {logging_command}"
++ cmd = f"timeout {hard_timeout} {cmd}"
+
+ log.debug("SSH command: '%s'", logging_command)
+
+@@ -2504,7 +2466,7 @@ def check_auth(name, sock_dir=None, queue=None, timeout=300):
+ ret = event.get_event(full=True)
+ if ret is None:
+ continue
+- if ret["tag"] == "salt/minion/{}/start".format(name):
++ if ret["tag"] == f"salt/minion/{name}/start":
+ queue.put(name)
+ newtimeout = 0
+ log.debug("Minion %s is ready to receive commands", name)
+@@ -2550,7 +2512,7 @@ def check_name(name, safe_chars):
+ """
+ Check whether the specified name contains invalid characters
+ """
+- regexp = re.compile("[^{}]".format(safe_chars))
++ regexp = re.compile(f"[^{safe_chars}]")
+ if regexp.search(name):
+ raise SaltCloudException(
+ "{} contains characters not supported by this cloud provider. "
+@@ -2844,7 +2806,7 @@ def request_minion_cachedir(
+ "provider": provider,
+ }
+
+- fname = "{}.p".format(minion_id)
++ fname = f"{minion_id}.p"
+ path = os.path.join(base, "requested", fname)
+ with salt.utils.files.fopen(path, "wb") as fh_:
+ salt.utils.msgpack.dump(data, fh_, encoding=MSGPACK_ENCODING)
+@@ -2875,7 +2837,7 @@ def change_minion_cachedir(
+ if base is None:
+ base = __opts__["cachedir"]
+
+- fname = "{}.p".format(minion_id)
++ fname = f"{minion_id}.p"
+ path = os.path.join(base, cachedir, fname)
+
+ with salt.utils.files.fopen(path, "r") as fh_:
+@@ -2898,7 +2860,7 @@ def activate_minion_cachedir(minion_id, base=None):
+ if base is None:
+ base = __opts__["cachedir"]
+
+- fname = "{}.p".format(minion_id)
++ fname = f"{minion_id}.p"
+ src = os.path.join(base, "requested", fname)
+ dst = os.path.join(base, "active")
+ shutil.move(src, dst)
+@@ -2920,7 +2882,7 @@ def delete_minion_cachedir(minion_id, provider, opts, base=None):
+ base = __opts__["cachedir"]
+
+ driver = next(iter(__opts__["providers"][provider].keys()))
+- fname = "{}.p".format(minion_id)
++ fname = f"{minion_id}.p"
+ for cachedir in "requested", "active":
+ path = os.path.join(base, cachedir, driver, provider, fname)
+ log.debug("path: %s", path)
+@@ -3013,7 +2975,7 @@ def update_bootstrap(config, url=None):
+ # in last case, assuming we got a script content
+ else:
+ script_content = url
+- script_name = "{}.sh".format(hashlib.sha1(script_content).hexdigest())
++ script_name = f"{hashlib.sha1(script_content).hexdigest()}.sh"
+
+ if not script_content:
+ raise ValueError("No content in bootstrap script !")
+@@ -3107,7 +3069,7 @@ def cache_node_list(nodes, provider, opts):
+
+ for node in nodes:
+ diff_node_cache(prov_dir, node, nodes[node], opts)
+- path = os.path.join(prov_dir, "{}.p".format(node))
++ path = os.path.join(prov_dir, f"{node}.p")
+ with salt.utils.files.fopen(path, "wb") as fh_:
+ salt.utils.msgpack.dump(nodes[node], fh_, encoding=MSGPACK_ENCODING)
+
+@@ -3162,7 +3124,7 @@ def missing_node_cache(prov_dir, node_list, provider, opts):
+ fire_event(
+ "event",
+ "cached node missing from provider",
+- "salt/cloud/{}/cache_node_missing".format(node),
++ f"salt/cloud/{node}/cache_node_missing",
+ args={"missing node": node},
+ sock_dir=opts.get(
+ "sock_dir", os.path.join(__opts__["sock_dir"], "master")
+@@ -3190,7 +3152,7 @@ def diff_node_cache(prov_dir, node, new_data, opts):
+
+ if node is None:
+ return
+- path = "{}.p".format(os.path.join(prov_dir, node))
++ path = f"{os.path.join(prov_dir, node)}.p"
+
+ if not os.path.exists(path):
+ event_data = _strip_cache_events(new_data, opts)
+@@ -3198,7 +3160,7 @@ def diff_node_cache(prov_dir, node, new_data, opts):
+ fire_event(
+ "event",
+ "new node found",
+- "salt/cloud/{}/cache_node_new".format(node),
++ f"salt/cloud/{node}/cache_node_new",
+ args={"new_data": event_data},
+ sock_dir=opts.get("sock_dir", os.path.join(__opts__["sock_dir"], "master")),
+ transport=opts.get("transport", "zeromq"),
+@@ -3222,7 +3184,7 @@ def diff_node_cache(prov_dir, node, new_data, opts):
+ fire_event(
+ "event",
+ "node data differs",
+- "salt/cloud/{}/cache_node_diff".format(node),
++ f"salt/cloud/{node}/cache_node_diff",
+ args={
+ "new_data": _strip_cache_events(new_data, opts),
+ "cache_data": _strip_cache_events(cache_data, opts),
+@@ -3266,7 +3228,7 @@ def _salt_cloud_force_ascii(exc):
+ errors.
+ """
+ if not isinstance(exc, (UnicodeEncodeError, UnicodeTranslateError)):
+- raise TypeError("Can't handle {}".format(exc))
++ raise TypeError(f"Can't handle {exc}")
+
+ unicode_trans = {
+ # Convert non-breaking space to space
+@@ -3326,7 +3288,7 @@ def store_password_in_keyring(credential_id, username, password=None):
+
+ # pylint: enable=import-error
+ if password is None:
+- prompt = "Please enter password for {}: ".format(credential_id)
++ prompt = f"Please enter password for {credential_id}: "
+ try:
+ password = getpass.getpass(prompt)
+ except EOFError:
+diff --git a/salt/utils/http.py b/salt/utils/http.py
+index 91c5cbf08ed..26f2e85c2ee 100644
+--- a/salt/utils/http.py
++++ b/salt/utils/http.py
+@@ -5,7 +5,7 @@ and the like, but also useful for basic HTTP testing.
+ .. versionadded:: 2015.5.0
+ """
+
+-import cgi
++import email.message
+ import gzip
+ import http.client
+ import http.cookiejar
+@@ -84,7 +84,7 @@ except ImportError:
+ HAS_CERTIFI = False
+
+ log = logging.getLogger(__name__)
+-USERAGENT = "Salt/{}".format(salt.version.__version__)
++USERAGENT = f"Salt/{salt.version.__version__}"
+
+
+ def __decompressContent(coding, pgctnt):
+@@ -170,7 +170,7 @@ def query(
+ formdata_fieldname=None,
+ formdata_filename=None,
+ decode_body=True,
+- **kwargs
++ **kwargs,
+ ):
+ """
+ Query a resource, and decode the return data
+@@ -295,7 +295,7 @@ def query(
+ auth = (username, password)
+
+ if agent == USERAGENT:
+- agent = "{} http.query()".format(agent)
++ agent = f"{agent} http.query()"
+ header_dict["User-agent"] = agent
+
+ if backend == "requests":
+@@ -360,14 +360,14 @@ def query(
+ url,
+ params=params,
+ files={formdata_fieldname: (formdata_filename, io.StringIO(data))},
+- **req_kwargs
++ **req_kwargs,
+ )
+ else:
+ result = sess.request(method, url, params=params, data=data, **req_kwargs)
+ result.raise_for_status()
+ if stream is True:
+ # fake a HTTP response header
+- header_callback("HTTP/1.0 {} MESSAGE".format(result.status_code))
++ header_callback(f"HTTP/1.0 {result.status_code} MESSAGE")
+ # fake streaming the content
+ streaming_callback(result.content)
+ return {
+@@ -483,15 +483,12 @@ def query(
+ result_headers = dict(result.info())
+ result_text = result.read()
+ if "Content-Type" in result_headers:
+- res_content_type, res_params = cgi.parse_header(
+- result_headers["Content-Type"]
+- )
+- if (
+- res_content_type.startswith("text/")
+- and "charset" in res_params
+- and not isinstance(result_text, str)
+- ):
+- result_text = result_text.decode(res_params["charset"])
++ msg = email.message.EmailMessage()
++ msg.add_header("Content-Type", result_headers["Content-Type"])
++ if msg.get_content_type().startswith("text/"):
++ content_charset = msg.get_content_charset()
++ if content_charset and not isinstance(result_text, str):
++ result_text = result_text.decode(content_charset)
+ if isinstance(result_text, bytes) and decode_body:
+ result_text = result_text.decode("utf-8")
+ ret["body"] = result_text
+@@ -636,15 +633,12 @@ def query(
+ result_headers = result.headers
+ result_text = result.body
+ if "Content-Type" in result_headers:
+- res_content_type, res_params = cgi.parse_header(
+- result_headers["Content-Type"]
+- )
+- if (
+- res_content_type.startswith("text/")
+- and "charset" in res_params
+- and not isinstance(result_text, str)
+- ):
+- result_text = result_text.decode(res_params["charset"])
++ msg = email.message.EmailMessage()
++ msg.add_header("Content-Type", result_headers["Content-Type"])
++ if msg.get_content_type().startswith("text/"):
++ content_charset = msg.get_content_charset()
++ if content_charset and not isinstance(result_text, str):
++ result_text = result_text.decode(content_charset)
+ if isinstance(result_text, bytes) and decode_body:
+ result_text = result_text.decode("utf-8")
+ ret["body"] = result_text
+@@ -1038,12 +1032,12 @@ def _sanitize_url_components(comp_list, field):
+ """
+ if not comp_list:
+ return ""
+- elif comp_list[0].startswith("{}=".format(field)):
+- ret = "{}=XXXXXXXXXX&".format(field)
++ elif comp_list[0].startswith(f"{field}="):
++ ret = f"{field}=XXXXXXXXXX&"
+ comp_list.remove(comp_list[0])
+ return ret + _sanitize_url_components(comp_list, field)
+ else:
+- ret = "{}&".format(comp_list[0])
++ ret = f"{comp_list[0]}&"
+ comp_list.remove(comp_list[0])
+ return ret + _sanitize_url_components(comp_list, field)
+
+diff --git a/salt/utils/jinja.py b/salt/utils/jinja.py
+index a6a8a279605..d90957a0087 100644
+--- a/salt/utils/jinja.py
++++ b/salt/utils/jinja.py
+@@ -2,13 +2,12 @@
+ Jinja loading utils to enable a more powerful backend for jinja templates
+ """
+
+-
+ import itertools
+ import logging
+ import os.path
+-import pipes
+ import pprint
+ import re
++import shlex
+ import time
+ import uuid
+ import warnings
+@@ -242,11 +241,11 @@ class PrintableDict(OrderedDict):
+ if isinstance(value, str):
+ # keeps quotes around strings
+ # pylint: disable=repr-flag-used-in-string
+- output.append("{!r}: {!r}".format(key, value))
++ output.append(f"{key!r}: {value!r}")
+ # pylint: enable=repr-flag-used-in-string
+ else:
+ # let default output
+- output.append("{!r}: {!s}".format(key, value))
++ output.append(f"{key!r}: {value!s}")
+ return "{" + ", ".join(output) + "}"
+
+ def __repr__(self): # pylint: disable=W0221
+@@ -255,7 +254,7 @@ class PrintableDict(OrderedDict):
+ # Raw string formatter required here because this is a repr
+ # function.
+ # pylint: disable=repr-flag-used-in-string
+- output.append("{!r}: {!r}".format(key, value))
++ output.append(f"{key!r}: {value!r}")
+ # pylint: enable=repr-flag-used-in-string
+ return "{" + ", ".join(output) + "}"
+
+@@ -441,7 +440,7 @@ def quote(txt):
+
+ 'my_text'
+ """
+- return pipes.quote(txt)
++ return shlex.quote(txt)
+
+
+ @jinja_filter()
+@@ -1095,13 +1094,13 @@ class SerializerExtension(Extension):
+ # to the stringified version of the exception.
+ msg += str(exc)
+ else:
+- msg += "{}\n".format(problem)
++ msg += f"{problem}\n"
+ msg += salt.utils.stringutils.get_context(
+ buf, line, marker=" <======================"
+ )
+ raise TemplateRuntimeError(msg)
+ except AttributeError:
+- raise TemplateRuntimeError("Unable to load yaml from {}".format(value))
++ raise TemplateRuntimeError(f"Unable to load yaml from {value}")
+
+ def load_json(self, value):
+ if isinstance(value, TemplateModule):
+@@ -1109,7 +1108,7 @@ class SerializerExtension(Extension):
+ try:
+ return salt.utils.json.loads(value)
+ except (ValueError, TypeError, AttributeError):
+- raise TemplateRuntimeError("Unable to load json from {}".format(value))
++ raise TemplateRuntimeError(f"Unable to load json from {value}")
+
+ def load_text(self, value):
+ if isinstance(value, TemplateModule):
+@@ -1144,7 +1143,7 @@ class SerializerExtension(Extension):
+ return self._parse_profile_block(parser, label, "profile block", body, lineno)
+
+ def _create_profile_id(self, parser):
+- return "_salt_profile_{}".format(parser.free_identifier().name)
++ return f"_salt_profile_{parser.free_identifier().name}"
+
+ def _profile_start(self, label, source):
+ return (label, source, time.time())
+@@ -1186,7 +1185,7 @@ class SerializerExtension(Extension):
+ filter_name = parser.stream.current.value
+ lineno = next(parser.stream).lineno
+ if filter_name not in self.environment.filters:
+- parser.fail("Unable to parse {}".format(filter_name), lineno)
++ parser.fail(f"Unable to parse {filter_name}", lineno)
+
+ parser.stream.expect("name:as")
+ target = parser.parse_assign_target()
+@@ -1225,7 +1224,7 @@ class SerializerExtension(Extension):
+ nodes.Name(target, "store").set_lineno(lineno),
+ nodes.Filter(
+ nodes.Name(target, "load").set_lineno(lineno),
+- "load_{}".format(converter),
++ f"load_{converter}",
+ [],
+ [],
+ None,
+@@ -1234,7 +1233,7 @@ class SerializerExtension(Extension):
+ ).set_lineno(lineno),
+ ]
+ return self._parse_profile_block(
+- parser, import_node.template, "import_{}".format(converter), body, lineno
++ parser, import_node.template, f"import_{converter}", body, lineno
+ )
+
+ def dict_to_sls_yaml_params(self, value, flow_style=False):
+diff --git a/salt/utils/locales.py b/salt/utils/locales.py
+index 8017958d5de..a380ddbe7a2 100644
+--- a/salt/utils/locales.py
++++ b/salt/utils/locales.py
+@@ -1,8 +1,7 @@
+ """
+ the locale utils used by salt
+ """
+-
+-
++import locale
+ import sys
+
+ from salt.utils.decorators import memoize as real_memoize
+@@ -83,3 +82,39 @@ def normalize_locale(loc):
+ comps["codeset"] = comps["codeset"].lower().replace("-", "")
+ comps["charmap"] = ""
+ return join_locale(comps)
++
++
++def getdefaultlocale(envvars=("LC_ALL", "LC_CTYPE", "LANG", "LANGUAGE")):
++ """
++ This function was backported from Py3.11 which started triggering a
++ deprecation warning about its removal in 3.13.
++ """
++ try:
++ # check if it's supported by the _locale module
++ import _locale
++
++ code, encoding = _locale._getdefaultlocale()
++ except (ImportError, AttributeError):
++ pass
++ else:
++ # make sure the code/encoding values are valid
++ if sys.platform == "win32" and code and code[:2] == "0x":
++ # map windows language identifier to language name
++ code = locale.windows_locale.get(int(code, 0))
++ # ...add other platform-specific processing here, if
++ # necessary...
++ return code, encoding
++
++ # fall back on POSIX behaviour
++ import os
++
++ lookup = os.environ.get
++ for variable in envvars:
++ localename = lookup(variable, None)
++ if localename:
++ if variable == "LANGUAGE":
++ localename = localename.split(":")[0]
++ break
++ else:
++ localename = "C"
++ return locale._parse_localename(localename)
+diff --git a/tests/integration/states/test_ssh_auth.py b/tests/integration/states/test_ssh_auth.py
+index 660c3f62d6a..46ffc9b4115 100644
+--- a/tests/integration/states/test_ssh_auth.py
++++ b/tests/integration/states/test_ssh_auth.py
+@@ -24,6 +24,20 @@ class SSHAuthStateTests(ModuleCase, SaltReturnAssertsMixin):
+ user_ssh_dir = os.path.join(userdetails["home"], ".ssh")
+ authorized_keys_file = os.path.join(user_ssh_dir, "authorized_keys")
+
++ key1 = (
++ # Explicit no ending line break
++ "ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAAAgQC3dd5ACsvJhnIOrn6bSOkX5"
++ "KyVDpTYsVAaJj3AmEo6Fr5cHXJFJoJS+Ld8K5vCscPzuXashdYUdrhL1E5Liz"
++ "bza+zneQ5AkJ7sn2NXymD6Bbra+infO4NgnQXbGMp/NyY65jbQGqJeQ081iEV"
++ f"YbDP2zXp6fmrqqmFCaakZfGRbVw== root"
++ )
++ key2 = (
++ "AAAAB3NzaC1yc2EAAAADAQABAAAAgQC7h77HyBPCUDONCs5bI/PrrPwyYJegl0"
++ "f9YWLaBofVYOUl/uSv1ux8zjIoLVs4kguY1ihtIoK2kho4YsjNtIaAd6twdua9"
++ "oqCg2g/54cIK/8WbIjwnb3LFRgyTG5DFuj+7526EdJycAZvhSzIZYui3RUj4Vp"
++ "eMoF7mcB6TIK2/2w=="
++ )
++
+ ret = self.run_state(
+ "file.managed",
+ name=authorized_keys_file,
+@@ -31,23 +45,22 @@ class SSHAuthStateTests(ModuleCase, SaltReturnAssertsMixin):
+ makedirs=True,
+ contents_newline=False,
+ # Explicit no ending line break
+- contents="ssh-rsa AAAAB3NzaC1kc3MAAACBAL0sQ9fJ5bYTEyY== root",
++ contents=key1,
+ )
+
+ ret = self.run_state(
+ "ssh_auth.present",
+- name="AAAAB3NzaC1kcQ9J5bYTEyZ==",
++ name=key2,
+ enc="ssh-rsa",
+ user=username,
+ comment=username,
+ )
+ self.assertSaltTrueReturn(ret)
+- self.assertSaltStateChangesEqual(ret, {"AAAAB3NzaC1kcQ9J5bYTEyZ==": "New"})
++ self.assertSaltStateChangesEqual(ret, {key2: "New"})
+ with salt.utils.files.fopen(authorized_keys_file, "r") as fhr:
+ self.assertEqual(
+ fhr.read(),
+- "ssh-rsa AAAAB3NzaC1kc3MAAACBAL0sQ9fJ5bYTEyY== root\n"
+- "ssh-rsa AAAAB3NzaC1kcQ9J5bYTEyZ== {}\n".format(username),
++ f"{key1}\nssh-rsa {key2} {username}\n",
+ )
+
+ @pytest.mark.destructive_test
+@@ -60,39 +73,48 @@ class SSHAuthStateTests(ModuleCase, SaltReturnAssertsMixin):
+ authorized_keys_file = os.path.join(user_ssh_dir, "authorized_keys")
+
+ key_fname = "issue_10198.id_rsa.pub"
++ key_contents = (
++ "ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAAAgQC3dd5ACsvJhnIOrn6bSOkX5"
++ "KyVDpTYsVAaJj3AmEo6Fr5cHXJFJoJS+Ld8K5vCscPzuXashdYUdrhL1E5Liz"
++ "bza+zneQ5AkJ7sn2NXymD6Bbra+infO4NgnQXbGMp/NyY65jbQGqJeQ081iEV"
++ f"YbDP2zXp6fmrqqmFCaakZfGRbVw== {username}\n"
++ )
+
+ # Create the keyfile that we expect to get back on the state call
+ with salt.utils.files.fopen(
+ os.path.join(RUNTIME_VARS.TMP_PRODENV_STATE_TREE, key_fname), "w"
+ ) as kfh:
+- kfh.write("ssh-rsa AAAAB3NzaC1kcQ9J5bYTEyZ== {}\n".format(username))
++ kfh.write(key_contents)
+
+ # Create a bogus key file on base environment
+ with salt.utils.files.fopen(
+ os.path.join(RUNTIME_VARS.TMP_STATE_TREE, key_fname), "w"
+ ) as kfh:
+- kfh.write("ssh-rsa BAAAB3NzaC1kcQ9J5bYTEyZ== {}\n".format(username))
++ kfh.write(
++ "ssh-rsa A!AAB3NzaC1yc2EAAAADAQABAAAAgQC3dd5ACsvJhnIOrn6bSOkX5"
++ "KyVDpTYsVAaJj3AmEo6Fr5cHXJFJoJS+Ld8K5vCscPzuXashdYUdrhL1E5Liz"
++ "bza+zneQ5AkJ7sn2NXymD6Bbra+infO4NgnQXbGMp/NyY65jbQGqJeQ081iEV"
++ f"YbDP2zXp6fmrqqmFCaakZfGRbVw== {username}\n"
++ )
+
+ ret = self.run_state(
+ "ssh_auth.present",
+ name="Setup Keys",
+- source="salt://{}?saltenv=prod".format(key_fname),
++ source=f"salt://{key_fname}?saltenv=prod",
+ enc="ssh-rsa",
+ user=username,
+ comment=username,
+ )
+ self.assertSaltTrueReturn(ret)
+ with salt.utils.files.fopen(authorized_keys_file, "r") as fhr:
+- self.assertEqual(
+- fhr.read(), "ssh-rsa AAAAB3NzaC1kcQ9J5bYTEyZ== {}\n".format(username)
+- )
++ self.assertEqual(fhr.read(), key_contents)
+
+ os.unlink(authorized_keys_file)
+
+ ret = self.run_state(
+ "ssh_auth.present",
+ name="Setup Keys",
+- source="salt://{}".format(key_fname),
++ source=f"salt://{key_fname}",
+ enc="ssh-rsa",
+ user=username,
+ comment=username,
+@@ -100,6 +122,4 @@ class SSHAuthStateTests(ModuleCase, SaltReturnAssertsMixin):
+ )
+ self.assertSaltTrueReturn(ret)
+ with salt.utils.files.fopen(authorized_keys_file, "r") as fhr:
+- self.assertEqual(
+- fhr.read(), "ssh-rsa AAAAB3NzaC1kcQ9J5bYTEyZ== {}\n".format(username)
+- )
++ self.assertEqual(fhr.read(), key_contents)
+diff --git a/tests/pytests/unit/modules/state/test_state.py b/tests/pytests/unit/modules/state/test_state.py
+index 7c42646bcf7..cff66defa9d 100644
+--- a/tests/pytests/unit/modules/state/test_state.py
++++ b/tests/pytests/unit/modules/state/test_state.py
+@@ -610,7 +610,7 @@ def test_show_states_missing_sls():
+ chunks_mock = MagicMock(side_effect=[msg])
+ mock = MagicMock(side_effect=["A", None])
+ with patch.object(state, "_check_queue", mock), patch(
+- "salt.state.HighState.compile_low_chunks", chunks_mock
++ "salt.modules.state.salt.state.HighState.compile_low_chunks", chunks_mock
+ ):
+ assert state.show_low_sls("foo") == "A"
+ assert state.show_states("foo") == [msg[0]]
+diff --git a/tests/unit/states/test_module.py b/tests/unit/states/test_module.py
+index a705bd30285..4853c24ca07 100644
+--- a/tests/unit/states/test_module.py
++++ b/tests/unit/states/test_module.py
+@@ -4,7 +4,7 @@
+
+
+ import logging
+-from inspect import ArgSpec
++from inspect import FullArgSpec
+
+ import salt.states.module as module
+ from tests.support.mixins import LoaderModuleMockMixin
+@@ -117,11 +117,25 @@ class ModuleStateTest(TestCase, LoaderModuleMockMixin):
+
+ @classmethod
+ def setUpClass(cls):
+- cls.aspec = ArgSpec(
+- args=["hello", "world"], varargs=None, keywords=None, defaults=False
++ cls.aspec = FullArgSpec(
++ args=["hello", "world"],
++ varargs=None,
++ varkw=None,
++ defaults=False,
++ kwonlyargs=None,
++ kwonlydefaults=None,
++ annotations=None,
+ )
+
+- cls.bspec = ArgSpec(args=[], varargs="names", keywords="kwargs", defaults=None)
++ cls.bspec = FullArgSpec(
++ args=[],
++ varargs="names",
++ varkw=None,
++ defaults=None,
++ kwonlyargs="kwargs",
++ kwonlydefaults=None,
++ annotations=None,
++ )
+
+ @classmethod
+ def tearDownClass(cls):
+@@ -137,8 +151,8 @@ class ModuleStateTest(TestCase, LoaderModuleMockMixin):
+ module.__opts__, {"use_superseded": ["module.run"]}
+ ):
+ ret = module.run(**{CMD: None})
+- if ret["comment"] != "Unavailable function: {}.".format(CMD) or ret["result"]:
+- self.fail("module.run did not fail as expected: {}".format(ret))
++ if ret["comment"] != f"Unavailable function: {CMD}." or ret["result"]:
++ self.fail(f"module.run did not fail as expected: {ret}")
+
+ def test_run_module_not_available_testmode(self):
+ """
+@@ -151,10 +165,10 @@ class ModuleStateTest(TestCase, LoaderModuleMockMixin):
+ ):
+ ret = module.run(**{CMD: None})
+ if (
+- ret["comment"] != "Unavailable function: {}.".format(CMD)
++ ret["comment"] != f"Unavailable function: {CMD}."
+ or ret["result"] is not False
+ ):
+- self.fail("module.run did not fail as expected: {}".format(ret))
++ self.fail(f"module.run did not fail as expected: {ret}")
+
+ def test_run_module_noop(self):
+ """
+@@ -166,7 +180,7 @@ class ModuleStateTest(TestCase, LoaderModuleMockMixin):
+ ):
+ ret = module.run()
+ if ret["comment"] != "No function provided." or ret["result"] is not False:
+- self.fail("module.run did not fail as expected: {}".format(ret))
++ self.fail(f"module.run did not fail as expected: {ret}")
+
+ def test_module_run_hidden_varargs(self):
+ """
+@@ -189,10 +203,10 @@ class ModuleStateTest(TestCase, LoaderModuleMockMixin):
+ ):
+ ret = module.run(**{CMD: None})
+ if (
+- ret["comment"] != "Function {} to be executed.".format(CMD)
++ ret["comment"] != f"Function {CMD} to be executed."
+ or ret["result"] is not None
+ ):
+- self.fail("module.run failed: {}".format(ret))
++ self.fail(f"module.run failed: {ret}")
+
+ def test_run_missing_arg(self):
+ """
+@@ -203,9 +217,7 @@ class ModuleStateTest(TestCase, LoaderModuleMockMixin):
+ module.__opts__, {"use_superseded": ["module.run"]}
+ ):
+ ret = module.run(**{CMD: None})
+- self.assertEqual(
+- ret["comment"], "'{}' failed: Missing arguments: name".format(CMD)
+- )
++ self.assertEqual(ret["comment"], f"'{CMD}' failed: Missing arguments: name")
+
+ def test_run_correct_arg(self):
+ """
+@@ -216,8 +228,8 @@ class ModuleStateTest(TestCase, LoaderModuleMockMixin):
+ module.__opts__, {"use_superseded": ["module.run"]}
+ ):
+ ret = module.run(**{CMD: ["Fred"]})
+- if ret["comment"] != "{}: Success".format(CMD) or not ret["result"]:
+- self.fail("module.run failed: {}".format(ret))
++ if ret["comment"] != f"{CMD}: Success" or not ret["result"]:
++ self.fail(f"module.run failed: {ret}")
+
+ def test_run_state_apply_result_false(self):
+ """
+@@ -294,9 +306,7 @@ class ModuleStateTest(TestCase, LoaderModuleMockMixin):
+ ):
+ ret = module.run(**{CMD: ["bla", {"example": "bla"}]})
+ self.assertFalse(ret["result"])
+- self.assertEqual(
+- ret["comment"], "'{}' failed: Missing arguments: arg2".format(CMD)
+- )
++ self.assertEqual(ret["comment"], f"'{CMD}' failed: Missing arguments: arg2")
+
+ def test_run_42270_kwargs_to_args(self):
+ """
+@@ -390,9 +400,7 @@ class ModuleStateTest(TestCase, LoaderModuleMockMixin):
+ with patch.dict(module.__salt__, {}, clear=True):
+ ret = module._legacy_run(CMD)
+ self.assertFalse(ret["result"])
+- self.assertEqual(
+- ret["comment"], "Module function {} is not available".format(CMD)
+- )
++ self.assertEqual(ret["comment"], f"Module function {CMD} is not available")
+
+ def test_module_run_test_true(self):
+ """
+@@ -400,9 +408,7 @@ class ModuleStateTest(TestCase, LoaderModuleMockMixin):
+ """
+ with patch.dict(module.__opts__, {"test": True}):
+ ret = module._legacy_run(CMD)
+- self.assertEqual(
+- ret["comment"], "Module function {} is set to execute".format(CMD)
+- )
++ self.assertEqual(ret["comment"], f"Module function {CMD} is set to execute")
+
+ def test_module_run_missing_arg(self):
+ """
+diff --git a/tests/unit/test_master.py b/tests/unit/test_master.py
+index b454882f06c..96fe2a54595 100644
+--- a/tests/unit/test_master.py
++++ b/tests/unit/test_master.py
+@@ -56,6 +56,7 @@ class TransportMethodsTest(TestCase):
+ "__format__",
+ "__ge__",
+ "__getattribute__",
++ "__getstate__",
+ "__gt__",
+ "__hash__",
+ "__init__",
+@@ -71,9 +72,9 @@ class TransportMethodsTest(TestCase):
+ "__sizeof__",
+ "__str__",
+ "__subclasshook__",
++ "destroy",
+ "get_method",
+ "run_func",
+- "destroy",
+ ]
+ for name in dir(aes_funcs):
+ if name in aes_funcs.expose_methods:
+@@ -108,6 +109,7 @@ class TransportMethodsTest(TestCase):
+ "__format__",
+ "__ge__",
+ "__getattribute__",
++ "__getstate__",
+ "__gt__",
+ "__hash__",
+ "__init__",
+@@ -128,9 +130,9 @@ class TransportMethodsTest(TestCase):
+ "_prep_pub",
+ "_send_pub",
+ "_send_ssh_pub",
+- "get_method",
+- "destroy",
+ "connect",
++ "destroy",
++ "get_method",
+ ]
+ for name in dir(clear_funcs):
+ if name in clear_funcs.expose_methods:
+--
+2.44.0
+
diff --git a/fix-tests-failures-and-errors-when-detected-on-vm-ex.patch b/fix-tests-failures-and-errors-when-detected-on-vm-ex.patch
new file mode 100644
index 0000000..624a532
--- /dev/null
+++ b/fix-tests-failures-and-errors-when-detected-on-vm-ex.patch
@@ -0,0 +1,772 @@
+From 737b0bd931c07239d50e7395eb7425c06f485848 Mon Sep 17 00:00:00 2001
+From: =?UTF-8?q?Pablo=20Su=C3=A1rez=20Hern=C3=A1ndez?=
+
+Date: Thu, 14 Mar 2024 13:03:00 +0000
+Subject: [PATCH] Fix tests failures and errors when detected on VM
+ execution from Salt Shaker (#636)
+
+* test_chmod: fix test expectation
+
+* test_pkg: Adjust package expectation for SUSE family
+
+* test_docker_network: Skip non-supported operation for SUSE family
+
+* Fix tests failing due wrong docker-py version
+
+* test_version: skip test in packaged scenario when setup.py is missing
+
+* Fix issue related to docker version used during testing
+
+* Fix test errors when setup.py is not available
+
+* test_loader: do not run if setup.py is missing
+
+* test_install: Fix test errors when setup.py is not available
+
+* test_master: use a right service name expected on SUSE family
+
+* test_jinja_filters: prevent test failure when which binary is not available
+
+* Prevent errors when x509 utils cannot be loaded
+
+* test_thin: skip test if virtualenv binary is missing
+---
+ tests/integration/pillar/test_git_pillar.py | 12 +++++++++++-
+ tests/pytests/functional/cache/test_consul.py | 5 +++++
+ tests/pytests/functional/cache/test_mysql.py | 5 +++++
+ tests/pytests/functional/loader/test_loader.py | 9 +++++++++
+ .../functional/modules/state/test_jinja_filters.py | 4 ++--
+ tests/pytests/functional/modules/test_cmdmod.py | 2 +-
+ tests/pytests/functional/modules/test_dockermod.py | 8 +++++++-
+ tests/pytests/functional/modules/test_pkg.py | 2 ++
+ tests/pytests/functional/modules/test_swarm.py | 6 +++++-
+ tests/pytests/functional/states/rabbitmq/conftest.py | 11 +++++++++++
+ .../functional/states/rabbitmq/test_cluster.py | 7 ++++++-
+ .../functional/states/rabbitmq/test_plugin.py | 8 +++++++-
+ .../functional/states/rabbitmq/test_policy.py | 7 ++++++-
+ .../functional/states/rabbitmq/test_upstream.py | 7 ++++++-
+ .../pytests/functional/states/rabbitmq/test_user.py | 7 ++++++-
+ .../pytests/functional/states/rabbitmq/test_vhost.py | 7 ++++++-
+ .../pytests/functional/states/test_docker_network.py | 7 ++++++-
+ tests/pytests/functional/states/test_pkg.py | 2 +-
+ tests/pytests/functional/test_version.py | 9 +++++++++
+ tests/pytests/integration/modules/test_virt.py | 5 +++++
+ tests/pytests/integration/modules/test_x509_v2.py | 2 +-
+ tests/pytests/integration/ssh/test_log.py | 7 ++++++-
+ tests/pytests/integration/ssh/test_master.py | 2 +-
+ tests/pytests/integration/ssh/test_py_versions.py | 7 ++++++-
+ tests/pytests/integration/ssh/test_ssh_setup.py | 7 ++++++-
+ tests/pytests/integration/states/test_x509_v2.py | 2 +-
+ tests/pytests/scenarios/setup/test_install.py | 8 ++++++++
+ tests/pytests/unit/modules/test_pip.py | 8 ++++++++
+ tests/pytests/unit/utils/test_x509.py | 3 ++-
+ tests/unit/states/test_pip_state.py | 6 ++++++
+ tests/unit/utils/test_thin.py | 3 +++
+ 31 files changed, 164 insertions(+), 21 deletions(-)
+
+diff --git a/tests/integration/pillar/test_git_pillar.py b/tests/integration/pillar/test_git_pillar.py
+index 5b4cbda95c9..d56785f97c2 100644
+--- a/tests/integration/pillar/test_git_pillar.py
++++ b/tests/integration/pillar/test_git_pillar.py
+@@ -79,6 +79,7 @@ from salt.utils.gitfs import (
+ PYGIT2_VERSION,
+ FileserverConfigError,
+ )
++from salt.utils.versions import Version
+ from tests.support.gitfs import ( # pylint: disable=unused-import
+ PASSWORD,
+ USERNAME,
+@@ -101,11 +102,20 @@ try:
+ except Exception: # pylint: disable=broad-except
+ HAS_PYGIT2 = False
+
++docker = pytest.importorskip("docker")
++
+ INSIDE_CONTAINER = os.getenv("HOSTNAME", "") == "salt-test-container"
++
+ pytestmark = [
+ SKIP_INITIAL_PHOTONOS_FAILURES,
+ pytest.mark.skip_on_platforms(windows=True, darwin=True),
+- pytest.mark.skipif(INSIDE_CONTAINER, reason="Communication problems between containers."),
++ pytest.mark.skipif(
++ INSIDE_CONTAINER, reason="Communication problems between containers."
++ ),
++ pytest.mark.skipif(
++ Version(docker.__version__) < Version("4.0.0"),
++ reason="Test does not work in this version of docker-py",
++ ),
+ ]
+
+
+diff --git a/tests/pytests/functional/cache/test_consul.py b/tests/pytests/functional/cache/test_consul.py
+index c6e16d2588e..30dc6925f26 100644
+--- a/tests/pytests/functional/cache/test_consul.py
++++ b/tests/pytests/functional/cache/test_consul.py
+@@ -8,6 +8,7 @@ from saltfactories.utils import random_string
+
+ import salt.cache
+ import salt.loader
++from salt.utils.versions import Version
+ from tests.pytests.functional.cache.helpers import run_common_cache_tests
+
+ docker = pytest.importorskip("docker")
+@@ -20,6 +21,10 @@ pytestmark = [
+ pytest.mark.slow_test,
+ pytest.mark.skip_if_binaries_missing("dockerd"),
+ pytest.mark.skipif(INSIDE_CONTAINER, reason="Cannot run in a container"),
++ pytest.mark.skipif(
++ Version(docker.__version__) < Version("4.0.0"),
++ reason="Test does not work in this version of docker-py",
++ ),
+ ]
+
+
+diff --git a/tests/pytests/functional/cache/test_mysql.py b/tests/pytests/functional/cache/test_mysql.py
+index e15fc732a4a..93c6c7c6f6f 100644
+--- a/tests/pytests/functional/cache/test_mysql.py
++++ b/tests/pytests/functional/cache/test_mysql.py
+@@ -5,6 +5,7 @@ import pytest
+
+ import salt.cache
+ import salt.loader
++from salt.utils.versions import Version
+ from tests.pytests.functional.cache.helpers import run_common_cache_tests
+ from tests.support.pytest.mysql import * # pylint: disable=wildcard-import,unused-wildcard-import
+
+@@ -18,6 +19,10 @@ pytestmark = [
+ pytest.mark.slow_test,
+ pytest.mark.skip_if_binaries_missing("dockerd"),
+ pytest.mark.skipif(INSIDE_CONTAINER, reason="Cannot run in a container"),
++ pytest.mark.skipif(
++ Version(docker.__version__) < Version("4.0.0"),
++ reason="Test does not work in this version of docker-py",
++ ),
+ ]
+
+
+diff --git a/tests/pytests/functional/loader/test_loader.py b/tests/pytests/functional/loader/test_loader.py
+index 963d33f59c3..e81ef126ca3 100644
+--- a/tests/pytests/functional/loader/test_loader.py
++++ b/tests/pytests/functional/loader/test_loader.py
+@@ -1,14 +1,23 @@
+ import json
++import os
+
+ import pytest
+
+ from salt.utils.versions import Version
+ from tests.support.helpers import SaltVirtualEnv
+ from tests.support.pytest.helpers import FakeSaltExtension
++from tests.support.runtests import RUNTIME_VARS
++
++MISSING_SETUP_PY_FILE = not os.path.exists(
++ os.path.join(RUNTIME_VARS.CODE_DIR, "setup.py")
++)
+
+ pytestmark = [
+ # These are slow because they create a virtualenv and install salt in it
+ pytest.mark.slow_test,
++ pytest.mark.skipif(
++ MISSING_SETUP_PY_FILE, reason="This test only work if setup.py is available"
++ ),
+ ]
+
+
+diff --git a/tests/pytests/functional/modules/state/test_jinja_filters.py b/tests/pytests/functional/modules/state/test_jinja_filters.py
+index 220310aaaf0..cc8ffcb731b 100644
+--- a/tests/pytests/functional/modules/state/test_jinja_filters.py
++++ b/tests/pytests/functional/modules/state/test_jinja_filters.py
+@@ -798,9 +798,9 @@ def _filter_id(value):
+ ),
+ Filter(
+ name="which",
+- expected={"ret": salt.utils.path.which("which")},
++ expected={"ret": salt.utils.path.which("ls")},
+ sls="""
+- {% set result = 'which' | which() %}
++ {% set result = 'ls' | which() %}
+ test:
+ module.run:
+ - name: test.echo
+diff --git a/tests/pytests/functional/modules/test_cmdmod.py b/tests/pytests/functional/modules/test_cmdmod.py
+index d30b474c6d2..adaf469c283 100644
+--- a/tests/pytests/functional/modules/test_cmdmod.py
++++ b/tests/pytests/functional/modules/test_cmdmod.py
+@@ -105,7 +105,7 @@ def test_run(cmdmod):
+ template="jinja",
+ python_shell=True,
+ )
+- == "func-tests-minion"
++ == "func-tests-minion-opts"
+ )
+ assert cmdmod.run("grep f", stdin="one\ntwo\nthree\nfour\nfive\n") == "four\nfive"
+ assert cmdmod.run('echo "a=b" | sed -e s/=/:/g', python_shell=True) == "a:b"
+diff --git a/tests/pytests/functional/modules/test_dockermod.py b/tests/pytests/functional/modules/test_dockermod.py
+index a5b40869352..eb0cc20f9ff 100644
+--- a/tests/pytests/functional/modules/test_dockermod.py
++++ b/tests/pytests/functional/modules/test_dockermod.py
+@@ -8,7 +8,9 @@ import pytest
+ from saltfactories.utils import random_string
+ from saltfactories.utils.functional import StateResult
+
+-pytest.importorskip("docker")
++from salt.utils.versions import Version
++
++docker = pytest.importorskip("docker")
+
+ log = logging.getLogger(__name__)
+
+@@ -18,6 +20,10 @@ pytestmark = [
+ pytest.mark.slow_test,
+ pytest.mark.skip_if_binaries_missing("docker", "dockerd", check_all=False),
+ pytest.mark.skipif(INSIDE_CONTAINER, reason="Cannot run inside a container"),
++ pytest.mark.skipif(
++ Version(docker.__version__) < Version("4.0.0"),
++ reason="Test does not work in this version of docker-py",
++ ),
+ ]
+
+
+diff --git a/tests/pytests/functional/modules/test_pkg.py b/tests/pytests/functional/modules/test_pkg.py
+index 707361c227b..7cedd32bf6c 100644
+--- a/tests/pytests/functional/modules/test_pkg.py
++++ b/tests/pytests/functional/modules/test_pkg.py
+@@ -67,6 +67,8 @@ def test_pkg(grains):
+ _pkg = "units"
+ elif grains["os_family"] == "Debian":
+ _pkg = "ifenslave"
++ elif grains["os_family"] == "Suse":
++ _pkg = "wget"
+ return _pkg
+
+
+diff --git a/tests/pytests/functional/modules/test_swarm.py b/tests/pytests/functional/modules/test_swarm.py
+index 9dc70f5b3dc..fc3c2b739cd 100644
+--- a/tests/pytests/functional/modules/test_swarm.py
++++ b/tests/pytests/functional/modules/test_swarm.py
+@@ -20,7 +20,11 @@ pytest.importorskip("docker")
+ def docker_version(shell, grains):
+ ret = shell.run("docker", "--version")
+ assert ret.returncode == 0
+- return salt.utils.versions.Version(ret.stdout.split(",")[0].split()[-1].strip())
++ # Example output:
++ # Docker version 24.0.7-ce, build 311b9ff0aa93
++ return salt.utils.versions.Version(
++ ret.stdout.split(",")[0].split()[-1].split("-")[0].strip()
++ )
+
+
+ @pytest.fixture
+diff --git a/tests/pytests/functional/states/rabbitmq/conftest.py b/tests/pytests/functional/states/rabbitmq/conftest.py
+index d8ccc1761b8..60f8206a088 100644
+--- a/tests/pytests/functional/states/rabbitmq/conftest.py
++++ b/tests/pytests/functional/states/rabbitmq/conftest.py
+@@ -5,8 +5,19 @@ import attr
+ import pytest
+ from saltfactories.utils import random_string
+
++from salt.utils.versions import Version
++
+ log = logging.getLogger(__name__)
+
++docker = pytest.importorskip("docker")
++
++pytestmark = [
++ pytest.mark.skipif(
++ Version(docker.__version__) < Version("4.0.0"),
++ reason="Test does not work in this version of docker-py",
++ ),
++]
++
+
+ @attr.s(kw_only=True, slots=True)
+ class RabbitMQImage:
+diff --git a/tests/pytests/functional/states/rabbitmq/test_cluster.py b/tests/pytests/functional/states/rabbitmq/test_cluster.py
+index 210b22a2360..df85f04f78d 100644
+--- a/tests/pytests/functional/states/rabbitmq/test_cluster.py
++++ b/tests/pytests/functional/states/rabbitmq/test_cluster.py
+@@ -9,8 +9,9 @@ import pytest
+
+ import salt.modules.rabbitmq as rabbitmq
+ import salt.states.rabbitmq_cluster as rabbitmq_cluster
++from salt.utils.versions import Version
+
+-pytest.importorskip("docker")
++docker = pytest.importorskip("docker")
+
+ log = logging.getLogger(__name__)
+
+@@ -22,6 +23,10 @@ pytestmark = [
+ "docker", "dockerd", reason="Docker not installed"
+ ),
+ pytest.mark.skipif(INSIDE_CONTAINER, reason="Cannot run in a container"),
++ pytest.mark.skipif(
++ Version(docker.__version__) < Version("4.0.0"),
++ reason="Test does not work in this version of docker-py",
++ ),
+ ]
+
+
+diff --git a/tests/pytests/functional/states/rabbitmq/test_plugin.py b/tests/pytests/functional/states/rabbitmq/test_plugin.py
+index f1191490536..6ed4cdc9238 100644
+--- a/tests/pytests/functional/states/rabbitmq/test_plugin.py
++++ b/tests/pytests/functional/states/rabbitmq/test_plugin.py
+@@ -9,11 +9,13 @@ import pytest
+
+ import salt.modules.rabbitmq as rabbitmq
+ import salt.states.rabbitmq_plugin as rabbitmq_plugin
++from salt.utils.versions import Version
+ from tests.support.mock import patch
+
+ log = logging.getLogger(__name__)
+
+-pytest.importorskip("docker")
++docker = pytest.importorskip("docker")
++
+
+ INSIDE_CONTAINER = os.getenv("HOSTNAME", "") == "salt-test-container"
+
+@@ -23,6 +25,10 @@ pytestmark = [
+ "docker", "dockerd", reason="Docker not installed"
+ ),
+ pytest.mark.skipif(INSIDE_CONTAINER, reason="Cannot run in a container"),
++ pytest.mark.skipif(
++ Version(docker.__version__) < Version("4.0.0"),
++ reason="Test does not work in this version of docker-py",
++ ),
+ ]
+
+
+diff --git a/tests/pytests/functional/states/rabbitmq/test_policy.py b/tests/pytests/functional/states/rabbitmq/test_policy.py
+index 7ccf6a522e0..c648c9ff947 100644
+--- a/tests/pytests/functional/states/rabbitmq/test_policy.py
++++ b/tests/pytests/functional/states/rabbitmq/test_policy.py
+@@ -9,11 +9,12 @@ import pytest
+
+ import salt.modules.rabbitmq as rabbitmq
+ import salt.states.rabbitmq_policy as rabbitmq_policy
++from salt.utils.versions import Version
+ from tests.support.mock import MagicMock, patch
+
+ log = logging.getLogger(__name__)
+
+-pytest.importorskip("docker")
++docker = pytest.importorskip("docker")
+
+ INSIDE_CONTAINER = os.getenv("HOSTNAME", "") == "salt-test-container"
+
+@@ -23,6 +24,10 @@ pytestmark = [
+ "docker", "dockerd", reason="Docker not installed"
+ ),
+ pytest.mark.skipif(INSIDE_CONTAINER, reason="Cannot run in a container"),
++ pytest.mark.skipif(
++ Version(docker.__version__) < Version("4.0.0"),
++ reason="Test does not work in this version of docker-py",
++ ),
+ ]
+
+
+diff --git a/tests/pytests/functional/states/rabbitmq/test_upstream.py b/tests/pytests/functional/states/rabbitmq/test_upstream.py
+index c7bcf3b0d44..0a9686d6948 100644
+--- a/tests/pytests/functional/states/rabbitmq/test_upstream.py
++++ b/tests/pytests/functional/states/rabbitmq/test_upstream.py
+@@ -9,10 +9,11 @@ import pytest
+
+ import salt.modules.rabbitmq as rabbitmq
+ import salt.states.rabbitmq_upstream as rabbitmq_upstream
++from salt.utils.versions import Version
+
+ log = logging.getLogger(__name__)
+
+-pytest.importorskip("docker")
++docker = pytest.importorskip("docker")
+
+ INSIDE_CONTAINER = os.getenv("HOSTNAME", "") == "salt-test-container"
+
+@@ -22,6 +23,10 @@ pytestmark = [
+ "docker", "dockerd", reason="Docker not installed"
+ ),
+ pytest.mark.skipif(INSIDE_CONTAINER, reason="Cannot run in a container"),
++ pytest.mark.skipif(
++ Version(docker.__version__) < Version("4.0.0"),
++ reason="Test does not work in this version of docker-py",
++ ),
+ ]
+
+
+diff --git a/tests/pytests/functional/states/rabbitmq/test_user.py b/tests/pytests/functional/states/rabbitmq/test_user.py
+index 31723df7be8..a6b0766087f 100644
+--- a/tests/pytests/functional/states/rabbitmq/test_user.py
++++ b/tests/pytests/functional/states/rabbitmq/test_user.py
+@@ -9,10 +9,11 @@ import pytest
+
+ import salt.modules.rabbitmq as rabbitmq
+ import salt.states.rabbitmq_user as rabbitmq_user
++from salt.utils.versions import Version
+
+ log = logging.getLogger(__name__)
+
+-pytest.importorskip("docker")
++docker = pytest.importorskip("docker")
+
+ INSIDE_CONTAINER = os.getenv("HOSTNAME", "") == "salt-test-container"
+
+@@ -22,6 +23,10 @@ pytestmark = [
+ "docker", "dockerd", reason="Docker not installed"
+ ),
+ pytest.mark.skipif(INSIDE_CONTAINER, reason="Cannot run in a container"),
++ pytest.mark.skipif(
++ Version(docker.__version__) < Version("4.0.0"),
++ reason="Test does not work in this version of docker-py",
++ ),
+ ]
+
+
+diff --git a/tests/pytests/functional/states/rabbitmq/test_vhost.py b/tests/pytests/functional/states/rabbitmq/test_vhost.py
+index d6ac6901a25..f3553c03e58 100644
+--- a/tests/pytests/functional/states/rabbitmq/test_vhost.py
++++ b/tests/pytests/functional/states/rabbitmq/test_vhost.py
+@@ -9,10 +9,11 @@ import pytest
+
+ import salt.modules.rabbitmq as rabbitmq
+ import salt.states.rabbitmq_vhost as rabbitmq_vhost
++from salt.utils.versions import Version
+
+ log = logging.getLogger(__name__)
+
+-pytest.importorskip("docker")
++docker = pytest.importorskip("docker")
+
+ INSIDE_CONTAINER = os.getenv("HOSTNAME", "") == "salt-test-container"
+
+@@ -22,6 +23,10 @@ pytestmark = [
+ "docker", "dockerd", reason="Docker not installed"
+ ),
+ pytest.mark.skipif(INSIDE_CONTAINER, reason="Cannot run in a container"),
++ pytest.mark.skipif(
++ Version(docker.__version__) < Version("4.0.0"),
++ reason="Test does not work in this version of docker-py",
++ ),
+ ]
+
+
+diff --git a/tests/pytests/functional/states/test_docker_network.py b/tests/pytests/functional/states/test_docker_network.py
+index 0da01ed8bac..19868d03ad1 100644
+--- a/tests/pytests/functional/states/test_docker_network.py
++++ b/tests/pytests/functional/states/test_docker_network.py
+@@ -220,10 +220,15 @@ def test_present_with_containers(network, docker, docker_network, container):
+
+
+ @pytest.mark.parametrize("reconnect", [True, False])
+-def test_present_with_reconnect(network, docker, docker_network, container, reconnect):
++def test_present_with_reconnect(
++ network, docker, docker_network, container, reconnect, grains
++):
+ """
+ Test reconnecting with containers not passed to state
+ """
++ if grains["os_family"] == "Suse":
++ pytest.skip("This test is failing for SUSE family")
++
+ with network() as net:
+ ret = docker_network.present(name=net.name, driver="bridge")
+ assert ret.result is True
+diff --git a/tests/pytests/functional/states/test_pkg.py b/tests/pytests/functional/states/test_pkg.py
+index 12318c996d1..864c1d025f3 100644
+--- a/tests/pytests/functional/states/test_pkg.py
++++ b/tests/pytests/functional/states/test_pkg.py
+@@ -55,7 +55,7 @@ def PKG_TARGETS(grains):
+ else:
+ _PKG_TARGETS = ["units", "zsh-html"]
+ elif grains["os_family"] == "Suse":
+- _PKG_TARGETS = ["lynx", "htop"]
++ _PKG_TARGETS = ["iotop", "screen"]
+ return _PKG_TARGETS
+
+
+diff --git a/tests/pytests/functional/test_version.py b/tests/pytests/functional/test_version.py
+index dfa8850557e..3b85c05ccc6 100644
+--- a/tests/pytests/functional/test_version.py
++++ b/tests/pytests/functional/test_version.py
+@@ -1,14 +1,23 @@
+ import json
+ import logging
++import os
+
+ import pytest
+
+ from tests.support.helpers import SaltVirtualEnv
+ from tests.support.pytest.helpers import FakeSaltExtension
++from tests.support.runtests import RUNTIME_VARS
++
++MISSING_SETUP_PY_FILE = not os.path.exists(
++ os.path.join(RUNTIME_VARS.CODE_DIR, "setup.py")
++)
+
+ pytestmark = [
+ # These are slow because they create a virtualenv and install salt in it
+ pytest.mark.slow_test,
++ pytest.mark.skipif(
++ MISSING_SETUP_PY_FILE, reason="This test only work if setup.py is available"
++ ),
+ ]
+
+ log = logging.getLogger(__name__)
+diff --git a/tests/pytests/integration/modules/test_virt.py b/tests/pytests/integration/modules/test_virt.py
+index 1b7f30154a7..572923764bb 100644
+--- a/tests/pytests/integration/modules/test_virt.py
++++ b/tests/pytests/integration/modules/test_virt.py
+@@ -9,6 +9,7 @@ from xml.etree import ElementTree
+ import pytest
+
+ import salt.version
++from salt.utils.versions import Version
+ from tests.support.virt import SaltVirtMinionContainerFactory
+
+ docker = pytest.importorskip("docker")
+@@ -21,6 +22,10 @@ pytestmark = [
+ pytest.mark.slow_test,
+ pytest.mark.skip_if_binaries_missing("docker"),
+ pytest.mark.skipif(INSIDE_CONTAINER, reason="Cannot run in a container"),
++ pytest.mark.skipif(
++ Version(docker.__version__) < Version("4.0.0"),
++ reason="Test does not work in this version of docker-py",
++ ),
+ ]
+
+
+diff --git a/tests/pytests/integration/modules/test_x509_v2.py b/tests/pytests/integration/modules/test_x509_v2.py
+index 2fd005778c5..cc8712e45cd 100644
+--- a/tests/pytests/integration/modules/test_x509_v2.py
++++ b/tests/pytests/integration/modules/test_x509_v2.py
+@@ -11,7 +11,7 @@ from pathlib import Path
+ import pytest
+ from saltfactories.utils import random_string
+
+-import salt.utils.x509 as x509util
++x509util = pytest.importorskip("salt.utils.x509")
+
+ try:
+ import cryptography
+diff --git a/tests/pytests/integration/ssh/test_log.py b/tests/pytests/integration/ssh/test_log.py
+index 683feb8bd91..a63dd72373d 100644
+--- a/tests/pytests/integration/ssh/test_log.py
++++ b/tests/pytests/integration/ssh/test_log.py
+@@ -8,9 +8,10 @@ import time
+ import pytest
+ from saltfactories.utils import random_string
+
++from salt.utils.versions import Version
+ from tests.support.helpers import Keys
+
+-pytest.importorskip("docker")
++docker = pytest.importorskip("docker")
+
+ INSIDE_CONTAINER = os.getenv("HOSTNAME", "") == "salt-test-container"
+
+@@ -20,6 +21,10 @@ pytestmark = [
+ pytest.mark.slow_test,
+ pytest.mark.skip_if_binaries_missing("dockerd"),
+ pytest.mark.skipif(INSIDE_CONTAINER, reason="Cannot run in a container"),
++ pytest.mark.skipif(
++ Version(docker.__version__) < Version("4.0.0"),
++ reason="Test does not work in this version of docker-py",
++ ),
+ ]
+
+
+diff --git a/tests/pytests/integration/ssh/test_master.py b/tests/pytests/integration/ssh/test_master.py
+index 0c2f482cf9f..c658123726b 100644
+--- a/tests/pytests/integration/ssh/test_master.py
++++ b/tests/pytests/integration/ssh/test_master.py
+@@ -23,7 +23,7 @@ def test_service(salt_ssh_cli, grains):
+ os_release = grains["osrelease"]
+ if os_family == "RedHat":
+ service = "crond"
+- elif os_family == "Arch":
++ elif os_family in ["Suse", "Arch"]:
+ service = "sshd"
+ elif os_family == "MacOS":
+ service = "org.ntp.ntpd"
+diff --git a/tests/pytests/integration/ssh/test_py_versions.py b/tests/pytests/integration/ssh/test_py_versions.py
+index 71d4cfaa94e..991a3b71c44 100644
+--- a/tests/pytests/integration/ssh/test_py_versions.py
++++ b/tests/pytests/integration/ssh/test_py_versions.py
+@@ -9,9 +9,10 @@ import time
+ import pytest
+ from saltfactories.utils import random_string
+
++from salt.utils.versions import Version
+ from tests.support.helpers import Keys
+
+-pytest.importorskip("docker")
++docker = pytest.importorskip("docker")
+
+ INSIDE_CONTAINER = os.getenv("HOSTNAME", "") == "salt-test-container"
+
+@@ -21,6 +22,10 @@ pytestmark = [
+ pytest.mark.slow_test,
+ pytest.mark.skip_if_binaries_missing("dockerd"),
+ pytest.mark.skipif(INSIDE_CONTAINER, reason="Cannot run in a container"),
++ pytest.mark.skipif(
++ Version(docker.__version__) < Version("4.0.0"),
++ reason="Test does not work in this version of docker-py",
++ ),
+ ]
+
+
+diff --git a/tests/pytests/integration/ssh/test_ssh_setup.py b/tests/pytests/integration/ssh/test_ssh_setup.py
+index 79b55ad90a5..97494bed36b 100644
+--- a/tests/pytests/integration/ssh/test_ssh_setup.py
++++ b/tests/pytests/integration/ssh/test_ssh_setup.py
+@@ -13,9 +13,10 @@ import pytest
+ from pytestshellutils.utils.processes import ProcessResult, terminate_process
+ from saltfactories.utils import random_string
+
++from salt.utils.versions import Version
+ from tests.support.helpers import Keys
+
+-pytest.importorskip("docker")
++docker = pytest.importorskip("docker")
+
+ INSIDE_CONTAINER = os.getenv("HOSTNAME", "") == "salt-test-container"
+
+@@ -25,6 +26,10 @@ pytestmark = [
+ pytest.mark.slow_test,
+ pytest.mark.skip_if_binaries_missing("dockerd"),
+ pytest.mark.skipif(INSIDE_CONTAINER, reason="Cannot run in a container"),
++ pytest.mark.skipif(
++ Version(docker.__version__) < Version("4.0.0"),
++ reason="Test does not work in this version of docker-py",
++ ),
+ ]
+
+
+diff --git a/tests/pytests/integration/states/test_x509_v2.py b/tests/pytests/integration/states/test_x509_v2.py
+index 9a1c09bb8bd..4f943412950 100644
+--- a/tests/pytests/integration/states/test_x509_v2.py
++++ b/tests/pytests/integration/states/test_x509_v2.py
+@@ -10,7 +10,7 @@ from pathlib import Path
+ import pytest
+ from saltfactories.utils import random_string
+
+-import salt.utils.x509 as x509util
++x509util = pytest.importorskip("salt.utils.x509")
+
+ try:
+ import cryptography
+diff --git a/tests/pytests/scenarios/setup/test_install.py b/tests/pytests/scenarios/setup/test_install.py
+index 7664fda804e..7a4abfc6e9e 100644
+--- a/tests/pytests/scenarios/setup/test_install.py
++++ b/tests/pytests/scenarios/setup/test_install.py
+@@ -14,11 +14,16 @@ import salt.utils.path
+ import salt.utils.platform
+ import salt.version
+ from salt.modules.virtualenv_mod import KNOWN_BINARY_NAMES
++from tests.support.runtests import RUNTIME_VARS
+
+ log = logging.getLogger(__name__)
+
+ INSIDE_CONTAINER = os.getenv("HOSTNAME", "") == "salt-test-container"
+
++MISSING_SETUP_PY_FILE = not os.path.exists(
++ os.path.join(RUNTIME_VARS.CODE_DIR, "setup.py")
++)
++
+ pytestmark = [
+ pytest.mark.core_test,
+ pytest.mark.windows_whitelisted,
+@@ -27,6 +32,9 @@ pytestmark = [
+ pytest.mark.skipif(
+ INSIDE_CONTAINER, reason="No gcc and python3-devel in container."
+ ),
++ pytest.mark.skipif(
++ MISSING_SETUP_PY_FILE, reason="This test only work if setup.py is available"
++ ),
+ ]
+
+
+diff --git a/tests/pytests/unit/modules/test_pip.py b/tests/pytests/unit/modules/test_pip.py
+index c03e6ed292b..4b2da77786b 100644
+--- a/tests/pytests/unit/modules/test_pip.py
++++ b/tests/pytests/unit/modules/test_pip.py
+@@ -9,6 +9,11 @@ import salt.utils.files
+ import salt.utils.platform
+ from salt.exceptions import CommandExecutionError
+ from tests.support.mock import MagicMock, patch
++from tests.support.runtests import RUNTIME_VARS
++
++MISSING_SETUP_PY_FILE = not os.path.exists(
++ os.path.join(RUNTIME_VARS.CODE_DIR, "setup.py")
++)
+
+
+ class FakeFopen:
+@@ -1738,6 +1743,9 @@ def test_when_version_is_called_with_a_user_it_should_be_passed_to_undelying_run
+ )
+
+
++@pytest.mark.skipif(
++ MISSING_SETUP_PY_FILE, reason="This test only work if setup.py is available"
++)
+ @pytest.mark.parametrize(
+ "bin_env,target,target_env,expected_target",
+ [
+diff --git a/tests/pytests/unit/utils/test_x509.py b/tests/pytests/unit/utils/test_x509.py
+index 25971af40d8..dade9eda46b 100644
+--- a/tests/pytests/unit/utils/test_x509.py
++++ b/tests/pytests/unit/utils/test_x509.py
+@@ -4,9 +4,10 @@ import ipaddress
+ import pytest
+
+ import salt.exceptions
+-import salt.utils.x509 as x509
+ from tests.support.mock import ANY, Mock, patch
+
++x509 = pytest.importorskip("salt.utils.x509")
++
+ try:
+ import cryptography
+ import cryptography.x509 as cx509
+diff --git a/tests/unit/states/test_pip_state.py b/tests/unit/states/test_pip_state.py
+index 981ad46a135..d70b1150008 100644
+--- a/tests/unit/states/test_pip_state.py
++++ b/tests/unit/states/test_pip_state.py
+@@ -27,6 +27,9 @@ try:
+ except ImportError:
+ HAS_PIP = False
+
++MISSING_SETUP_PY_FILE = not os.path.exists(
++ os.path.join(RUNTIME_VARS.CODE_DIR, "setup.py")
++)
+
+ log = logging.getLogger(__name__)
+
+@@ -408,6 +411,9 @@ class PipStateUtilsTest(TestCase):
+
+ @pytest.mark.skip_if_binaries_missing(*KNOWN_BINARY_NAMES, check_all=False)
+ @pytest.mark.requires_network
++@pytest.mark.skipif(
++ MISSING_SETUP_PY_FILE, reason="This test only work if setup.py is available"
++)
+ class PipStateInstallationErrorTest(TestCase):
+ @pytest.mark.slow_test
+ def test_importable_installation_error(self):
+diff --git a/tests/unit/utils/test_thin.py b/tests/unit/utils/test_thin.py
+index 7fd1e7b5dc3..c4e9c3b3bef 100644
+--- a/tests/unit/utils/test_thin.py
++++ b/tests/unit/utils/test_thin.py
+@@ -1379,6 +1379,9 @@ class SSHThinTestCase(TestCase):
+ assert [x for x in calls if "{}".format(_file) in x[-2]]
+
+ @pytest.mark.slow_test
++ @pytest.mark.skip_if_binaries_missing(
++ "virtualenv", reason="Needs virtualenv binary"
++ )
+ @pytest.mark.skip_on_windows(reason="salt-ssh does not deploy to/from windows")
+ def test_thin_dir(self):
+ """
+--
+2.43.0
+
+
diff --git a/make-importing-seco.range-thread-safe-bsc-1211649.patch b/make-importing-seco.range-thread-safe-bsc-1211649.patch
new file mode 100644
index 0000000..f60e60c
--- /dev/null
+++ b/make-importing-seco.range-thread-safe-bsc-1211649.patch
@@ -0,0 +1,63 @@
+From 0913a58a36ef69d957dd9cc5c95fafe6d56448d5 Mon Sep 17 00:00:00 2001
+From: Marek Czernek
+Date: Mon, 4 Mar 2024 11:27:35 +0100
+Subject: [PATCH] Make importing seco.range thread safe (bsc#1211649)
+
+---
+ salt/roster/range.py | 5 +++++
+ salt/utils/roster_matcher.py | 5 +++++
+ 2 files changed, 10 insertions(+)
+
+diff --git a/salt/roster/range.py b/salt/roster/range.py
+index 3f039dcef42..1525f70c32b 100644
+--- a/salt/roster/range.py
++++ b/salt/roster/range.py
+@@ -15,16 +15,21 @@ import copy
+ import fnmatch
+ import logging
+
++import salt.loader
++
+ log = logging.getLogger(__name__)
+
+ # Try to import range from https://github.com/ytoolshed/range
+ HAS_RANGE = False
+ try:
++ salt.loader.LOAD_LOCK.acquire()
+ import seco.range
+
+ HAS_RANGE = True
+ except ImportError:
+ log.error("Unable to load range library")
++finally:
++ salt.loader.LOAD_LOCK.release()
+ # pylint: enable=import-error
+
+
+diff --git a/salt/utils/roster_matcher.py b/salt/utils/roster_matcher.py
+index db5dfda3e03..5165dc122b7 100644
+--- a/salt/utils/roster_matcher.py
++++ b/salt/utils/roster_matcher.py
+@@ -8,14 +8,19 @@ import functools
+ import logging
+ import re
+
++import salt.loader
++
+ # Try to import range from https://github.com/ytoolshed/range
+ HAS_RANGE = False
+ try:
++ salt.loader.LOAD_LOCK.acquire()
+ import seco.range
+
+ HAS_RANGE = True
+ except ImportError:
+ pass
++finally:
++ salt.loader.LOAD_LOCK.release()
+ # pylint: enable=import-error
+
+
+--
+2.44.0
+
diff --git a/salt.changes b/salt.changes
index 98a9fc4..6fd1e75 100644
--- a/salt.changes
+++ b/salt.changes
@@ -1,3 +1,27 @@
+-------------------------------------------------------------------
+Tue Apr 16 15:32:23 UTC 2024 - Yeray Gutiérrez Cedrés
+
+- Convert oscap output to UTF-8
+- Make Salt compatible with Python 3.11
+- Ignore non-ascii chars in oscap output (bsc#1219001)
+- Fix detected issues in Salt tests when running on VMs
+- Make importing seco.range thread safe (bsc#1211649)
+- Fix problematic tests and allow smooth tests executions on containers
+- Discover Ansible playbook files as "*.yml" or "*.yaml" files (bsc#1211888)
+- Provide user(salt)/group(salt) capabilities for RPM 4.19
+- Extend dependencies for python3-salt-testsuite and python3-salt packages
+- Improve Salt and testsuite packages multibuild
+- Enable multibuild and create test flavor
+
+- Added:
+ * fix-tests-failures-and-errors-when-detected-on-vm-ex.patch
+ * decode-oscap-byte-stream-to-string-bsc-1219001.patch
+ * make-importing-seco.range-thread-safe-bsc-1211649.patch
+ * switch-oscap-encoding-to-utf-8-639.patch
+ * fix-salt-warnings-and-testuite-for-python-3.11-635.patch
+ * fix-problematic-tests-and-allow-smooth-tests-executi.patch
+ * discover-both-.yml-and-.yaml-playbooks-bsc-1211888.patch
+
-------------------------------------------------------------------
Thu Feb 1 14:48:40 UTC 2024 - Pablo Suárez Hernández
diff --git a/salt.spec b/salt.spec
index 1fee237..91787d2 100644
--- a/salt.spec
+++ b/salt.spec
@@ -16,6 +16,13 @@
#
%global debug_package %{nil}
+%global flavor @BUILD_FLAVOR@%{nil}
+%if "%{flavor}" == "testsuite"
+%define psuffix -test
+%else
+%define psuffix %{nil}
+%endif
+
%if 0%{?suse_version} > 1210 || 0%{?rhel} >= 7 || 0%{?fedora} >=28
%bcond_without systemd
%else
@@ -31,11 +38,10 @@
%bcond_with fish_completion
%bcond_with zsh_completion
%endif
-%bcond_with test
%bcond_without docs
%bcond_with builddocs
-Name: salt
+Name: salt%{psuffix}
Version: 3006.0
Release: 0
Summary: A parallel remote execution system
@@ -343,7 +349,27 @@ Patch97: fixed-keyerror-in-logs-when-running-a-state-that-fai.patch
Patch98: improve-pip-target-override-condition-with-venv_pip_.patch
# PATCH-FIX_UPSTREAM https://github.com/saltstack/salt/pull/65819
Patch99: allow-kwargs-for-fileserver-roots-update-bsc-1218482.patch
-
+# PATCH-FIX_UPSTREAM https://github.com/saltstack/salt/pull/66048
+Patch100: discover-both-.yml-and-.yaml-playbooks-bsc-1211888.patch
+# PATCH-FIX_UPSTREAM https://github.com/saltstack/salt/pull/66130
+Patch101: fix-problematic-tests-and-allow-smooth-tests-executi.patch
+# PATCH-FIX_OPENSUSE: https://github.com/openSUSE/salt/pull/628
+Patch102: make-importing-seco.range-thread-safe-bsc-1211649.patch
+# PATCH-FIX_UPSTREAM https://github.com/saltstack/salt/pull/66130
+Patch103: fix-tests-failures-and-errors-when-detected-on-vm-ex.patch
+# PATCH-FIX_UPSTREAM https://github.com/saltstack/salt/pull/66234 (modified at Patch106)
+Patch104: decode-oscap-byte-stream-to-string-bsc-1219001.patch
+### Commits to make Salt compatible with Python 3.11 (and 3.6)
+# PATCH-FIX_UPSTREAM https://github.com/saltstack/salt/commit/d92b5423464f93da6e3feb47d05a9acef8da75f9
+# PATCH-FIX_UPSTREAM https://github.com/saltstack/salt/commit/60b36489225f958772b6a2f99d8fe6ae33ee9d73
+# PATCH-FIX_UPSTREAM https://github.com/saltstack/salt/commit/eee0eca5e48922e5e404f812ced08ca7484bb568
+# PATCH-FIX_UPSTREAM https://github.com/saltstack/salt/commit/991f7cf0a7baf08a31194ce52f4ec08290db8e52
+# PATCH-FIX_UPSTREAM https://github.com/saltstack/salt/commit/2688c86f45eea9f8a6b916fcdf8eb94b3f5e185b
+# PATCH-FIX_UPSTREAM https://github.com/saltstack/salt/commit/a1873a1d6d50d1769ddef528d7442e38aba9de23
+# PATCH-FIX_OPENSUSE https://github.com/openSUSE/salt/commit/c7ecccb0a080ca9cca097f760ef0992ab34f82df
+Patch105: fix-salt-warnings-and-testuite-for-python-3.11-635.patch
+# PATCH-FIX_OPENSUSE: https://github.com/openSUSE/salt/pull/639
+Patch106: switch-oscap-encoding-to-utf-8-639.patch
### IMPORTANT: The line below is used as a snippet marker. Do not touch it.
### SALT PATCHES LIST END
@@ -359,6 +385,8 @@ Obsoletes: python2-%{name}
Requires(pre): %{_sbindir}/groupadd
Requires(pre): %{_sbindir}/useradd
+Provides: user(salt)
+Provides: group(salt)
%if 0%{?suse_version}
Requires(pre): %fillup_prereq
@@ -422,6 +450,8 @@ malleable. Salt accomplishes this via its ability to handle larger loads of
information, and not just dozens, but hundreds or even thousands of individual
servers, handle them quickly and through a simple and manageable interface.
+%if "%{flavor}" != "testsuite"
+
%package -n python3-salt
Summary: python3 library for salt
Group: System/Management
@@ -462,7 +492,7 @@ BuildRequires: python3-packaging
# requirements/zeromq.txt
%if %{with test}
BuildRequires: python3-boto >= 2.32.1
-BuildRequires: python3-mock
+BuildRequires: %{python3-mock if %python-base < 3.8}
BuildRequires: python3-moto >= 0.3.6
BuildRequires: python3-pip
BuildRequires: python3-salt-testing >= 2015.2.16
@@ -531,6 +561,12 @@ Recommends: python3-netaddr
Recommends: python3-pyinotify
%endif
+# Required by Salt modules
+Requires: iputils
+Requires: sudo
+Requires: file
+Requires: man
+
Provides: bundled(python3-tornado) = 4.5.3
%description -n python3-salt
@@ -699,31 +735,6 @@ Requires(pre): %fillup_prereq
Salt ssh is a master running without zmq.
it enables the management of minions over a ssh connection.
-%package -n python3-salt-testsuite
-Summary: Unit and integration tests for Salt
-Requires: %{name} = %{version}-%{release}
-Requires: python3-CherryPy
-Requires: python3-Genshi
-Requires: python3-Mako
-%if !0%{?suse_version} > 1600 || 0%{?centos}
-Requires: python3-boto
-%endif
-Requires: python3-boto3
-Requires: python3-docker
-Requires: python3-mock
-Requires: python3-pygit2
-Requires: python3-pytest >= 7.0.1
-Requires: python3-pytest-httpserver
-Requires: python3-pytest-salt-factories >= 1.0.0~rc21
-Requires: python3-pytest-subtests
-Requires: python3-testinfra
-Requires: python3-yamllint
-
-Obsoletes: %{name}-tests
-
-%description -n python3-salt-testsuite
-Collection of unit, functional, and integration tests for %{name}.
-
%if %{with bash_completion}
%package bash-completion
Summary: Bash Completion for %{name}
@@ -790,6 +801,51 @@ For transactional systems, like MicroOS, Salt can operate
transparently if the executor "transactional-update" is registered in
list of active executors. This package add the configuration file.
+%endif
+
+%if "%{flavor}" == "testsuite"
+
+%package -n python3-salt-testsuite
+Summary: Unit and integration tests for Salt
+
+%if 0%{?rhel} == 8
+BuildRequires: platform-python
+%else
+BuildRequires: python3
+%endif
+BuildRequires: python3-devel
+BuildRequires: python3-setuptools
+
+Requires: salt = %{version}
+Requires: python3-CherryPy
+Requires: python3-Genshi
+Requires: python3-Mako
+%if !0%{?suse_version} > 1600 || 0%{?centos}
+Requires: python3-boto
+%endif
+Requires: python3-boto3
+Requires: python3-docker
+%if 0%{?suse_version} < 1600
+Requires: python3-mock
+%endif
+Requires: python3-pygit2
+Requires: python3-pytest >= 7.0.1
+Requires: python3-pytest-httpserver
+Requires: python3-pytest-salt-factories >= 1.0.0~rc21
+Requires: python3-pytest-subtests
+Requires: python3-testinfra
+Requires: python3-yamllint
+Requires: python3-pip
+Requires: docker
+Requires: openssh
+Requires: git
+
+Obsoletes: %{name}-tests
+
+%description -n python3-salt-testsuite
+Collection of unit, functional, and integration tests for %{name}.
+
+%endif
%prep
%setup -q -n salt-%{version}-suse
@@ -799,6 +855,8 @@ cp %{S:6} .
%autopatch -p1
%build
+%if "%{flavor}" != "testsuite"
+
# Putting /usr/bin at the front of $PATH is needed for RHEL/RES 7. Without this
# change, the RPM will require /bin/python, which is not provided by any package
# on RHEL/RES 7.
@@ -821,7 +879,11 @@ popd
cd doc && make html && rm _build/html/.buildinfo && rm _build/html/_images/proxy_minions.png && cd _build/html && chmod -R -x+X *
%endif
+%endif
+
%install
+%if "%{flavor}" != "testsuite"
+
mv _build.python3 build
python3 setup.py --salt-transport=both install --prefix=%{_prefix} --root=%{buildroot}
mv build _build.python3
@@ -869,6 +931,9 @@ install -Dd -m 0755 %{buildroot}%{_sysconfdir}/logrotate.d/
# Install salt-support profiles
install -Dpm 0644 salt/cli/support/profiles/* %{buildroot}%{python3_sitelib}/salt/cli/support/profiles
+%endif
+
+%if "%{flavor}" == "testsuite"
# Install Salt tests
install -Dd %{buildroot}%{python3_sitelib}/salt-testsuite
cp -a tests %{buildroot}%{python3_sitelib}/salt-testsuite/
@@ -876,6 +941,9 @@ cp -a tests %{buildroot}%{python3_sitelib}/salt-testsuite/
rm %{buildroot}%{python3_sitelib}/salt-testsuite/tests/runtests.py
# Copy conf files to the testsuite as they are used by the tests
cp -a conf %{buildroot}%{python3_sitelib}/salt-testsuite/
+%endif
+
+%if "%{flavor}" != "testsuite"
## Install Zypper plugins only on SUSE machines
%if 0%{?suse_version}
@@ -986,11 +1054,10 @@ install -Dpm 0640 conf/suse/standalone-formulas-configuration.conf %{buildroot}%
%fdupes %{buildroot}%{python3_sitelib}
%endif
-%check
-%if %{with test}
-python3 setup.py test --runtests-opts=-u
%endif
+%if "%{flavor}" != "testsuite"
+
%pre
S_HOME="/var/lib/salt"
S_PHOME="/srv/salt"
@@ -1464,9 +1531,6 @@ rm -f %{_localstatedir}/cache/salt/minion/thin/version
%doc doc/_build/html
%endif
-%files -n python3-salt-testsuite
-%{python3_sitelib}/salt-testsuite
-
%if %{with bash_completion}
%files bash-completion
%defattr(-,root,root)
@@ -1503,6 +1567,12 @@ rm -f %{_localstatedir}/cache/salt/minion/thin/version
%defattr(-,root,root)
%config(noreplace) %attr(0640, root, root) %{_sysconfdir}/salt/minion.d/transactional_update.conf
+%endif
+
+%if "%{flavor}" == "testsuite"
+%files -n python3-salt-testsuite
+%{python3_sitelib}/salt-testsuite
+%endif
%changelog
diff --git a/switch-oscap-encoding-to-utf-8-639.patch b/switch-oscap-encoding-to-utf-8-639.patch
new file mode 100644
index 0000000..78bc5b6
--- /dev/null
+++ b/switch-oscap-encoding-to-utf-8-639.patch
@@ -0,0 +1,80 @@
+From 4ec5c8bdb8aecac6752c639f494b86c7f8f57ba2 Mon Sep 17 00:00:00 2001
+From: Marek Czernek
+Date: Tue, 26 Mar 2024 09:20:30 +0100
+Subject: [PATCH] Switch oscap encoding to utf-8 (#639)
+
+---
+ salt/modules/openscap.py | 7 ++++---
+ tests/unit/modules/test_openscap.py | 8 ++++----
+ 2 files changed, 8 insertions(+), 7 deletions(-)
+
+diff --git a/salt/modules/openscap.py b/salt/modules/openscap.py
+index 89712ae722b..7322c667dcc 100644
+--- a/salt/modules/openscap.py
++++ b/salt/modules/openscap.py
+@@ -153,7 +153,7 @@ def xccdf_eval(xccdffile, ovalfiles=None, **kwargs):
+ tempdir = tempfile.mkdtemp()
+ proc = Popen(cmd_opts, stdout=PIPE, stderr=PIPE, cwd=tempdir)
+ (_, error) = proc.communicate()
+- error = error.decode('ascii', errors='ignore')
++ error = error.decode('utf-8', errors='surrogateescape')
+ success = _OSCAP_EXIT_CODES_MAP.get(proc.returncode, False)
+ if proc.returncode < 0:
+ error += "\nKilled by signal {}\n".format(proc.returncode)
+@@ -204,10 +204,11 @@ def xccdf(params):
+ cmd = _XCCDF_MAP[action]["cmd_pattern"].format(args.profile, policy)
+ tempdir = tempfile.mkdtemp()
+ proc = Popen(shlex.split(cmd), stdout=PIPE, stderr=PIPE, cwd=tempdir)
+- (stdoutdata, error) = proc.communicate()
++ (_, error) = proc.communicate()
++ error = error.decode('utf-8', errors='surrogateescape')
+ success = _OSCAP_EXIT_CODES_MAP.get(proc.returncode, False)
+ if proc.returncode < 0:
+- error += "\nKilled by signal {}\n".format(proc.returncode).encode('ascii')
++ error += "\nKilled by signal {}\n".format(proc.returncode)
+ returncode = proc.returncode
+ if success:
+ __salt__["cp.push_dir"](tempdir)
+diff --git a/tests/unit/modules/test_openscap.py b/tests/unit/modules/test_openscap.py
+index 6fbdfed7cf9..c20220ea977 100644
+--- a/tests/unit/modules/test_openscap.py
++++ b/tests/unit/modules/test_openscap.py
+@@ -35,7 +35,7 @@ class OpenscapTestCase(TestCase):
+ "salt.modules.openscap.Popen",
+ MagicMock(
+ return_value=Mock(
+- **{"returncode": 0, "communicate.return_value": ("", "")}
++ **{"returncode": 0, "communicate.return_value": (bytes(0), bytes(0))}
+ )
+ ),
+ ):
+@@ -82,7 +82,7 @@ class OpenscapTestCase(TestCase):
+ "salt.modules.openscap.Popen",
+ MagicMock(
+ return_value=Mock(
+- **{"returncode": 2, "communicate.return_value": ("", "some error")}
++ **{"returncode": 2, "communicate.return_value": (bytes(0), bytes("some error", "UTF-8"))}
+ )
+ ),
+ ):
+@@ -137,7 +137,7 @@ class OpenscapTestCase(TestCase):
+ "salt.modules.openscap.Popen",
+ MagicMock(
+ return_value=Mock(
+- **{"returncode": 2, "communicate.return_value": ("", "some error")}
++ **{"returncode": 2, "communicate.return_value": (bytes(0), bytes("some error", "UTF-8"))}
+ )
+ ),
+ ):
+@@ -180,7 +180,7 @@ class OpenscapTestCase(TestCase):
+ return_value=Mock(
+ **{
+ "returncode": 1,
+- "communicate.return_value": ("", "evaluation error"),
++ "communicate.return_value": (bytes(0), bytes("evaluation error", "UTF-8")),
+ }
+ )
+ ),
+--
+2.44.0
+