From ca50e204d9c50988f353c14cc1e0d4280e5b1ada7d1570a780c966c6108a3013 Mon Sep 17 00:00:00 2001 From: Dominique Leuenberger Date: Thu, 12 Jan 2023 14:44:08 +0000 Subject: [PATCH] https://bugzilla.opensuse.org/show_bug.cgi?id=1207071 OBS-URL: https://build.opensuse.org/package/show/openSUSE:Factory/salt?expand=0&rev=133 --- ...nsider-skipped-targets-as-failed-for.patch | 1440 ++++++++ ...ostgresql-json-support-in-pillar-423.patch | 1008 ++++++ _lastrevision | 2 +- _service | 6 +- ...beacons-sources-config-pillar-grains.patch | 8 +- ...etection-for-virtual-grains-bsc-1195.patch | 22 +- add-custom-suse-capabilities-as-grains.patch | 4 +- ...variable-to-know-if-yum-is-invoked-f.patch | 16 +- ...te-and-gpg-key-management-functions-.patch | 71 +- ...ble-module-functions-to-whitelist-in.patch | 40 + ..._batch-to-clearfuncs-exposed-methods.patch | 8 +- ...ython-library-for-version-comparison.patch | 402 +++ ...pport-with-venv-salt-minion-3004-493.patch | 179 +- ...eption-handling-on-minion-connection.patch | 8 +- ...onfiguration-file-for-enabling-packa.patch | 8 +- add-support-for-gpgautoimport-539.patch | 32 +- ...name-pkgs-and-diff_attr-parameters-t.patch | 47 +- adds-explicit-type-cast-for-port.patch | 32 + ...-nitro-grains-with-upstream-pr-bsc-1.patch | 124 - allow-vendor-change-option-with-zypper.patch | 50 +- async-batch-implementation.patch | 68 +- ...ve-syslogging-by-watchdog-cronjob-58.patch | 4 +- backport-syndic-auth-fixes.patch | 355 ++ ...xception-when-minion-does-not-respon.patch | 46 + ...file-directory-user-and-group-owners.patch | 22 +- ...eters-to-prevent-possible-tracebacks.patch | 8 +- ...notify-is-executable-bsc-1186674-376.patch | 23 + ...g.installed-pkg_verify-documentation.patch | 51 - ...n-info_installed-compatibility-50453.patch | 16 +- detect-module.run-syntax.patch | 28 - ...set-plugin-implementation-3002.2-450.patch | 4 +- ...n-unexpected-cmd-output-at-listing-p.patch | 79 + ...state-if-there-is-no-3rd-party-depen.patch | 8 +- 
...use-shell-sbin-nologin-in-requisites.patch | 8 +- ...from-event.unpack-in-cli.batch_async.patch | 4 +- early-feature-support-config.patch | 52 +- ...-unix_socket-for-mysql-returners-bsc.patch | 4 +- ...when-inotify-beacon-is-missing-pyino.patch | 30 + ...nscap-module-add-xccdf_eval-call-386.patch | 4 +- ...zmq.error.zmqerror-to-set-hwm-for-zm.patch | 35 + fix-bsc-1065792.patch | 6 +- ...hen-calling-manage.not_alive-runners.patch | 81 + ...-yumpkg.remove-for-not-installed-pac.patch | 76 + fix-for-cve-2022-22967-bsc-1200566.patch | 75 + fix-for-suse-expanded-support-detection.patch | 8 +- ...dule-export-function-bsc-1097531-481.patch | 68 + ...e-grain-to-not-leak-secondary-ipv4-a.patch | 32 + fix-issue-2068-test.patch | 8 +- ...ssues-with-salt-ssh-s-extra-filerefs.patch | 24 + ...xtfuntion-base-on-version-bsc-119874.patch | 83 + ...ing-minion-returns-in-batch-mode-360.patch | 8 +- ...multiple-security-issues-bsc-1197417.patch | 2946 +++++++++++++++++ ...-salt-thin-directory-when-using-the-.patch | 4 +- ...ith-depending-client.ssh-on-psutil-b.patch | 10 +- ...ent.send-call-with-grains-and-pillar.patch | 70 + ...-opts-poisoning-bsc-1197637-3004-501.patch | 34 +- ...file.managed-for-follow_symlinks-tru.patch | 270 +- ...tringutils.to_str-calls-to-make-it-w.patch | 20 +- ...in-test-mode-with-file-state-module-.patch | 84 +- fix-test_ipc-unit-tests.patch | 10 +- ...-regression-for-yumnotify-plugin-456.patch | 4 +- ...on-in-schedule-module-releasded-in-3.patch | 820 +++++ ...int_exc-calls-for-test_pip_state-432.patch | 8 +- ...od_del_repo_multiline_values-test-af.patch | 82 + ...144-to-enable-hotadd-profile-support.patch | 63 + fixes-for-python-3.10-502.patch | 68 +- ...nd-bad-buffering-for-binary-mode-563.patch | 5 +- ...-to-prefer-packages.db-than-packages.patch | 29 + html.tar.bz2 | 4 +- ...reading-license-files-with-dpkg_lowp.patch | 4 +- ...declarations-from-excluded-sls-files.patch | 250 -- ...characters-while-reading-files-with-.patch | 17 +- 
...f-held-unheld-functions-for-state-pk.patch | 813 +++++ ...f-suse_ip-execution-module-bsc-10999.patch | 1360 ++++++++ improvements-on-ansiblegate-module-354.patch | 556 ++++ include-aliases-in-the-fqdns-grains.patch | 56 +- ...t-in-error-message-for-zypperpkg-559.patch | 63 - ...talled-works-without-status-attr-now.patch | 12 +- ...-platform-python-binary-in-rhel8-191.patch | 10 +- ..._repos-compatible-on-enabled-disable.patch | 8 +- ...enderer-configurable-other-fixes-532.patch | 8 +- ...script-to-not-require-setuptools-9.1.patch | 8 +- ...cheloader-use-correct-fileclient-519.patch | 8 +- ...rs-in-utils-minions.py-unit-test-443.patch | 90 + ...e-names-once-with-pkg.installed-remo.patch | 22 +- ...beacon-for-debian-ubuntu-systems-347.patch | 92 + pass-the-context-to-pillar-ext-modules.patch | 276 -- ...n-of-ssh.opts-with-lazyloader-bsc-11.patch | 55 +- ...ins-errors-on-missing-cookie-path-bs.patch | 47 +- ...jection-via-pre_flight_script_args-4.patch | 172 +- ...ithout-using-interpolation-bsc-11356.patch | 8 +- ...rovements-for-transactional-updates-.patch | 1063 ++++++ ...default-behaviour-of-pkg-list-return.patch | 30 +- ...-lock-is-temporarily-unavailable-547.patch | 6 +- ...expected-powerpc-os-arch-bsc-1117995.patch | 6 +- ...use-case-when-multiple-inotify-beaco.patch | 38 +- run-salt-api-as-user-salt-bsc-1064520.patch | 6 +- run-salt-master-as-dedicated-salt-user.patch | 6 +- salt.changes | 164 - salt.spec | 217 +- save-log-to-logfile-with-docker.build.patch | 12 +- ...et-for-pip-from-venv_pip_target-envi.patch | 1798 +++++++++- ...don-t-check-for-cached-pillar-errors.patch | 6 +- ...e_single-does-not-pass-pillar-none-4.patch | 4 +- support-transactional-systems-microos.patch | 226 ++ ...ewalld-state-to-use-change_interface.patch | 4 +- ...tend-the-whitelist-of-allowed-comman.patch | 6 +- ...x-for-salt-ssh-to-process-targets-li.patch | 14 +- ...lgorithm-to-compute-string-checksums.patch | 16 +- ...rlock-to-avoid-deadlocks-in-salt-ssh.patch | 27 - 
use-salt-bundle-in-dockermod.patch | 10 +- v3004.tar.gz | 3 + v3005.1.tar.gz | 3 - ...et-from-env-in-cmdmod-bsc-1193357-30.patch | 84 + x509-fixes-111.patch | 28 +- ...-retcode-104-for-search-bsc-1176697-.patch | 16 +- 116 files changed, 15356 insertions(+), 1728 deletions(-) create mode 100644 3003.3-do-not-consider-skipped-targets-as-failed-for.patch create mode 100644 3003.3-postgresql-json-support-in-pillar-423.patch create mode 100644 add-missing-ansible-module-functions-to-whitelist-in.patch create mode 100644 add-rpm_vercmp-python-library-for-version-comparison.patch create mode 100644 adds-explicit-type-cast-for-port.patch delete mode 100644 align-amazon-ec2-nitro-grains-with-upstream-pr-bsc-1.patch create mode 100644 backport-syndic-auth-fixes.patch create mode 100644 batch.py-avoid-exception-when-minion-does-not-respon.patch create mode 100644 check-if-dpkgnotify-is-executable-bsc-1186674-376.patch delete mode 100644 clarify-pkg.installed-pkg_verify-documentation.patch delete mode 100644 detect-module.run-syntax.patch create mode 100644 do-not-crash-when-unexpected-cmd-output-at-listing-p.patch create mode 100644 enhance-logging-when-inotify-beacon-is-missing-pyino.patch create mode 100644 fix-62092-catch-zmq.error.zmqerror-to-set-hwm-for-zm.patch create mode 100644 fix-crash-when-calling-manage.not_alive-runners.patch create mode 100644 fix-exception-in-yumpkg.remove-for-not-installed-pac.patch create mode 100644 fix-for-cve-2022-22967-bsc-1200566.patch create mode 100644 fix-inspector-module-export-function-bsc-1097531-481.patch create mode 100644 fix-ip6_interface-grain-to-not-leak-secondary-ipv4-a.patch create mode 100644 fix-issues-with-salt-ssh-s-extra-filerefs.patch create mode 100644 fix-jinja2-contextfuntion-base-on-version-bsc-119874.patch create mode 100644 fix-multiple-security-issues-bsc-1197417.patch create mode 100644 fix-salt-call-event.send-call-with-grains-and-pillar.patch create mode 100644 
fix-the-regression-in-schedule-module-releasded-in-3.patch create mode 100644 fix-wrong-test_mod_del_repo_multiline_values-test-af.patch create mode 100644 fixes-56144-to-enable-hotadd-profile-support.patch create mode 100644 force-zyppnotify-to-prefer-packages.db-than-packages.patch delete mode 100644 ignore-extend-declarations-from-excluded-sls-files.patch create mode 100644 implementation-of-held-unheld-functions-for-state-pk.patch create mode 100644 implementation-of-suse_ip-execution-module-bsc-10999.patch create mode 100644 improvements-on-ansiblegate-module-354.patch delete mode 100644 include-stdout-in-error-message-for-zypperpkg-559.patch create mode 100644 mock-ip_addrs-in-utils-minions.py-unit-test-443.patch create mode 100644 notify-beacon-for-debian-ubuntu-systems-347.patch delete mode 100644 pass-the-context-to-pillar-ext-modules.patch create mode 100644 refactor-and-improvements-for-transactional-updates-.patch create mode 100644 support-transactional-systems-microos.patch delete mode 100644 use-rlock-to-avoid-deadlocks-in-salt-ssh.patch create mode 100644 v3004.tar.gz delete mode 100644 v3005.1.tar.gz create mode 100644 wipe-notify_socket-from-env-in-cmdmod-bsc-1193357-30.patch diff --git a/3003.3-do-not-consider-skipped-targets-as-failed-for.patch b/3003.3-do-not-consider-skipped-targets-as-failed-for.patch new file mode 100644 index 0000000..fa963c0 --- /dev/null +++ b/3003.3-do-not-consider-skipped-targets-as-failed-for.patch @@ -0,0 +1,1440 @@ +From c2dbf6ccdf320f1cce3d42f746f3c65f8633ee55 Mon Sep 17 00:00:00 2001 +From: Alexander Graul +Date: Wed, 19 Jan 2022 17:28:29 +0100 +Subject: [PATCH] 3003.3: Do not consider skipped targets as failed for + ansible.playbooks state (bsc#1190446) (#436) + +* Do not consider skipped targets as failed for ansible.playbooks state (bsc#1190446) + +* Fix unit test after backport + +* Fix unit test + +* Fix remaining problems in unit tests after backport +--- + salt/states/ansiblegate.py | 19 +- + 
tests/pytests/unit/states/test_ansiblegate.py | 30 + + .../success_example_with_skipped.json | 1320 +++++++++++++++++ + 3 files changed, 1354 insertions(+), 15 deletions(-) + create mode 100644 tests/unit/files/playbooks/success_example_with_skipped.json + +diff --git a/salt/states/ansiblegate.py b/salt/states/ansiblegate.py +index af5cb0f0e5..4de7fb096d 100644 +--- a/salt/states/ansiblegate.py ++++ b/salt/states/ansiblegate.py +@@ -189,19 +189,13 @@ def playbooks(name, rundir=None, git_repo=None, git_kwargs=None, ansible_kwargs= + ret["result"] = False + ret["changes"] = {} + elif all( +- not check["changed"] +- and not check["failures"] +- and not check["unreachable"] +- and not check["skipped"] ++ not check["changed"] and not check["failures"] and not check["unreachable"] + for check in checks["stats"].values() + ): + ret["comment"] = "No changes to be made from playbook {}".format(name) + ret["result"] = True + elif any( +- check["changed"] +- and not check["failures"] +- and not check["unreachable"] +- and not check["skipped"] ++ check["changed"] and not check["failures"] and not check["unreachable"] + for check in checks["stats"].values() + ): + ret["comment"] = "Changes will be made from playbook {}".format(name) +@@ -222,10 +216,7 @@ def playbooks(name, rundir=None, git_repo=None, git_kwargs=None, ansible_kwargs= + ret["result"] = False + ret["changes"] = {} + elif all( +- not check["changed"] +- and not check["failures"] +- and not check["unreachable"] +- and not check["skipped"] ++ not check["changed"] and not check["failures"] and not check["unreachable"] + for check in results["stats"].values() + ): + ret["comment"] = "No changes to be made from playbook {}".format(name) +@@ -234,9 +225,7 @@ def playbooks(name, rundir=None, git_repo=None, git_kwargs=None, ansible_kwargs= + else: + ret["changes"] = _changes(results) + ret["result"] = all( +- not check["failures"] +- and not check["unreachable"] +- and not check["skipped"] ++ not check["failures"] and not 
check["unreachable"] + for check in results["stats"].values() + ) + if ret["result"]: +diff --git a/tests/pytests/unit/states/test_ansiblegate.py b/tests/pytests/unit/states/test_ansiblegate.py +index fa1a1cd264..02157d3420 100644 +--- a/tests/pytests/unit/states/test_ansiblegate.py ++++ b/tests/pytests/unit/states/test_ansiblegate.py +@@ -42,6 +42,36 @@ def test_ansible_playbooks_states_success(playbooks_examples_dir): + } + + ++def test_ansible_playbooks_states_success_with_skipped(playbooks_examples_dir): ++ """ ++ Test ansible.playbooks states executions success. ++ """ ++ ++ success_output = json.loads( ++ playbooks_examples_dir.joinpath("success_example_with_skipped.json").read_text() ++ ) ++ ++ with patch.dict( ++ ansiblegate.__salt__, ++ {"ansible.playbooks": MagicMock(return_value=success_output)}, ++ ), patch("salt.utils.path.which", return_value=True), patch.dict( ++ ansiblegate.__opts__, {"test": False} ++ ): ++ ret = ansiblegate.playbooks("foobar") ++ assert ret["result"] is True ++ assert ret["comment"] == "No changes to be made from playbook foobar" ++ assert ret["changes"] == { ++ "all": { ++ "install git CentOS": {"uyuni-stable-min-sles15sp3.tf.local": {}}, ++ "install git SUSE": {"uyuni-stable-min-centos7.tf.local": {}}, ++ "install git Ubuntu": { ++ "uyuni-stable-min-centos7.tf.local": {}, ++ "uyuni-stable-min-sles15sp3.tf.local": {}, ++ }, ++ } ++ } ++ ++ + def test_ansible_playbooks_states_failed(playbooks_examples_dir): + """ + Test ansible.playbooks failed states executions. 
+diff --git a/tests/unit/files/playbooks/success_example_with_skipped.json b/tests/unit/files/playbooks/success_example_with_skipped.json +new file mode 100644 +index 0000000000..21bdb290c1 +--- /dev/null ++++ b/tests/unit/files/playbooks/success_example_with_skipped.json +@@ -0,0 +1,1320 @@ ++{ ++ "custom_stats": {}, ++ "global_custom_stats": {}, ++ "plays": [ ++ { ++ "play": { ++ "duration": { ++ "end": "2021-10-01T09:36:32.534379Z", ++ "start": "2021-10-01T09:36:22.615869Z" ++ }, ++ "id": "525400d6-b9c8-afcf-67b6-000000000007", ++ "name": "all" ++ }, ++ "tasks": [ ++ { ++ "hosts": { ++ "uyuni-stable-min-centos7.tf.local": { ++ "_ansible_no_log": false, ++ "_ansible_verbose_override": true, ++ "action": "gather_facts", ++ "ansible_facts": { ++ "ansible_all_ipv4_addresses": [ ++ "192.168.122.250" ++ ], ++ "ansible_all_ipv6_addresses": [ ++ "fe80::5054:ff:fe6e:3554" ++ ], ++ "ansible_apparmor": { ++ "status": "disabled" ++ }, ++ "ansible_architecture": "x86_64", ++ "ansible_bios_date": "04/01/2014", ++ "ansible_bios_version": "rel-1.14.0-0-g155821a-rebuilt.opensuse.org", ++ "ansible_cmdline": { ++ "BOOT_IMAGE": "/boot/vmlinuz-3.10.0-1127.el7.x86_64", ++ "LANG": "en_US.UTF-8", ++ "console": "ttyS0", ++ "crashkernel": "auto", ++ "net.ifnames": "0", ++ "ro": true, ++ "root": "UUID=6cd50e51-cfc6-40b9-9ec5-f32fa2e4ff02" ++ }, ++ "ansible_date_time": { ++ "date": "2021-10-01", ++ "day": "01", ++ "epoch": "1633080984", ++ "hour": "11", ++ "iso8601": "2021-10-01T09:36:24Z", ++ "iso8601_basic": "20211001T113624352399", ++ "iso8601_basic_short": "20211001T113624", ++ "iso8601_micro": "2021-10-01T09:36:24.352399Z", ++ "minute": "36", ++ "month": "10", ++ "second": "24", ++ "time": "11:36:24", ++ "tz": "CEST", ++ "tz_offset": "+0200", ++ "weekday": "Friday", ++ "weekday_number": "5", ++ "weeknumber": "39", ++ "year": "2021" ++ }, ++ "ansible_default_ipv4": { ++ "address": "192.168.122.250", ++ "alias": "eth0", ++ "broadcast": "192.168.122.255", ++ "gateway": "192.168.122.1", 
++ "interface": "eth0", ++ "macaddress": "52:54:00:6e:35:54", ++ "mtu": 1500, ++ "netmask": "255.255.255.0", ++ "network": "192.168.122.0", ++ "type": "ether" ++ }, ++ "ansible_default_ipv6": {}, ++ "ansible_device_links": { ++ "ids": { ++ "sr0": [ ++ "ata-QEMU_DVD-ROM_QM00004" ++ ] ++ }, ++ "labels": { ++ "sr0": [ ++ "cidata" ++ ] ++ }, ++ "masters": {}, ++ "uuids": { ++ "sr0": [ ++ "2021-05-28-09-32-38-69" ++ ], ++ "vda1": [ ++ "6cd50e51-cfc6-40b9-9ec5-f32fa2e4ff02" ++ ] ++ } ++ }, ++ "ansible_devices": { ++ "sr0": { ++ "holders": [], ++ "host": "", ++ "links": { ++ "ids": [ ++ "ata-QEMU_DVD-ROM_QM00004" ++ ], ++ "labels": [ ++ "cidata" ++ ], ++ "masters": [], ++ "uuids": [ ++ "2021-05-28-09-32-38-69" ++ ] ++ }, ++ "model": "QEMU DVD-ROM", ++ "partitions": {}, ++ "removable": "1", ++ "rotational": "1", ++ "sas_address": null, ++ "sas_device_handle": null, ++ "scheduler_mode": "deadline", ++ "sectors": "732", ++ "sectorsize": "2048", ++ "size": "366.00 KB", ++ "support_discard": "0", ++ "vendor": "QEMU", ++ "virtual": 1 ++ }, ++ "vda": { ++ "holders": [], ++ "host": "", ++ "links": { ++ "ids": [], ++ "labels": [], ++ "masters": [], ++ "uuids": [] ++ }, ++ "model": null, ++ "partitions": { ++ "vda1": { ++ "holders": [], ++ "links": { ++ "ids": [], ++ "labels": [], ++ "masters": [], ++ "uuids": [ ++ "6cd50e51-cfc6-40b9-9ec5-f32fa2e4ff02" ++ ] ++ }, ++ "sectors": "419428319", ++ "sectorsize": 512, ++ "size": "200.00 GB", ++ "start": "2048", ++ "uuid": "6cd50e51-cfc6-40b9-9ec5-f32fa2e4ff02" ++ } ++ }, ++ "removable": "0", ++ "rotational": "1", ++ "sas_address": null, ++ "sas_device_handle": null, ++ "scheduler_mode": "mq-deadline", ++ "sectors": "419430400", ++ "sectorsize": "512", ++ "size": "200.00 GB", ++ "support_discard": "0", ++ "vendor": "0x1af4", ++ "virtual": 1 ++ } ++ }, ++ "ansible_distribution": "CentOS", ++ "ansible_distribution_file_parsed": true, ++ "ansible_distribution_file_path": "/etc/redhat-release", ++ "ansible_distribution_file_variety": 
"RedHat", ++ "ansible_distribution_major_version": "7", ++ "ansible_distribution_release": "Core", ++ "ansible_distribution_version": "7.8", ++ "ansible_dns": { ++ "nameservers": [ ++ "192.168.122.1" ++ ] ++ }, ++ "ansible_domain": "tf.local", ++ "ansible_effective_group_id": 0, ++ "ansible_effective_user_id": 0, ++ "ansible_env": { ++ "HOME": "/root", ++ "LANG": "C", ++ "LC_ADDRESS": "C", ++ "LC_ALL": "C", ++ "LC_COLLATE": "C", ++ "LC_CTYPE": "C.UTF-8", ++ "LC_IDENTIFICATION": "C", ++ "LC_MEASUREMENT": "C", ++ "LC_MESSAGES": "C", ++ "LC_MONETARY": "C", ++ "LC_NAME": "C", ++ "LC_NUMERIC": "C", ++ "LC_PAPER": "C", ++ "LC_TELEPHONE": "C", ++ "LC_TIME": "C", ++ "LOGNAME": "root", ++ "LS_COLORS": "rs=0:di=38;5;27:ln=38;5;51:mh=44;38;5;15:pi=40;38;5;11:so=38;5;13:do=38;5;5:bd=48;5;232;38;5;11:cd=48;5;232;38;5;3:or=48;5;232;38;5;9:mi=05;48;5;232;38;5;15:su=48;5;196;38;5;15:sg=48;5;11;38;5;16:ca=48;5;196;38;5;226:tw=48;5;10;38;5;16:ow=48;5;10;38;5;21:st=48;5;21;38;5;15:ex=38;5;34:*.tar=38;5;9:*.tgz=38;5;9:*.arc=38;5;9:*.arj=38;5;9:*.taz=38;5;9:*.lha=38;5;9:*.lz4=38;5;9:*.lzh=38;5;9:*.lzma=38;5;9:*.tlz=38;5;9:*.txz=38;5;9:*.tzo=38;5;9:*.t7z=38;5;9:*.zip=38;5;9:*.z=38;5;9:*.Z=38;5;9:*.dz=38;5;9:*.gz=38;5;9:*.lrz=38;5;9:*.lz=38;5;9:*.lzo=38;5;9:*.xz=38;5;9:*.bz2=38;5;9:*.bz=38;5;9:*.tbz=38;5;9:*.tbz2=38;5;9:*.tz=38;5;9:*.deb=38;5;9:*.rpm=38;5;9:*.jar=38;5;9:*.war=38;5;9:*.ear=38;5;9:*.sar=38;5;9:*.rar=38;5;9:*.alz=38;5;9:*.ace=38;5;9:*.zoo=38;5;9:*.cpio=38;5;9:*.7z=38;5;9:*.rz=38;5;9:*.cab=38;5;9:*.jpg=38;5;13:*.jpeg=38;5;13:*.gif=38;5;13:*.bmp=38;5;13:*.pbm=38;5;13:*.pgm=38;5;13:*.ppm=38;5;13:*.tga=38;5;13:*.xbm=38;5;13:*.xpm=38;5;13:*.tif=38;5;13:*.tiff=38;5;13:*.png=38;5;13:*.svg=38;5;13:*.svgz=38;5;13:*.mng=38;5;13:*.pcx=38;5;13:*.mov=38;5;13:*.mpg=38;5;13:*.mpeg=38;5;13:*.m2v=38;5;13:*.mkv=38;5;13:*.webm=38;5;13:*.ogm=38;5;13:*.mp4=38;5;13:*.m4v=38;5;13:*.mp4v=38;5;13:*.vob=38;5;13:*.qt=38;5;13:*.nuv=38;5;13:*.wmv=38;5;13:*.asf=38;5;13:*.rm=38;5;13:*.rmvb=38;5;13:*.flc=3
8;5;13:*.avi=38;5;13:*.fli=38;5;13:*.flv=38;5;13:*.gl=38;5;13:*.dl=38;5;13:*.xcf=38;5;13:*.xwd=38;5;13:*.yuv=38;5;13:*.cgm=38;5;13:*.emf=38;5;13:*.axv=38;5;13:*.anx=38;5;13:*.ogv=38;5;13:*.ogx=38;5;13:*.aac=38;5;45:*.au=38;5;45:*.flac=38;5;45:*.mid=38;5;45:*.midi=38;5;45:*.mka=38;5;45:*.mp3=38;5;45:*.mpc=38;5;45:*.ogg=38;5;45:*.ra=38;5;45:*.wav=38;5;45:*.axa=38;5;45:*.oga=38;5;45:*.spx=38;5;45:*.xspf=38;5;45:", ++ "MAIL": "/var/mail/root", ++ "PATH": "/sbin:/bin:/usr/sbin:/usr/bin", ++ "PWD": "/root", ++ "SHELL": "/bin/bash", ++ "SHLVL": "1", ++ "SUDO_COMMAND": "/bin/sh -c echo BECOME-SUCCESS-tyqutlgpttzahspvugthhwbwqizpflyl ; /usr/bin/python /root/.ansible/tmp/ansible-tmp-1633080982.7515817-3386-201873769122806/AnsiballZ_setup.py", ++ "SUDO_GID": "0", ++ "SUDO_UID": "0", ++ "SUDO_USER": "root", ++ "TERM": "xterm-256color", ++ "USER": "root", ++ "USERNAME": "root", ++ "XDG_RUNTIME_DIR": "/run/user/0", ++ "XDG_SESSION_ID": "1", ++ "_": "/usr/bin/python" ++ }, ++ "ansible_eth0": { ++ "active": true, ++ "device": "eth0", ++ "features": { ++ "busy_poll": "off [fixed]", ++ "fcoe_mtu": "off [fixed]", ++ "generic_receive_offload": "on", ++ "generic_segmentation_offload": "on", ++ "highdma": "on [fixed]", ++ "hw_tc_offload": "off [fixed]", ++ "l2_fwd_offload": "off [fixed]", ++ "large_receive_offload": "off [fixed]", ++ "loopback": "off [fixed]", ++ "netns_local": "off [fixed]", ++ "ntuple_filters": "off [fixed]", ++ "receive_hashing": "off [fixed]", ++ "rx_all": "off [fixed]", ++ "rx_checksumming": "on [fixed]", ++ "rx_fcs": "off [fixed]", ++ "rx_gro_hw": "off [fixed]", ++ "rx_udp_tunnel_port_offload": "off [fixed]", ++ "rx_vlan_filter": "on [fixed]", ++ "rx_vlan_offload": "off [fixed]", ++ "rx_vlan_stag_filter": "off [fixed]", ++ "rx_vlan_stag_hw_parse": "off [fixed]", ++ "scatter_gather": "on", ++ "tcp_segmentation_offload": "on", ++ "tx_checksum_fcoe_crc": "off [fixed]", ++ "tx_checksum_ip_generic": "on", ++ "tx_checksum_ipv4": "off [fixed]", ++ "tx_checksum_ipv6": 
"off [fixed]", ++ "tx_checksum_sctp": "off [fixed]", ++ "tx_checksumming": "on", ++ "tx_fcoe_segmentation": "off [fixed]", ++ "tx_gre_csum_segmentation": "off [fixed]", ++ "tx_gre_segmentation": "off [fixed]", ++ "tx_gso_partial": "off [fixed]", ++ "tx_gso_robust": "off [fixed]", ++ "tx_ipip_segmentation": "off [fixed]", ++ "tx_lockless": "off [fixed]", ++ "tx_nocache_copy": "off", ++ "tx_scatter_gather": "on", ++ "tx_scatter_gather_fraglist": "off [fixed]", ++ "tx_sctp_segmentation": "off [fixed]", ++ "tx_sit_segmentation": "off [fixed]", ++ "tx_tcp6_segmentation": "on", ++ "tx_tcp_ecn_segmentation": "on", ++ "tx_tcp_mangleid_segmentation": "off", ++ "tx_tcp_segmentation": "on", ++ "tx_udp_tnl_csum_segmentation": "off [fixed]", ++ "tx_udp_tnl_segmentation": "off [fixed]", ++ "tx_vlan_offload": "off [fixed]", ++ "tx_vlan_stag_hw_insert": "off [fixed]", ++ "udp_fragmentation_offload": "on", ++ "vlan_challenged": "off [fixed]" ++ }, ++ "hw_timestamp_filters": [], ++ "ipv4": { ++ "address": "192.168.122.250", ++ "broadcast": "192.168.122.255", ++ "netmask": "255.255.255.0", ++ "network": "192.168.122.0" ++ }, ++ "ipv6": [ ++ { ++ "address": "fe80::5054:ff:fe6e:3554", ++ "prefix": "64", ++ "scope": "link" ++ } ++ ], ++ "macaddress": "52:54:00:6e:35:54", ++ "module": "virtio_net", ++ "mtu": 1500, ++ "pciid": "virtio0", ++ "promisc": false, ++ "timestamping": [ ++ "rx_software", ++ "software" ++ ], ++ "type": "ether" ++ }, ++ "ansible_fibre_channel_wwn": [], ++ "ansible_fips": false, ++ "ansible_form_factor": "Other", ++ "ansible_fqdn": "uyuni-stable-min-centos7.tf.local", ++ "ansible_hostname": "uyuni-stable-min-centos7", ++ "ansible_hostnqn": "", ++ "ansible_interfaces": [ ++ "lo", ++ "eth0" ++ ], ++ "ansible_is_chroot": false, ++ "ansible_iscsi_iqn": "", ++ "ansible_kernel": "3.10.0-1127.el7.x86_64", ++ "ansible_kernel_version": "#1 SMP Tue Mar 31 23:36:51 UTC 2020", ++ "ansible_lo": { ++ "active": true, ++ "device": "lo", ++ "features": { ++ "busy_poll": "off 
[fixed]", ++ "fcoe_mtu": "off [fixed]", ++ "generic_receive_offload": "on", ++ "generic_segmentation_offload": "on", ++ "highdma": "on [fixed]", ++ "hw_tc_offload": "off [fixed]", ++ "l2_fwd_offload": "off [fixed]", ++ "large_receive_offload": "off [fixed]", ++ "loopback": "on [fixed]", ++ "netns_local": "on [fixed]", ++ "ntuple_filters": "off [fixed]", ++ "receive_hashing": "off [fixed]", ++ "rx_all": "off [fixed]", ++ "rx_checksumming": "on [fixed]", ++ "rx_fcs": "off [fixed]", ++ "rx_gro_hw": "off [fixed]", ++ "rx_udp_tunnel_port_offload": "off [fixed]", ++ "rx_vlan_filter": "off [fixed]", ++ "rx_vlan_offload": "off [fixed]", ++ "rx_vlan_stag_filter": "off [fixed]", ++ "rx_vlan_stag_hw_parse": "off [fixed]", ++ "scatter_gather": "on", ++ "tcp_segmentation_offload": "on", ++ "tx_checksum_fcoe_crc": "off [fixed]", ++ "tx_checksum_ip_generic": "on [fixed]", ++ "tx_checksum_ipv4": "off [fixed]", ++ "tx_checksum_ipv6": "off [fixed]", ++ "tx_checksum_sctp": "on [fixed]", ++ "tx_checksumming": "on", ++ "tx_fcoe_segmentation": "off [fixed]", ++ "tx_gre_csum_segmentation": "off [fixed]", ++ "tx_gre_segmentation": "off [fixed]", ++ "tx_gso_partial": "off [fixed]", ++ "tx_gso_robust": "off [fixed]", ++ "tx_ipip_segmentation": "off [fixed]", ++ "tx_lockless": "on [fixed]", ++ "tx_nocache_copy": "off [fixed]", ++ "tx_scatter_gather": "on [fixed]", ++ "tx_scatter_gather_fraglist": "on [fixed]", ++ "tx_sctp_segmentation": "on", ++ "tx_sit_segmentation": "off [fixed]", ++ "tx_tcp6_segmentation": "on", ++ "tx_tcp_ecn_segmentation": "on", ++ "tx_tcp_mangleid_segmentation": "on", ++ "tx_tcp_segmentation": "on", ++ "tx_udp_tnl_csum_segmentation": "off [fixed]", ++ "tx_udp_tnl_segmentation": "off [fixed]", ++ "tx_vlan_offload": "off [fixed]", ++ "tx_vlan_stag_hw_insert": "off [fixed]", ++ "udp_fragmentation_offload": "on", ++ "vlan_challenged": "on [fixed]" ++ }, ++ "hw_timestamp_filters": [], ++ "ipv4": { ++ "address": "127.0.0.1", ++ "broadcast": "", ++ "netmask": "255.0.0.0", ++ 
"network": "127.0.0.0" ++ }, ++ "ipv6": [ ++ { ++ "address": "::1", ++ "prefix": "128", ++ "scope": "host" ++ } ++ ], ++ "mtu": 65536, ++ "promisc": false, ++ "timestamping": [ ++ "rx_software", ++ "software" ++ ], ++ "type": "loopback" ++ }, ++ "ansible_local": {}, ++ "ansible_lsb": {}, ++ "ansible_machine": "x86_64", ++ "ansible_machine_id": "2622d9aee28c3a2356a756fa60b0b96e", ++ "ansible_memfree_mb": 612, ++ "ansible_memory_mb": { ++ "nocache": { ++ "free": 789, ++ "used": 201 ++ }, ++ "real": { ++ "free": 612, ++ "total": 990, ++ "used": 378 ++ }, ++ "swap": { ++ "cached": 0, ++ "free": 0, ++ "total": 0, ++ "used": 0 ++ } ++ }, ++ "ansible_memtotal_mb": 990, ++ "ansible_mounts": [ ++ { ++ "block_available": 52122135, ++ "block_size": 4096, ++ "block_total": 52425979, ++ "block_used": 303844, ++ "device": "/dev/vda1", ++ "fstype": "xfs", ++ "inode_available": 104821638, ++ "inode_total": 104857024, ++ "inode_used": 35386, ++ "mount": "/", ++ "options": "rw,seclabel,relatime,attr2,inode64,noquota", ++ "size_available": 213492264960, ++ "size_total": 214736809984, ++ "uuid": "6cd50e51-cfc6-40b9-9ec5-f32fa2e4ff02" ++ } ++ ], ++ "ansible_nodename": "uyuni-stable-min-centos7", ++ "ansible_os_family": "RedHat", ++ "ansible_pkg_mgr": "yum", ++ "ansible_proc_cmdline": { ++ "BOOT_IMAGE": "/boot/vmlinuz-3.10.0-1127.el7.x86_64", ++ "LANG": "en_US.UTF-8", ++ "console": [ ++ "tty0", ++ "ttyS0,115200n8", ++ "ttyS0" ++ ], ++ "crashkernel": "auto", ++ "net.ifnames": "0", ++ "ro": true, ++ "root": "UUID=6cd50e51-cfc6-40b9-9ec5-f32fa2e4ff02" ++ }, ++ "ansible_processor": [ ++ "0", ++ "GenuineIntel", ++ "QEMU Virtual CPU version 2.5+" ++ ], ++ "ansible_processor_cores": 1, ++ "ansible_processor_count": 1, ++ "ansible_processor_threads_per_core": 1, ++ "ansible_processor_vcpus": 1, ++ "ansible_product_name": "Standard PC (i440FX + PIIX, 1996)", ++ "ansible_product_serial": "NA", ++ "ansible_product_uuid": "5DCE1DC7-FB62-4199-8044-993382A0D198", ++ "ansible_product_version": 
"pc-i440fx-6.0", ++ "ansible_python": { ++ "executable": "/usr/bin/python", ++ "has_sslcontext": true, ++ "type": "CPython", ++ "version": { ++ "major": 2, ++ "micro": 5, ++ "minor": 7, ++ "releaselevel": "final", ++ "serial": 0 ++ }, ++ "version_info": [ ++ 2, ++ 7, ++ 5, ++ "final", ++ 0 ++ ] ++ }, ++ "ansible_python_version": "2.7.5", ++ "ansible_real_group_id": 0, ++ "ansible_real_user_id": 0, ++ "ansible_selinux": { ++ "config_mode": "enforcing", ++ "mode": "enforcing", ++ "policyvers": 31, ++ "status": "enabled", ++ "type": "targeted" ++ }, ++ "ansible_selinux_python_present": true, ++ "ansible_service_mgr": "systemd", ++ "ansible_ssh_host_key_ecdsa_public": "AAAAE2VjZHNhLXNoYTItbmlzdHAyNTYAAAAIbmlzdHAyNTYAAABBBBNT/qKM5GfECZrKpKSipoy+FIVJCly6GmmiMtTDnSRhU++rz7FntpL9U+AWWdhqi5OA+7y9kEcSJ5StmwTIWeQ=", ++ "ansible_ssh_host_key_ed25519_public": "AAAAC3NzaC1lZDI1NTE5AAAAIFPWtQ7R5nCp1n0cKpY3S5VcEzF063uJ92ohu5OBA9Dv", ++ "ansible_ssh_host_key_rsa_public": "AAAAB3NzaC1yc2EAAAADAQABAAABAQC7jW8ES08/SJJ5dAKzMqBXeU5RHNKsfRGyiE+ERhZ7hsQ7qVSUAN/r1wpyqGrG2m426h4OMbme91dIpGMMvbKSBHgyVb8HdddHBxBz8hbvml4foujyfe2AkK4S0KxXzmkAOPZDweF1QOdJh4CN/nKyw/Ec1HMruqTECw1CeyamewI/GASzyvMogNZb564zsyI4RtNw5X1ztdAMcvw2Zb/8Vs4OPbTjjrZr/urXxlwVQWvlfFPj46puSjuuhKW4lIVuNZRopwyGeI7MR6ua1k812GW25I7THl3m9iqbZVgXDhyUZI72fm1Q4Ct6uV8ZUYcwlayL1MmPE0AQpXRG6Ls7", ++ "ansible_swapfree_mb": 0, ++ "ansible_swaptotal_mb": 0, ++ "ansible_system": "Linux", ++ "ansible_system_capabilities": [ ++ "cap_chown", ++ "cap_dac_override", ++ "cap_dac_read_search", ++ "cap_fowner", ++ "cap_fsetid", ++ "cap_kill", ++ "cap_setgid", ++ "cap_setuid", ++ "cap_setpcap", ++ "cap_linux_immutable", ++ "cap_net_bind_service", ++ "cap_net_broadcast", ++ "cap_net_admin", ++ "cap_net_raw", ++ "cap_ipc_lock", ++ "cap_ipc_owner", ++ "cap_sys_module", ++ "cap_sys_rawio", ++ "cap_sys_chroot", ++ "cap_sys_ptrace", ++ "cap_sys_pacct", ++ "cap_sys_admin", ++ "cap_sys_boot", ++ "cap_sys_nice", ++ "cap_sys_resource", ++ "cap_sys_time", ++ 
"cap_sys_tty_config", ++ "cap_mknod", ++ "cap_lease", ++ "cap_audit_write", ++ "cap_audit_control", ++ "cap_setfcap", ++ "cap_mac_override", ++ "cap_mac_admin", ++ "cap_syslog", ++ "35", ++ "36+ep" ++ ], ++ "ansible_system_capabilities_enforced": "True", ++ "ansible_system_vendor": "QEMU", ++ "ansible_uptime_seconds": 212, ++ "ansible_user_dir": "/root", ++ "ansible_user_gecos": "root", ++ "ansible_user_gid": 0, ++ "ansible_user_id": "root", ++ "ansible_user_shell": "/bin/bash", ++ "ansible_user_uid": 0, ++ "ansible_userspace_architecture": "x86_64", ++ "ansible_userspace_bits": "64", ++ "ansible_virtualization_role": "guest", ++ "ansible_virtualization_type": "kvm", ++ "discovered_interpreter_python": "/usr/bin/python", ++ "gather_subset": [ ++ "all" ++ ], ++ "module_setup": true ++ }, ++ "changed": false, ++ "deprecations": [], ++ "warnings": [] ++ }, ++ "uyuni-stable-min-sles15sp3.tf.local": { ++ "_ansible_no_log": false, ++ "_ansible_verbose_override": true, ++ "action": "gather_facts", ++ "ansible_facts": { ++ "ansible_all_ipv4_addresses": [ ++ "192.168.122.210" ++ ], ++ "ansible_all_ipv6_addresses": [ ++ "fe80::5054:ff:fed6:b9c8" ++ ], ++ "ansible_apparmor": { ++ "status": "enabled" ++ }, ++ "ansible_architecture": "x86_64", ++ "ansible_bios_date": "04/01/2014", ++ "ansible_bios_version": "rel-1.14.0-0-g155821a-rebuilt.opensuse.org", ++ "ansible_cmdline": { ++ "BOOT_IMAGE": "/boot/vmlinuz-5.3.18-59.24-default", ++ "console": "tty0", ++ "net.ifnames": "0", ++ "plymouth.enable": "0", ++ "quiet": true, ++ "root": "UUID=2c17af10-fc7c-4768-ab24-e6700f2dc588" ++ }, ++ "ansible_date_time": { ++ "date": "2021-10-01", ++ "day": "01", ++ "epoch": "1633080985", ++ "hour": "11", ++ "iso8601": "2021-10-01T09:36:25Z", ++ "iso8601_basic": "20211001T113625125429", ++ "iso8601_basic_short": "20211001T113625", ++ "iso8601_micro": "2021-10-01T09:36:25.125429Z", ++ "minute": "36", ++ "month": "10", ++ "second": "25", ++ "time": "11:36:25", ++ "tz": "CEST", ++ "tz_offset": 
"+0200", ++ "weekday": "Friday", ++ "weekday_number": "5", ++ "weeknumber": "39", ++ "year": "2021" ++ }, ++ "ansible_default_ipv4": { ++ "address": "192.168.122.210", ++ "alias": "eth0", ++ "broadcast": "192.168.122.255", ++ "gateway": "192.168.122.1", ++ "interface": "eth0", ++ "macaddress": "52:54:00:d6:b9:c8", ++ "mtu": 1500, ++ "netmask": "255.255.255.0", ++ "network": "192.168.122.0", ++ "type": "ether" ++ }, ++ "ansible_default_ipv6": {}, ++ "ansible_device_links": { ++ "ids": { ++ "sr0": [ ++ "ata-QEMU_DVD-ROM_QM00004", ++ "scsi-0QEMU_QEMU_DVD-ROM_QM00004", ++ "scsi-1ATA_QEMU_DVD-ROM_QM00004" ++ ] ++ }, ++ "labels": { ++ "sr0": [ ++ "cidata" ++ ], ++ "vda2": [ ++ "EFI" ++ ], ++ "vda3": [ ++ "ROOT" ++ ] ++ }, ++ "masters": {}, ++ "uuids": { ++ "sr0": [ ++ "2021-09-21-09-21-51-42" ++ ], ++ "vda2": [ ++ "DB16-E900" ++ ], ++ "vda3": [ ++ "2c17af10-fc7c-4768-ab24-e6700f2dc588" ++ ] ++ } ++ }, ++ "ansible_devices": { ++ "sr0": { ++ "holders": [], ++ "host": "IDE interface: Intel Corporation 82371SB PIIX3 IDE [Natoma/Triton II]", ++ "links": { ++ "ids": [ ++ "ata-QEMU_DVD-ROM_QM00004", ++ "scsi-0QEMU_QEMU_DVD-ROM_QM00004", ++ "scsi-1ATA_QEMU_DVD-ROM_QM00004" ++ ], ++ "labels": [ ++ "cidata" ++ ], ++ "masters": [], ++ "uuids": [ ++ "2021-09-21-09-21-51-42" ++ ] ++ }, ++ "model": "QEMU DVD-ROM", ++ "partitions": {}, ++ "removable": "1", ++ "rotational": "1", ++ "sas_address": null, ++ "sas_device_handle": null, ++ "scheduler_mode": "bfq", ++ "sectors": "732", ++ "sectorsize": "2048", ++ "size": "366.00 KB", ++ "support_discard": "0", ++ "vendor": "QEMU", ++ "virtual": 1 ++ }, ++ "vda": { ++ "holders": [], ++ "host": "SCSI storage controller: Red Hat, Inc. 
Virtio block device", ++ "links": { ++ "ids": [], ++ "labels": [], ++ "masters": [], ++ "uuids": [] ++ }, ++ "model": null, ++ "partitions": { ++ "vda1": { ++ "holders": [], ++ "links": { ++ "ids": [], ++ "labels": [], ++ "masters": [], ++ "uuids": [] ++ }, ++ "sectors": "4096", ++ "sectorsize": 512, ++ "size": "2.00 MB", ++ "start": "2048", ++ "uuid": null ++ }, ++ "vda2": { ++ "holders": [], ++ "links": { ++ "ids": [], ++ "labels": [ ++ "EFI" ++ ], ++ "masters": [], ++ "uuids": [ ++ "DB16-E900" ++ ] ++ }, ++ "sectors": "67584", ++ "sectorsize": 512, ++ "size": "33.00 MB", ++ "start": "6144", ++ "uuid": "DB16-E900" ++ }, ++ "vda3": { ++ "holders": [], ++ "links": { ++ "ids": [], ++ "labels": [ ++ "ROOT" ++ ], ++ "masters": [], ++ "uuids": [ ++ "2c17af10-fc7c-4768-ab24-e6700f2dc588" ++ ] ++ }, ++ "sectors": "419356639", ++ "sectorsize": 512, ++ "size": "199.96 GB", ++ "start": "73728", ++ "uuid": "2c17af10-fc7c-4768-ab24-e6700f2dc588" ++ } ++ }, ++ "removable": "0", ++ "rotational": "1", ++ "sas_address": null, ++ "sas_device_handle": null, ++ "scheduler_mode": "bfq", ++ "sectors": "419430400", ++ "sectorsize": "512", ++ "size": "200.00 GB", ++ "support_discard": "512", ++ "vendor": "0x1af4", ++ "virtual": 1 ++ } ++ }, ++ "ansible_distribution": "SLES", ++ "ansible_distribution_file_parsed": true, ++ "ansible_distribution_file_path": "/etc/os-release", ++ "ansible_distribution_file_variety": "SUSE", ++ "ansible_distribution_major_version": "15", ++ "ansible_distribution_release": "3", ++ "ansible_distribution_version": "15.3", ++ "ansible_dns": { ++ "nameservers": [ ++ "192.168.122.1" ++ ] ++ }, ++ "ansible_domain": "tf.local", ++ "ansible_effective_group_id": 0, ++ "ansible_effective_user_id": 0, ++ "ansible_env": { ++ "COLORTERM": "1", ++ "HOME": "/root", ++ "LANG": "es_ES.utf8", ++ "LC_ADDRESS": "C", ++ "LC_COLLATE": "C", ++ "LC_CTYPE": "C.UTF-8", ++ "LC_IDENTIFICATION": "C", ++ "LC_MEASUREMENT": "C", ++ "LC_MESSAGES": "C", ++ "LC_MONETARY": "C", ++ "LC_NAME": 
"C", ++ "LC_NUMERIC": "C", ++ "LC_PAPER": "C", ++ "LC_TELEPHONE": "C", ++ "LC_TIME": "C", ++ "LOGNAME": "root", ++ "MAIL": "/var/mail/root", ++ "PATH": "/usr/sbin:/usr/bin:/sbin:/bin:/usr/local/bin:/usr/local/sbin", ++ "PWD": "/root", ++ "SHELL": "/bin/bash", ++ "SHLVL": "1", ++ "SUDO_COMMAND": "/bin/sh -c echo BECOME-SUCCESS-qedtxrnnbofvbuxhnhrhhiiglqankyuw ; /usr/bin/python3.6 /root/.ansible/tmp/ansible-tmp-1633080982.8063648-3385-23664123284127/AnsiballZ_setup.py", ++ "SUDO_GID": "0", ++ "SUDO_UID": "0", ++ "SUDO_USER": "root", ++ "TERM": "xterm-256color", ++ "USER": "root", ++ "_": "/usr/bin/python3.6" ++ }, ++ "ansible_eth0": { ++ "active": true, ++ "device": "eth0", ++ "features": { ++ "esp_hw_offload": "off [fixed]", ++ "esp_tx_csum_hw_offload": "off [fixed]", ++ "fcoe_mtu": "off [fixed]", ++ "generic_receive_offload": "on", ++ "generic_segmentation_offload": "on", ++ "highdma": "on [fixed]", ++ "hw_tc_offload": "off [fixed]", ++ "l2_fwd_offload": "off [fixed]", ++ "large_receive_offload": "on", ++ "loopback": "off [fixed]", ++ "macsec_hw_offload": "off [fixed]", ++ "netns_local": "off [fixed]", ++ "ntuple_filters": "off [fixed]", ++ "receive_hashing": "off [fixed]", ++ "rx_all": "off [fixed]", ++ "rx_checksumming": "on [fixed]", ++ "rx_fcs": "off [fixed]", ++ "rx_gro_hw": "off [fixed]", ++ "rx_gro_list": "off [fixed]", ++ "rx_udp_tunnel_port_offload": "off [fixed]", ++ "rx_vlan_filter": "on [fixed]", ++ "rx_vlan_offload": "off [fixed]", ++ "rx_vlan_stag_filter": "off [fixed]", ++ "rx_vlan_stag_hw_parse": "off [fixed]", ++ "scatter_gather": "on", ++ "tcp_segmentation_offload": "on", ++ "tls_hw_record": "off [fixed]", ++ "tls_hw_rx_offload": "off [fixed]", ++ "tls_hw_tx_offload": "off [fixed]", ++ "tx_checksum_fcoe_crc": "off [fixed]", ++ "tx_checksum_ip_generic": "on", ++ "tx_checksum_ipv4": "off [fixed]", ++ "tx_checksum_ipv6": "off [fixed]", ++ "tx_checksum_sctp": "off [fixed]", ++ "tx_checksumming": "on", ++ "tx_esp_segmentation": "off [fixed]", ++ 
"tx_fcoe_segmentation": "off [fixed]", ++ "tx_gre_csum_segmentation": "off [fixed]", ++ "tx_gre_segmentation": "off [fixed]", ++ "tx_gso_list": "off [fixed]", ++ "tx_gso_partial": "off [fixed]", ++ "tx_gso_robust": "on [fixed]", ++ "tx_ipxip4_segmentation": "off [fixed]", ++ "tx_ipxip6_segmentation": "off [fixed]", ++ "tx_lockless": "off [fixed]", ++ "tx_nocache_copy": "off", ++ "tx_scatter_gather": "on", ++ "tx_scatter_gather_fraglist": "off [fixed]", ++ "tx_sctp_segmentation": "off [fixed]", ++ "tx_tcp6_segmentation": "on", ++ "tx_tcp_ecn_segmentation": "on", ++ "tx_tcp_mangleid_segmentation": "off", ++ "tx_tcp_segmentation": "on", ++ "tx_tunnel_remcsum_segmentation": "off [fixed]", ++ "tx_udp_segmentation": "off [fixed]", ++ "tx_udp_tnl_csum_segmentation": "off [fixed]", ++ "tx_udp_tnl_segmentation": "off [fixed]", ++ "tx_vlan_offload": "off [fixed]", ++ "tx_vlan_stag_hw_insert": "off [fixed]", ++ "vlan_challenged": "off [fixed]" ++ }, ++ "hw_timestamp_filters": [], ++ "ipv4": { ++ "address": "192.168.122.210", ++ "broadcast": "192.168.122.255", ++ "netmask": "255.255.255.0", ++ "network": "192.168.122.0" ++ }, ++ "ipv6": [ ++ { ++ "address": "fe80::5054:ff:fed6:b9c8", ++ "prefix": "64", ++ "scope": "link" ++ } ++ ], ++ "macaddress": "52:54:00:d6:b9:c8", ++ "module": "virtio_net", ++ "mtu": 1500, ++ "pciid": "virtio0", ++ "promisc": false, ++ "speed": -1, ++ "timestamping": [], ++ "type": "ether" ++ }, ++ "ansible_fibre_channel_wwn": [], ++ "ansible_fips": false, ++ "ansible_form_factor": "Other", ++ "ansible_fqdn": "uyuni-stable-min-sles15sp3.tf.local", ++ "ansible_hostname": "uyuni-stable-min-sles15sp3", ++ "ansible_hostnqn": "", ++ "ansible_interfaces": [ ++ "eth0", ++ "lo" ++ ], ++ "ansible_is_chroot": false, ++ "ansible_iscsi_iqn": "iqn.1996-04.de.suse:01:e4116885f7c", ++ "ansible_kernel": "5.3.18-59.24-default", ++ "ansible_kernel_version": "#1 SMP Mon Sep 13 15:06:42 UTC 2021 (2f872ea)", ++ "ansible_lo": { ++ "active": true, ++ "device": "lo", ++ 
"features": { ++ "esp_hw_offload": "off [fixed]", ++ "esp_tx_csum_hw_offload": "off [fixed]", ++ "fcoe_mtu": "off [fixed]", ++ "generic_receive_offload": "on", ++ "generic_segmentation_offload": "on", ++ "highdma": "on [fixed]", ++ "hw_tc_offload": "off [fixed]", ++ "l2_fwd_offload": "off [fixed]", ++ "large_receive_offload": "off [fixed]", ++ "loopback": "on [fixed]", ++ "macsec_hw_offload": "off [fixed]", ++ "netns_local": "on [fixed]", ++ "ntuple_filters": "off [fixed]", ++ "receive_hashing": "off [fixed]", ++ "rx_all": "off [fixed]", ++ "rx_checksumming": "on [fixed]", ++ "rx_fcs": "off [fixed]", ++ "rx_gro_hw": "off [fixed]", ++ "rx_gro_list": "off [fixed]", ++ "rx_udp_tunnel_port_offload": "off [fixed]", ++ "rx_vlan_filter": "off [fixed]", ++ "rx_vlan_offload": "off [fixed]", ++ "rx_vlan_stag_filter": "off [fixed]", ++ "rx_vlan_stag_hw_parse": "off [fixed]", ++ "scatter_gather": "on", ++ "tcp_segmentation_offload": "on", ++ "tls_hw_record": "off [fixed]", ++ "tls_hw_rx_offload": "off [fixed]", ++ "tls_hw_tx_offload": "off [fixed]", ++ "tx_checksum_fcoe_crc": "off [fixed]", ++ "tx_checksum_ip_generic": "on [fixed]", ++ "tx_checksum_ipv4": "off [fixed]", ++ "tx_checksum_ipv6": "off [fixed]", ++ "tx_checksum_sctp": "on [fixed]", ++ "tx_checksumming": "on", ++ "tx_esp_segmentation": "off [fixed]", ++ "tx_fcoe_segmentation": "off [fixed]", ++ "tx_gre_csum_segmentation": "off [fixed]", ++ "tx_gre_segmentation": "off [fixed]", ++ "tx_gso_list": "off [fixed]", ++ "tx_gso_partial": "off [fixed]", ++ "tx_gso_robust": "off [fixed]", ++ "tx_ipxip4_segmentation": "off [fixed]", ++ "tx_ipxip6_segmentation": "off [fixed]", ++ "tx_lockless": "on [fixed]", ++ "tx_nocache_copy": "off [fixed]", ++ "tx_scatter_gather": "on [fixed]", ++ "tx_scatter_gather_fraglist": "on [fixed]", ++ "tx_sctp_segmentation": "on", ++ "tx_tcp6_segmentation": "on", ++ "tx_tcp_ecn_segmentation": "on", ++ "tx_tcp_mangleid_segmentation": "on", ++ "tx_tcp_segmentation": "on", ++ 
"tx_tunnel_remcsum_segmentation": "off [fixed]", ++ "tx_udp_segmentation": "off [fixed]", ++ "tx_udp_tnl_csum_segmentation": "off [fixed]", ++ "tx_udp_tnl_segmentation": "off [fixed]", ++ "tx_vlan_offload": "off [fixed]", ++ "tx_vlan_stag_hw_insert": "off [fixed]", ++ "vlan_challenged": "on [fixed]" ++ }, ++ "hw_timestamp_filters": [], ++ "ipv4": { ++ "address": "127.0.0.1", ++ "broadcast": "", ++ "netmask": "255.0.0.0", ++ "network": "127.0.0.0" ++ }, ++ "ipv6": [ ++ { ++ "address": "::1", ++ "prefix": "128", ++ "scope": "host" ++ } ++ ], ++ "mtu": 65536, ++ "promisc": false, ++ "timestamping": [], ++ "type": "loopback" ++ }, ++ "ansible_local": {}, ++ "ansible_lsb": {}, ++ "ansible_lvm": { ++ "lvs": {}, ++ "pvs": {}, ++ "vgs": {} ++ }, ++ "ansible_machine": "x86_64", ++ "ansible_machine_id": "4ea4c287c4d5498878452138614996c4", ++ "ansible_memfree_mb": 3351, ++ "ansible_memory_mb": { ++ "nocache": { ++ "free": 3540, ++ "used": 386 ++ }, ++ "real": { ++ "free": 3351, ++ "total": 3926, ++ "used": 575 ++ }, ++ "swap": { ++ "cached": 0, ++ "free": 0, ++ "total": 0, ++ "used": 0 ++ } ++ }, ++ "ansible_memtotal_mb": 3926, ++ "ansible_mounts": [ ++ { ++ "block_available": 50812991, ++ "block_size": 4096, ++ "block_total": 52418724, ++ "block_used": 1605733, ++ "device": "/dev/vda3", ++ "fstype": "xfs", ++ "inode_available": 104636905, ++ "inode_total": 104839104, ++ "inode_used": 202199, ++ "mount": "/", ++ "options": "rw,relatime,attr2,inode64,logbufs=8,logbsize=32k,noquota", ++ "size_available": 208130011136, ++ "size_total": 214707093504, ++ "uuid": "2c17af10-fc7c-4768-ab24-e6700f2dc588" ++ }, ++ { ++ "block_available": 14997, ++ "block_size": 2048, ++ "block_total": 16853, ++ "block_used": 1856, ++ "device": "/dev/vda2", ++ "fstype": "vfat", ++ "inode_available": 0, ++ "inode_total": 0, ++ "inode_used": 0, ++ "mount": "/boot/efi", ++ "options": "rw,relatime,fmask=0022,dmask=0022,codepage=437,iocharset=iso8859-1,shortname=mixed,errors=remount-ro", ++ "size_available": 
30713856, ++ "size_total": 34514944, ++ "uuid": "DB16-E900" ++ } ++ ], ++ "ansible_nodename": "uyuni-stable-min-sles15sp3", ++ "ansible_os_family": "Suse", ++ "ansible_pkg_mgr": "zypper", ++ "ansible_proc_cmdline": { ++ "BOOT_IMAGE": "/boot/vmlinuz-5.3.18-59.24-default", ++ "console": [ ++ "ttyS0,115200", ++ "tty0" ++ ], ++ "net.ifnames": "0", ++ "plymouth.enable": "0", ++ "quiet": true, ++ "root": "UUID=2c17af10-fc7c-4768-ab24-e6700f2dc588" ++ }, ++ "ansible_processor": [ ++ "0", ++ "GenuineIntel", ++ "QEMU Virtual CPU version 2.5+" ++ ], ++ "ansible_processor_cores": 1, ++ "ansible_processor_count": 1, ++ "ansible_processor_threads_per_core": 1, ++ "ansible_processor_vcpus": 1, ++ "ansible_product_name": "Standard PC (i440FX + PIIX, 1996)", ++ "ansible_product_serial": "NA", ++ "ansible_product_uuid": "10721e79-de64-4ad6-8cca-63f00905e33a", ++ "ansible_product_version": "pc-i440fx-6.1", ++ "ansible_python": { ++ "executable": "/usr/bin/python3.6", ++ "has_sslcontext": true, ++ "type": "cpython", ++ "version": { ++ "major": 3, ++ "micro": 13, ++ "minor": 6, ++ "releaselevel": "final", ++ "serial": 0 ++ }, ++ "version_info": [ ++ 3, ++ 6, ++ 13, ++ "final", ++ 0 ++ ] ++ }, ++ "ansible_python_version": "3.6.13", ++ "ansible_real_group_id": 0, ++ "ansible_real_user_id": 0, ++ "ansible_selinux": { ++ "status": "disabled" ++ }, ++ "ansible_selinux_python_present": true, ++ "ansible_service_mgr": "systemd", ++ "ansible_ssh_host_key_dsa_public": 
"AAAAB3NzaC1kc3MAAACBAIDAuJ+oAZ+pvXNJaagO0odTNCZFCd13m6fM8AI/TaOV60jaANBh9Vn92Oj2MwG1lKuOXCNy7auMEMflt581H3eTL75gMjvwJzu+DULxM/unvI+6Pcqx4BH1LWFo6AtQn/orh3iaSy1e2i7ql6JJ+RIe2K467x0IFHEccW9AWye3AAAAFQDB0qTYcD2yjAo0d3jl1EQBTjo36QAAAIBxINNBX41qeQxKiZSID3gSeMJ1qLANJOy6P7mOzM5JkwUyjTmJXDMnU5uOflt9BZILypsqfF41VyXkCFVIwpChhJEStjkdsr4oU1NfYO/PTZDyiGKb/1uam6vUDkejjMH0k4IALsuOf6SUQ+nDJEauqKr0IcFKojK58ozL/V3ulwAAAIAY74j1hPcE7Xu2XANiaE4+426UkMmOl3EWZJK6rDARLhTS4PHcYWcHEBqvsMDGYCzI/QijXC/0tCfnbkVgWcwEFEBdaBNAy2fNfBzmmgkUpuYdwwjXI41Gq80aWiC1zZ8E8VjIjTVHqBoPyIeF6gg2/jKy8adFmK+sD7OvW+EDaQ==", ++ "ansible_ssh_host_key_ecdsa_public": "AAAAE2VjZHNhLXNoYTItbmlzdHAyNTYAAAAIbmlzdHAyNTYAAABBBGmKiPoVBzZKB8NLyIfucHWV6iczW1/Rc+80SCqg3kUpPGpvI6YFJAjdAeNwzR9Y561guqtuk6MUCQ5FmsvYorc=", ++ "ansible_ssh_host_key_ed25519_public": "AAAAC3NzaC1lZDI1NTE5AAAAIPjJby3xFaEPRohlxkZ++xmB25m9DRZ7Unl3kVMemmzc", ++ "ansible_ssh_host_key_rsa_public": "AAAAB3NzaC1yc2EAAAADAQABAAABgQCpwVJh+FiVo5RbOWc9dYGVXL/wqiCjq2xqgFpERv7239qTFPZod3w2F37PY+FBK7m0+7PCRWCcnqw94GBICMb+QVJDspvBTN78kzvwSzvRg2K0CX0Qqj+9rpwh0FuD8E57i6R3l+AhAdQEBnUXUU4bj5ZzlHyoRXBxswyq7ret6Q8us6RGX+dlrQ9W+DaXC6r3lptQeOFlNyATyaFEI5wPWQU86hkoia2sCPxN5T/y+HcJDz4S45nEIm0IVpR2ZH0/mdqaOqF2tg7BuUHfhMRBefgQDp6vur1NORVxUYavHP3P0Bw//JNlzuRqb/NV5feM52WTqPujftS+25kTCGUm7X5jZV/PSfqrv5THnXVWy8pO/YAUnyc5ad08Gh0R/LvNPKxmbjYvSDcZ23uCa31nZ7oTwF1nlfGuGLJR5+PhEurtJrrgLL4m9rttHuf++AM8lOZncvwz6BD4e/YmorGcaCk4p7C6Ink6EBrYS36ekXcVkHDE0k5k09xSXHKBR4U=", ++ "ansible_swapfree_mb": 0, ++ "ansible_swaptotal_mb": 0, ++ "ansible_system": "Linux", ++ "ansible_system_vendor": "QEMU", ++ "ansible_uptime_seconds": 277, ++ "ansible_user_dir": "/root", ++ "ansible_user_gecos": "root", ++ "ansible_user_gid": 0, ++ "ansible_user_id": "root", ++ "ansible_user_shell": "/bin/bash", ++ "ansible_user_uid": 0, ++ "ansible_userspace_architecture": "x86_64", ++ "ansible_userspace_bits": "64", ++ "ansible_virtualization_role": "guest", ++ "ansible_virtualization_type": "kvm", ++ 
"discovered_interpreter_python": "/usr/bin/python3.6", ++ "gather_subset": [ ++ "all" ++ ], ++ "module_setup": true ++ }, ++ "changed": false, ++ "deprecations": [], ++ "warnings": [ ++ "Platform linux on host uyuni-stable-min-sles15sp3.tf.local is using the discovered Python interpreter at /usr/bin/python3.6, but future installation of another Python interpreter could change this. See https://docs.ansible.com/ansible/2.9/reference_appendices/interpreter_discovery.html for more information." ++ ] ++ } ++ }, ++ "task": { ++ "duration": { ++ "end": "2021-10-01T09:36:25.300423Z", ++ "start": "2021-10-01T09:36:22.632928Z" ++ }, ++ "id": "525400d6-b9c8-afcf-67b6-00000000000f", ++ "name": "Gathering Facts" ++ } ++ }, ++ { ++ "hosts": { ++ "uyuni-stable-min-centos7.tf.local": { ++ "_ansible_no_log": false, ++ "action": "yum", ++ "changed": false, ++ "changes": { ++ "installed": [], ++ "updated": [] ++ }, ++ "invocation": { ++ "module_args": { ++ "allow_downgrade": false, ++ "autoremove": false, ++ "bugfix": false, ++ "conf_file": null, ++ "disable_excludes": null, ++ "disable_gpg_check": false, ++ "disable_plugin": [], ++ "disablerepo": [], ++ "download_dir": null, ++ "download_only": false, ++ "enable_plugin": [], ++ "enablerepo": [], ++ "exclude": [], ++ "install_repoquery": true, ++ "install_weak_deps": true, ++ "installroot": "/", ++ "list": null, ++ "lock_timeout": 30, ++ "name": [ ++ "git" ++ ], ++ "releasever": null, ++ "security": false, ++ "skip_broken": false, ++ "state": "latest", ++ "update_cache": false, ++ "update_only": false, ++ "use_backend": "auto", ++ "validate_certs": true ++ } ++ }, ++ "msg": "", ++ "rc": 0, ++ "results": [ ++ "All packages providing git are up to date", ++ "" ++ ] ++ }, ++ "uyuni-stable-min-sles15sp3.tf.local": { ++ "_ansible_no_log": false, ++ "action": "yum", ++ "changed": false, ++ "skip_reason": "Conditional result was False", ++ "skipped": true ++ } ++ }, ++ "task": { ++ "duration": { ++ "end": "2021-10-01T09:36:28.556648Z", ++ 
"start": "2021-10-01T09:36:25.321171Z" ++ }, ++ "id": "525400d6-b9c8-afcf-67b6-000000000009", ++ "name": "install git CentOS" ++ } ++ }, ++ { ++ "hosts": { ++ "uyuni-stable-min-centos7.tf.local": { ++ "_ansible_no_log": false, ++ "action": "zypper", ++ "changed": false, ++ "skip_reason": "Conditional result was False", ++ "skipped": true ++ }, ++ "uyuni-stable-min-sles15sp3.tf.local": { ++ "_ansible_no_log": false, ++ "action": "zypper", ++ "changed": false, ++ "cmd": [ ++ "/usr/bin/zypper", ++ "--quiet", ++ "--non-interactive", ++ "--xmlout", ++ "install", ++ "--type", ++ "package", ++ "--auto-agree-with-licenses", ++ "--no-recommends", ++ "--", ++ "+git-core" ++ ], ++ "invocation": { ++ "module_args": { ++ "disable_gpg_check": false, ++ "disable_recommends": true, ++ "extra_args": null, ++ "extra_args_precommand": null, ++ "force": false, ++ "name": [ ++ "git-core" ++ ], ++ "oldpackage": false, ++ "state": "latest", ++ "type": "package", ++ "update_cache": false ++ } ++ }, ++ "name": [ ++ "git-core" ++ ], ++ "rc": 0, ++ "state": "latest", ++ "update_cache": false ++ } ++ }, ++ "task": { ++ "duration": { ++ "end": "2021-10-01T09:36:32.254044Z", ++ "start": "2021-10-01T09:36:28.572234Z" ++ }, ++ "id": "525400d6-b9c8-afcf-67b6-00000000000a", ++ "name": "install git SUSE" ++ } ++ }, ++ { ++ "hosts": { ++ "uyuni-stable-min-centos7.tf.local": { ++ "_ansible_no_log": false, ++ "action": "apt", ++ "changed": false, ++ "skip_reason": "Conditional result was False", ++ "skipped": true ++ }, ++ "uyuni-stable-min-sles15sp3.tf.local": { ++ "_ansible_no_log": false, ++ "action": "apt", ++ "changed": false, ++ "skip_reason": "Conditional result was False", ++ "skipped": true ++ } ++ }, ++ "task": { ++ "duration": { ++ "end": "2021-10-01T09:36:32.534379Z", ++ "start": "2021-10-01T09:36:32.273798Z" ++ }, ++ "id": "525400d6-b9c8-afcf-67b6-00000000000b", ++ "name": "install git Ubuntu" ++ } ++ } ++ ] ++ } ++ ], ++ "stats": { ++ "uyuni-stable-min-centos7.tf.local": { ++ "changed": 
0, ++ "failures": 0, ++ "ignored": 0, ++ "ok": 2, ++ "rescued": 0, ++ "skipped": 2, ++ "unreachable": 0 ++ }, ++ "uyuni-stable-min-sles15sp3.tf.local": { ++ "changed": 0, ++ "failures": 0, ++ "ignored": 0, ++ "ok": 2, ++ "rescued": 0, ++ "skipped": 2, ++ "unreachable": 0 ++ } ++ }, ++ "retcode": 0 ++} +-- +2.34.1 + + diff --git a/3003.3-postgresql-json-support-in-pillar-423.patch b/3003.3-postgresql-json-support-in-pillar-423.patch new file mode 100644 index 0000000..ca9d794 --- /dev/null +++ b/3003.3-postgresql-json-support-in-pillar-423.patch @@ -0,0 +1,1008 @@ +From 5c2624552e1ac2dbec3b54ff8c147ae50494969e Mon Sep 17 00:00:00 2001 +From: Alexander Graul +Date: Wed, 19 Jan 2022 17:25:37 +0100 +Subject: [PATCH] 3003.3 - postgresql JSON support in pillar (#423) + +* Allow single field returns from SQL pillar + +Several SQL databases support native JSON storage. When storing +pillars in this way, SQL query result already returns dict and +without the need to have key column. + +* Add and adapt tests for as_json sql mode + +* Add missing entries to rest of sql pillar tests + +* Add changelog entry + +* Fix the sql_base pillar merge for as_json + +Use salt.utils.update() to recursively merge the JSON dicts of the +returned SQL queries. 
+ +Co-authored-by: Ondrej Holecek +--- + changelog/60905.added | 1 + + salt/pillar/sql_base.py | 38 +++++++++++++++ + tests/pytests/unit/pillar/test_sql_base.py | 43 ++++++++++++++++ + tests/unit/pillar/test_mysql.py | 57 ++++++++++++++++++++++ + tests/unit/pillar/test_sqlcipher.py | 32 ++++++++++++ + tests/unit/pillar/test_sqlite3.py | 32 ++++++++++++ + 6 files changed, 203 insertions(+) + create mode 100644 changelog/60905.added + create mode 100644 tests/pytests/unit/pillar/test_sql_base.py + +diff --git a/changelog/60905.added b/changelog/60905.added +new file mode 100644 +index 0000000000..3fe39286a8 +--- /dev/null ++++ b/changelog/60905.added +@@ -0,0 +1 @@ ++Support querying for JSON data in SQL external pillar +diff --git a/salt/pillar/sql_base.py b/salt/pillar/sql_base.py +index f7d87105af..8020d5503b 100644 +--- a/salt/pillar/sql_base.py ++++ b/salt/pillar/sql_base.py +@@ -136,6 +136,33 @@ These columns define list grouping + The range for with_lists is 1 to number_of_fields, inclusive. + Numbers outside this range are ignored. + ++If you specify `as_json: True` in the mapping expression and query only for ++single value, returned data are considered in JSON format and will be merged ++directly. ++ ++.. code-block:: yaml ++ ++ ext_pillar: ++ - sql_base: ++ - query: "SELECT json_pillar FROM pillars WHERE minion_id = %s" ++ as_json: True ++ ++The processed JSON entries are recursively merged in a single dictionary. ++Additionnaly if `as_list` is set to `True` the lists will be merged in case of collision. ++ ++For instance the following rows: ++ ++ {"a": {"b": [1, 2]}, "c": 3} ++ {"a": {"b": [1, 3]}, "d": 4} ++ ++will result in the following pillar with `as_list=False` ++ ++ {"a": {"b": [1, 3], "c": 3, "d": 4} ++ ++and in with `as_list=True` ++ ++ {"a": {"b": [1, 2, 3], "c": 3, "d": 4} ++ + Finally, if you pass the queries in via a mapping, the key will be the + first level name where as passing them in as a list will place them in the + root. 
This isolates the query results into their own subtrees. +@@ -171,6 +198,7 @@ More complete example for MySQL (to also show configuration) + import abc # Added in python2.6 so always available + import logging + ++from salt.utils.dictupdate import update + from salt.utils.odict import OrderedDict + + # Please don't strip redundant parentheses from this file. +@@ -200,6 +228,7 @@ class SqlBaseExtPillar(metaclass=abc.ABCMeta): + num_fields = 0 + depth = 0 + as_list = False ++ as_json = False + with_lists = None + ignore_null = False + +@@ -259,6 +288,7 @@ class SqlBaseExtPillar(metaclass=abc.ABCMeta): + "query": "", + "depth": 0, + "as_list": False, ++ "as_json": False, + "with_lists": None, + "ignore_null": False, + } +@@ -314,6 +344,13 @@ class SqlBaseExtPillar(metaclass=abc.ABCMeta): + for ret in rows: + # crd is the Current Return Data level, to make this non-recursive. + crd = self.focus ++ ++ # We have just one field without any key, assume returned row is already a dict ++ # aka JSON storage ++ if self.as_json and self.num_fields == 1: ++ crd = update(crd, ret[0], merge_lists=self.as_list) ++ continue ++ + # Walk and create dicts above the final layer + for i in range(0, self.depth - 1): + # At the end we'll use listify to find values to make a list of +@@ -433,6 +470,7 @@ class SqlBaseExtPillar(metaclass=abc.ABCMeta): + ) + self.enter_root(root) + self.as_list = details["as_list"] ++ self.as_json = details["as_json"] + if details["with_lists"]: + self.with_lists = details["with_lists"] + else: +diff --git a/tests/pytests/unit/pillar/test_sql_base.py b/tests/pytests/unit/pillar/test_sql_base.py +new file mode 100644 +index 0000000000..0d44c2d608 +--- /dev/null ++++ b/tests/pytests/unit/pillar/test_sql_base.py +@@ -0,0 +1,43 @@ ++import pytest ++import salt.pillar.sql_base as sql_base ++from tests.support.mock import MagicMock ++ ++ ++class FakeExtPillar(sql_base.SqlBaseExtPillar): ++ """ ++ Mock SqlBaseExtPillar implementation for testing purpose ++ """ ++ ++ 
@classmethod ++ def _db_name(cls): ++ return "fake" ++ ++ def _get_cursor(self): ++ return MagicMock() ++ ++ ++@pytest.mark.parametrize("as_list", [True, False]) ++def test_process_results_as_json(as_list): ++ """ ++ Validates merging of dict values returned from JSON datatype. ++ """ ++ return_data = FakeExtPillar() ++ return_data.as_list = as_list ++ return_data.as_json = True ++ return_data.with_lists = None ++ return_data.enter_root(None) ++ return_data.process_fields(["json_data"], 0) ++ test_dicts = [ ++ ({"a": [1]},), ++ ({"b": [2, 3]},), ++ ({"a": [4]},), ++ ({"c": {"d": [4, 5], "e": 6}},), ++ ({"f": [{"g": 7, "h": "test"}], "c": {"g": 8}},), ++ ] ++ return_data.process_results(test_dicts) ++ assert return_data.result == { ++ "a": [1, 4] if as_list else [4], ++ "b": [2, 3], ++ "c": {"d": [4, 5], "e": 6, "g": 8}, ++ "f": [{"g": 7, "h": "test"}], ++ } +diff --git a/tests/unit/pillar/test_mysql.py b/tests/unit/pillar/test_mysql.py +index ddfb67d230..de6212b2c8 100644 +--- a/tests/unit/pillar/test_mysql.py ++++ b/tests/unit/pillar/test_mysql.py +@@ -18,6 +18,7 @@ class MysqlPillarTestCase(TestCase): + "query": "SELECT blah", + "depth": 0, + "as_list": False, ++ "as_json": False, + "with_lists": None, + "ignore_null": False, + }, +@@ -39,6 +40,7 @@ class MysqlPillarTestCase(TestCase): + {"query": "SELECT blah7", "as_list": True}, + {"query": "SELECT blah8", "with_lists": "1"}, + {"query": "SELECT blah9", "with_lists": "1,2"}, ++ {"query": "SELECT json1", "as_json": True}, + ], + {}, + ) +@@ -51,6 +53,7 @@ class MysqlPillarTestCase(TestCase): + "query": "SELECT blah", + "depth": 0, + "as_list": False, ++ "as_json": False, + "with_lists": None, + "ignore_null": False, + }, +@@ -61,6 +64,7 @@ class MysqlPillarTestCase(TestCase): + "query": "SELECT blah2", + "depth": 0, + "as_list": False, ++ "as_json": False, + "with_lists": None, + "ignore_null": False, + }, +@@ -71,6 +75,7 @@ class MysqlPillarTestCase(TestCase): + "query": "SELECT blah3", + "depth": 0, + 
"as_list": False, ++ "as_json": False, + "with_lists": None, + "ignore_null": False, + }, +@@ -81,6 +86,7 @@ class MysqlPillarTestCase(TestCase): + "query": "SELECT blah4", + "depth": 2, + "as_list": False, ++ "as_json": False, + "with_lists": None, + "ignore_null": False, + }, +@@ -91,6 +97,7 @@ class MysqlPillarTestCase(TestCase): + "query": "SELECT blah5", + "depth": 0, + "as_list": False, ++ "as_json": False, + "with_lists": None, + "ignore_null": False, + }, +@@ -101,6 +108,7 @@ class MysqlPillarTestCase(TestCase): + "query": "SELECT blah6", + "depth": 2, + "as_list": False, ++ "as_json": False, + "with_lists": None, + "ignore_null": False, + }, +@@ -111,6 +119,7 @@ class MysqlPillarTestCase(TestCase): + "query": "SELECT blah7", + "depth": 0, + "as_list": True, ++ "as_json": False, + "with_lists": None, + "ignore_null": False, + }, +@@ -121,6 +130,7 @@ class MysqlPillarTestCase(TestCase): + "query": "SELECT blah8", + "depth": 0, + "as_list": False, ++ "as_json": False, + "with_lists": [1], + "ignore_null": False, + }, +@@ -131,10 +141,22 @@ class MysqlPillarTestCase(TestCase): + "query": "SELECT blah9", + "depth": 0, + "as_list": False, ++ "as_json": False, + "with_lists": [1, 2], + "ignore_null": False, + }, + ], ++ [ ++ None, ++ { ++ "query": "SELECT json1", ++ "depth": 0, ++ "as_list": False, ++ "as_json": True, ++ "with_lists": None, ++ "ignore_null": False, ++ }, ++ ], + ], + qbuffer, + ) +@@ -151,6 +173,7 @@ class MysqlPillarTestCase(TestCase): + "5": {"query": "SELECT blah5"}, + "6": {"query": "SELECT blah6", "depth": 2}, + "7": {"query": "SELECT blah7", "as_list": True}, ++ "8": {"query": "SELECT json1", "as_json": True}, + }, + ) + qbuffer = return_data.extract_queries(args, kwargs) +@@ -162,6 +185,7 @@ class MysqlPillarTestCase(TestCase): + "query": "SELECT blah", + "depth": 0, + "as_list": False, ++ "as_json": False, + "with_lists": None, + "ignore_null": False, + }, +@@ -172,6 +196,7 @@ class MysqlPillarTestCase(TestCase): + "query": "SELECT 
blah2", + "depth": 0, + "as_list": False, ++ "as_json": False, + "with_lists": None, + "ignore_null": False, + }, +@@ -182,6 +207,7 @@ class MysqlPillarTestCase(TestCase): + "query": "SELECT blah3", + "depth": 0, + "as_list": False, ++ "as_json": False, + "with_lists": None, + "ignore_null": False, + }, +@@ -192,6 +218,7 @@ class MysqlPillarTestCase(TestCase): + "query": "SELECT blah4", + "depth": 2, + "as_list": False, ++ "as_json": False, + "with_lists": None, + "ignore_null": False, + }, +@@ -202,6 +229,7 @@ class MysqlPillarTestCase(TestCase): + "query": "SELECT blah5", + "depth": 0, + "as_list": False, ++ "as_json": False, + "with_lists": None, + "ignore_null": False, + }, +@@ -212,6 +240,7 @@ class MysqlPillarTestCase(TestCase): + "query": "SELECT blah6", + "depth": 2, + "as_list": False, ++ "as_json": False, + "with_lists": None, + "ignore_null": False, + }, +@@ -222,6 +251,18 @@ class MysqlPillarTestCase(TestCase): + "query": "SELECT blah7", + "depth": 0, + "as_list": True, ++ "as_json": False, ++ "with_lists": None, ++ "ignore_null": False, ++ }, ++ ], ++ [ ++ "8", ++ { ++ "query": "SELECT json1", ++ "depth": 0, ++ "as_list": False, ++ "as_json": True, + "with_lists": None, + "ignore_null": False, + }, +@@ -253,6 +294,7 @@ class MysqlPillarTestCase(TestCase): + "query": "SELECT blah1", + "depth": 0, + "as_list": False, ++ "as_json": False, + "with_lists": None, + "ignore_null": False, + }, +@@ -263,6 +305,7 @@ class MysqlPillarTestCase(TestCase): + "query": "SELECT blah2", + "depth": 2, + "as_list": False, ++ "as_json": False, + "with_lists": None, + "ignore_null": False, + }, +@@ -273,6 +316,7 @@ class MysqlPillarTestCase(TestCase): + "query": "SELECT blah3", + "depth": 0, + "as_list": True, ++ "as_json": False, + "with_lists": None, + "ignore_null": False, + }, +@@ -283,6 +327,7 @@ class MysqlPillarTestCase(TestCase): + "query": "SELECT blah1", + "depth": 0, + "as_list": False, ++ "as_json": False, + "with_lists": None, + "ignore_null": False, + }, +@@ 
-293,6 +338,7 @@ class MysqlPillarTestCase(TestCase): + "query": "SELECT blah2", + "depth": 2, + "as_list": False, ++ "as_json": False, + "with_lists": None, + "ignore_null": False, + }, +@@ -303,6 +349,7 @@ class MysqlPillarTestCase(TestCase): + "query": "SELECT blah3", + "depth": 0, + "as_list": True, ++ "as_json": False, + "with_lists": None, + "ignore_null": False, + }, +@@ -341,6 +388,7 @@ class MysqlPillarTestCase(TestCase): + "query": "SELECT blah", + "depth": 0, + "as_list": False, ++ "as_json": False, + "with_lists": None, + "ignore_null": False, + }, +@@ -351,6 +399,7 @@ class MysqlPillarTestCase(TestCase): + "query": "SELECT blah2", + "depth": 0, + "as_list": False, ++ "as_json": False, + "with_lists": None, + "ignore_null": False, + }, +@@ -361,6 +410,7 @@ class MysqlPillarTestCase(TestCase): + "query": "SELECT blah3", + "depth": 0, + "as_list": False, ++ "as_json": False, + "with_lists": None, + "ignore_null": False, + }, +@@ -371,6 +421,7 @@ class MysqlPillarTestCase(TestCase): + "query": "SELECT blah4", + "depth": 2, + "as_list": False, ++ "as_json": False, + "with_lists": None, + "ignore_null": False, + }, +@@ -381,6 +432,7 @@ class MysqlPillarTestCase(TestCase): + "query": "SELECT blah5", + "depth": 0, + "as_list": False, ++ "as_json": False, + "with_lists": None, + "ignore_null": False, + }, +@@ -391,6 +443,7 @@ class MysqlPillarTestCase(TestCase): + "query": "SELECT blah6", + "depth": 0, + "as_list": False, ++ "as_json": False, + "with_lists": None, + "ignore_null": False, + }, +@@ -401,6 +454,7 @@ class MysqlPillarTestCase(TestCase): + "query": "SELECT blah7", + "depth": 2, + "as_list": False, ++ "as_json": False, + "with_lists": None, + "ignore_null": False, + }, +@@ -411,6 +465,7 @@ class MysqlPillarTestCase(TestCase): + "query": "SELECT blah8", + "depth": 0, + "as_list": True, ++ "as_json": False, + "with_lists": None, + "ignore_null": False, + }, +@@ -432,6 +487,7 @@ class MysqlPillarTestCase(TestCase): + "query": "SELECT blah", + "depth": 
0, + "as_list": False, ++ "as_json": False, + "with_lists": None, + "ignore_null": False, + }, +@@ -442,6 +498,7 @@ class MysqlPillarTestCase(TestCase): + "query": "SELECT blah2", + "depth": 0, + "as_list": False, ++ "as_json": False, + "with_lists": None, + "ignore_null": False, + }, +diff --git a/tests/unit/pillar/test_sqlcipher.py b/tests/unit/pillar/test_sqlcipher.py +index 99edcb094c..1330c3bbfc 100644 +--- a/tests/unit/pillar/test_sqlcipher.py ++++ b/tests/unit/pillar/test_sqlcipher.py +@@ -30,6 +30,7 @@ class SQLCipherPillarTestCase(TestCase): + "query": "SELECT blah", + "depth": 0, + "as_list": False, ++ "as_json": False, + "with_lists": None, + "ignore_null": False, + }, +@@ -40,6 +41,7 @@ class SQLCipherPillarTestCase(TestCase): + "query": "SELECT blah2", + "depth": 0, + "as_list": False, ++ "as_json": False, + "with_lists": None, + "ignore_null": False, + }, +@@ -50,6 +52,7 @@ class SQLCipherPillarTestCase(TestCase): + "query": "SELECT blah3", + "depth": 0, + "as_list": False, ++ "as_json": False, + "with_lists": None, + "ignore_null": False, + }, +@@ -60,6 +63,7 @@ class SQLCipherPillarTestCase(TestCase): + "query": "SELECT blah4", + "depth": 2, + "as_list": False, ++ "as_json": False, + "with_lists": None, + "ignore_null": False, + }, +@@ -70,6 +74,7 @@ class SQLCipherPillarTestCase(TestCase): + "query": "SELECT blah5", + "depth": 0, + "as_list": False, ++ "as_json": False, + "with_lists": None, + "ignore_null": False, + }, +@@ -80,6 +85,7 @@ class SQLCipherPillarTestCase(TestCase): + "query": "SELECT blah6", + "depth": 2, + "as_list": False, ++ "as_json": False, + "with_lists": None, + "ignore_null": False, + }, +@@ -90,6 +96,7 @@ class SQLCipherPillarTestCase(TestCase): + "query": "SELECT blah7", + "depth": 0, + "as_list": True, ++ "as_json": False, + "with_lists": None, + "ignore_null": False, + }, +@@ -100,6 +107,7 @@ class SQLCipherPillarTestCase(TestCase): + "query": "SELECT blah8", + "depth": 0, + "as_list": False, ++ "as_json": False, + 
"with_lists": [1], + "ignore_null": False, + }, +@@ -110,6 +118,7 @@ class SQLCipherPillarTestCase(TestCase): + "query": "SELECT blah9", + "depth": 0, + "as_list": False, ++ "as_json": False, + "with_lists": [1, 2], + "ignore_null": False, + }, +@@ -141,6 +150,7 @@ class SQLCipherPillarTestCase(TestCase): + "query": "SELECT blah", + "depth": 0, + "as_list": False, ++ "as_json": False, + "with_lists": None, + "ignore_null": False, + }, +@@ -151,6 +161,7 @@ class SQLCipherPillarTestCase(TestCase): + "query": "SELECT blah2", + "depth": 0, + "as_list": False, ++ "as_json": False, + "with_lists": None, + "ignore_null": False, + }, +@@ -161,6 +172,7 @@ class SQLCipherPillarTestCase(TestCase): + "query": "SELECT blah3", + "depth": 0, + "as_list": False, ++ "as_json": False, + "with_lists": None, + "ignore_null": False, + }, +@@ -171,6 +183,7 @@ class SQLCipherPillarTestCase(TestCase): + "query": "SELECT blah4", + "depth": 2, + "as_list": False, ++ "as_json": False, + "with_lists": None, + "ignore_null": False, + }, +@@ -181,6 +194,7 @@ class SQLCipherPillarTestCase(TestCase): + "query": "SELECT blah5", + "depth": 0, + "as_list": False, ++ "as_json": False, + "with_lists": None, + "ignore_null": False, + }, +@@ -191,6 +205,7 @@ class SQLCipherPillarTestCase(TestCase): + "query": "SELECT blah6", + "depth": 2, + "as_list": False, ++ "as_json": False, + "with_lists": None, + "ignore_null": False, + }, +@@ -201,6 +216,7 @@ class SQLCipherPillarTestCase(TestCase): + "query": "SELECT blah7", + "depth": 0, + "as_list": True, ++ "as_json": False, + "with_lists": None, + "ignore_null": False, + }, +@@ -232,6 +248,7 @@ class SQLCipherPillarTestCase(TestCase): + "query": "SELECT blah1", + "depth": 0, + "as_list": False, ++ "as_json": False, + "with_lists": None, + "ignore_null": False, + }, +@@ -242,6 +259,7 @@ class SQLCipherPillarTestCase(TestCase): + "query": "SELECT blah2", + "depth": 2, + "as_list": False, ++ "as_json": False, + "with_lists": None, + "ignore_null": False, + }, 
+@@ -252,6 +270,7 @@ class SQLCipherPillarTestCase(TestCase): + "query": "SELECT blah3", + "depth": 0, + "as_list": True, ++ "as_json": False, + "with_lists": None, + "ignore_null": False, + }, +@@ -262,6 +281,7 @@ class SQLCipherPillarTestCase(TestCase): + "query": "SELECT blah1", + "depth": 0, + "as_list": False, ++ "as_json": False, + "with_lists": None, + "ignore_null": False, + }, +@@ -272,6 +292,7 @@ class SQLCipherPillarTestCase(TestCase): + "query": "SELECT blah2", + "depth": 2, + "as_list": False, ++ "as_json": False, + "with_lists": None, + "ignore_null": False, + }, +@@ -282,6 +303,7 @@ class SQLCipherPillarTestCase(TestCase): + "query": "SELECT blah3", + "depth": 0, + "as_list": True, ++ "as_json": False, + "with_lists": None, + "ignore_null": False, + }, +@@ -320,6 +342,7 @@ class SQLCipherPillarTestCase(TestCase): + "query": "SELECT blah", + "depth": 0, + "as_list": False, ++ "as_json": False, + "with_lists": None, + "ignore_null": False, + }, +@@ -330,6 +353,7 @@ class SQLCipherPillarTestCase(TestCase): + "query": "SELECT blah2", + "depth": 0, + "as_list": False, ++ "as_json": False, + "with_lists": None, + "ignore_null": False, + }, +@@ -340,6 +364,7 @@ class SQLCipherPillarTestCase(TestCase): + "query": "SELECT blah3", + "depth": 0, + "as_list": False, ++ "as_json": False, + "with_lists": None, + "ignore_null": False, + }, +@@ -350,6 +375,7 @@ class SQLCipherPillarTestCase(TestCase): + "query": "SELECT blah4", + "depth": 2, + "as_list": False, ++ "as_json": False, + "with_lists": None, + "ignore_null": False, + }, +@@ -360,6 +386,7 @@ class SQLCipherPillarTestCase(TestCase): + "query": "SELECT blah5", + "depth": 0, + "as_list": False, ++ "as_json": False, + "with_lists": None, + "ignore_null": False, + }, +@@ -370,6 +397,7 @@ class SQLCipherPillarTestCase(TestCase): + "query": "SELECT blah6", + "depth": 0, + "as_list": False, ++ "as_json": False, + "with_lists": None, + "ignore_null": False, + }, +@@ -380,6 +408,7 @@ class 
SQLCipherPillarTestCase(TestCase): + "query": "SELECT blah7", + "depth": 2, + "as_list": False, ++ "as_json": False, + "with_lists": None, + "ignore_null": False, + }, +@@ -390,6 +419,7 @@ class SQLCipherPillarTestCase(TestCase): + "query": "SELECT blah8", + "depth": 0, + "as_list": True, ++ "as_json": False, + "with_lists": None, + "ignore_null": False, + }, +@@ -411,6 +441,7 @@ class SQLCipherPillarTestCase(TestCase): + "query": "SELECT blah", + "depth": 0, + "as_list": False, ++ "as_json": False, + "with_lists": None, + "ignore_null": False, + }, +@@ -421,6 +452,7 @@ class SQLCipherPillarTestCase(TestCase): + "query": "SELECT blah2", + "depth": 0, + "as_list": False, ++ "as_json": False, + "with_lists": None, + "ignore_null": False, + }, +diff --git a/tests/unit/pillar/test_sqlite3.py b/tests/unit/pillar/test_sqlite3.py +index 1d0b187729..fee651db32 100644 +--- a/tests/unit/pillar/test_sqlite3.py ++++ b/tests/unit/pillar/test_sqlite3.py +@@ -30,6 +30,7 @@ class SQLite3PillarTestCase(TestCase): + "query": "SELECT blah", + "depth": 0, + "as_list": False, ++ "as_json": False, + "with_lists": None, + "ignore_null": False, + }, +@@ -40,6 +41,7 @@ class SQLite3PillarTestCase(TestCase): + "query": "SELECT blah2", + "depth": 0, + "as_list": False, ++ "as_json": False, + "with_lists": None, + "ignore_null": False, + }, +@@ -50,6 +52,7 @@ class SQLite3PillarTestCase(TestCase): + "query": "SELECT blah3", + "depth": 0, + "as_list": False, ++ "as_json": False, + "with_lists": None, + "ignore_null": False, + }, +@@ -60,6 +63,7 @@ class SQLite3PillarTestCase(TestCase): + "query": "SELECT blah4", + "depth": 2, + "as_list": False, ++ "as_json": False, + "with_lists": None, + "ignore_null": False, + }, +@@ -70,6 +74,7 @@ class SQLite3PillarTestCase(TestCase): + "query": "SELECT blah5", + "depth": 0, + "as_list": False, ++ "as_json": False, + "with_lists": None, + "ignore_null": False, + }, +@@ -80,6 +85,7 @@ class SQLite3PillarTestCase(TestCase): + "query": "SELECT blah6", + 
"depth": 2, + "as_list": False, ++ "as_json": False, + "with_lists": None, + "ignore_null": False, + }, +@@ -90,6 +96,7 @@ class SQLite3PillarTestCase(TestCase): + "query": "SELECT blah7", + "depth": 0, + "as_list": True, ++ "as_json": False, + "with_lists": None, + "ignore_null": False, + }, +@@ -100,6 +107,7 @@ class SQLite3PillarTestCase(TestCase): + "query": "SELECT blah8", + "depth": 0, + "as_list": False, ++ "as_json": False, + "with_lists": [1], + "ignore_null": False, + }, +@@ -110,6 +118,7 @@ class SQLite3PillarTestCase(TestCase): + "query": "SELECT blah9", + "depth": 0, + "as_list": False, ++ "as_json": False, + "with_lists": [1, 2], + "ignore_null": False, + }, +@@ -141,6 +150,7 @@ class SQLite3PillarTestCase(TestCase): + "query": "SELECT blah", + "depth": 0, + "as_list": False, ++ "as_json": False, + "with_lists": None, + "ignore_null": False, + }, +@@ -151,6 +161,7 @@ class SQLite3PillarTestCase(TestCase): + "query": "SELECT blah2", + "depth": 0, + "as_list": False, ++ "as_json": False, + "with_lists": None, + "ignore_null": False, + }, +@@ -161,6 +172,7 @@ class SQLite3PillarTestCase(TestCase): + "query": "SELECT blah3", + "depth": 0, + "as_list": False, ++ "as_json": False, + "with_lists": None, + "ignore_null": False, + }, +@@ -171,6 +183,7 @@ class SQLite3PillarTestCase(TestCase): + "query": "SELECT blah4", + "depth": 2, + "as_list": False, ++ "as_json": False, + "with_lists": None, + "ignore_null": False, + }, +@@ -181,6 +194,7 @@ class SQLite3PillarTestCase(TestCase): + "query": "SELECT blah5", + "depth": 0, + "as_list": False, ++ "as_json": False, + "with_lists": None, + "ignore_null": False, + }, +@@ -191,6 +205,7 @@ class SQLite3PillarTestCase(TestCase): + "query": "SELECT blah6", + "depth": 2, + "as_list": False, ++ "as_json": False, + "with_lists": None, + "ignore_null": False, + }, +@@ -201,6 +216,7 @@ class SQLite3PillarTestCase(TestCase): + "query": "SELECT blah7", + "depth": 0, + "as_list": True, ++ "as_json": False, + "with_lists": 
None, + "ignore_null": False, + }, +@@ -232,6 +248,7 @@ class SQLite3PillarTestCase(TestCase): + "query": "SELECT blah1", + "depth": 0, + "as_list": False, ++ "as_json": False, + "with_lists": None, + "ignore_null": False, + }, +@@ -242,6 +259,7 @@ class SQLite3PillarTestCase(TestCase): + "query": "SELECT blah2", + "depth": 2, + "as_list": False, ++ "as_json": False, + "with_lists": None, + "ignore_null": False, + }, +@@ -252,6 +270,7 @@ class SQLite3PillarTestCase(TestCase): + "query": "SELECT blah3", + "depth": 0, + "as_list": True, ++ "as_json": False, + "with_lists": None, + "ignore_null": False, + }, +@@ -262,6 +281,7 @@ class SQLite3PillarTestCase(TestCase): + "query": "SELECT blah1", + "depth": 0, + "as_list": False, ++ "as_json": False, + "with_lists": None, + "ignore_null": False, + }, +@@ -272,6 +292,7 @@ class SQLite3PillarTestCase(TestCase): + "query": "SELECT blah2", + "depth": 2, + "as_list": False, ++ "as_json": False, + "with_lists": None, + "ignore_null": False, + }, +@@ -282,6 +303,7 @@ class SQLite3PillarTestCase(TestCase): + "query": "SELECT blah3", + "depth": 0, + "as_list": True, ++ "as_json": False, + "with_lists": None, + "ignore_null": False, + }, +@@ -320,6 +342,7 @@ class SQLite3PillarTestCase(TestCase): + "query": "SELECT blah", + "depth": 0, + "as_list": False, ++ "as_json": False, + "with_lists": None, + "ignore_null": False, + }, +@@ -330,6 +353,7 @@ class SQLite3PillarTestCase(TestCase): + "query": "SELECT blah2", + "depth": 0, + "as_list": False, ++ "as_json": False, + "with_lists": None, + "ignore_null": False, + }, +@@ -340,6 +364,7 @@ class SQLite3PillarTestCase(TestCase): + "query": "SELECT blah3", + "depth": 0, + "as_list": False, ++ "as_json": False, + "with_lists": None, + "ignore_null": False, + }, +@@ -350,6 +375,7 @@ class SQLite3PillarTestCase(TestCase): + "query": "SELECT blah4", + "depth": 2, + "as_list": False, ++ "as_json": False, + "with_lists": None, + "ignore_null": False, + }, +@@ -360,6 +386,7 @@ class 
SQLite3PillarTestCase(TestCase): + "query": "SELECT blah5", + "depth": 0, + "as_list": False, ++ "as_json": False, + "with_lists": None, + "ignore_null": False, + }, +@@ -370,6 +397,7 @@ class SQLite3PillarTestCase(TestCase): + "query": "SELECT blah6", + "depth": 0, + "as_list": False, ++ "as_json": False, + "with_lists": None, + "ignore_null": False, + }, +@@ -380,6 +408,7 @@ class SQLite3PillarTestCase(TestCase): + "query": "SELECT blah7", + "depth": 2, + "as_list": False, ++ "as_json": False, + "with_lists": None, + "ignore_null": False, + }, +@@ -390,6 +419,7 @@ class SQLite3PillarTestCase(TestCase): + "query": "SELECT blah8", + "depth": 0, + "as_list": True, ++ "as_json": False, + "with_lists": None, + "ignore_null": False, + }, +@@ -411,6 +441,7 @@ class SQLite3PillarTestCase(TestCase): + "query": "SELECT blah", + "depth": 0, + "as_list": False, ++ "as_json": False, + "with_lists": None, + "ignore_null": False, + }, +@@ -421,6 +452,7 @@ class SQLite3PillarTestCase(TestCase): + "query": "SELECT blah2", + "depth": 0, + "as_list": False, ++ "as_json": False, + "with_lists": None, + "ignore_null": False, + }, +-- +2.34.1 + + diff --git a/_lastrevision b/_lastrevision index 19342cc..499e5b6 100644 --- a/_lastrevision +++ b/_lastrevision @@ -1 +1 @@ -cc4854c45cb99305dbd59ffe392c981c49178ae2 \ No newline at end of file +e04acec89d982e3bd465742afffe6ae5ec82620b \ No newline at end of file diff --git a/_service b/_service index 28c0df2..a708e01 100644 --- a/_service +++ b/_service @@ -3,7 +3,7 @@ https://github.com/openSUSE/salt-packaging.git salt package - release/3005.1 + release/3004 git @@ -12,8 +12,8 @@ codeload.github.com - openSUSE/salt/tar.gz/v3005.1-suse - v3005.1.tar.gz + openSUSE/salt/tar.gz/v3004-suse + v3004.tar.gz diff --git a/activate-all-beacons-sources-config-pillar-grains.patch b/activate-all-beacons-sources-config-pillar-grains.patch index e5d34b6..9667639 100644 --- a/activate-all-beacons-sources-config-pillar-grains.patch +++ 
b/activate-all-beacons-sources-config-pillar-grains.patch @@ -1,4 +1,4 @@ -From ae426ff5df3ade9ce16672fb20399634a8b777d5 Mon Sep 17 00:00:00 2001 +From c44b897eb1305c6b9c341fc16f729d2293ab24e4 Mon Sep 17 00:00:00 2001 From: Bo Maryniuk Date: Tue, 17 Oct 2017 16:52:33 +0200 Subject: [PATCH] Activate all beacons sources: config/pillar/grains @@ -8,10 +8,10 @@ Subject: [PATCH] Activate all beacons sources: config/pillar/grains 1 file changed, 1 insertion(+), 3 deletions(-) diff --git a/salt/minion.py b/salt/minion.py -index 544805fc62..16452facf4 100644 +index c255f37c26..4da665a130 100644 --- a/salt/minion.py +++ b/salt/minion.py -@@ -503,9 +503,7 @@ class MinionBase: +@@ -508,9 +508,7 @@ class MinionBase: the pillar or grains changed """ if "config.merge" in functions: @@ -23,6 +23,6 @@ index 544805fc62..16452facf4 100644 return self.beacons.process( b_conf, self.opts["grains"] -- -2.37.3 +2.29.2 diff --git a/add-amazon-ec2-detection-for-virtual-grains-bsc-1195.patch b/add-amazon-ec2-detection-for-virtual-grains-bsc-1195.patch index b599881..a9c7a3d 100644 --- a/add-amazon-ec2-detection-for-virtual-grains-bsc-1195.patch +++ b/add-amazon-ec2-detection-for-virtual-grains-bsc-1195.patch @@ -1,4 +1,4 @@ -From 1434a128559df8183c032af722dc3d187bda148a Mon Sep 17 00:00:00 2001 +From 77e90c4925a4268c5975cf1ce0bb0e4c457618c1 Mon Sep 17 00:00:00 2001 From: Victor Zhestkov Date: Thu, 1 Sep 2022 14:46:24 +0300 Subject: [PATCH] Add Amazon EC2 detection for virtual grains @@ -29,10 +29,10 @@ index 0000000000..5f402d61c2 @@ -0,0 +1 @@ +Implementation of Amazon EC2 instance detection and setting `virtual_subtype` grain accordingly including the product if possible to identify. 
diff --git a/salt/grains/core.py b/salt/grains/core.py -index 23d8b8ea42..047c33ffd3 100644 +index c5d996d1bb..9530a43fc5 100644 --- a/salt/grains/core.py +++ b/salt/grains/core.py -@@ -1171,6 +1171,24 @@ def _virtual(osdata): +@@ -1173,6 +1173,24 @@ def _virtual(osdata): if grains.get("virtual_subtype") and grains["virtual"] == "physical": grains["virtual"] = "virtual" @@ -58,10 +58,10 @@ index 23d8b8ea42..047c33ffd3 100644 log.info( "Although '%s' was found in path, the current user " diff --git a/salt/modules/cmdmod.py b/salt/modules/cmdmod.py -index a26220718a..07b6e100d2 100644 +index 61b328b13b..cd42e2cda0 100644 --- a/salt/modules/cmdmod.py +++ b/salt/modules/cmdmod.py -@@ -932,6 +932,7 @@ def _run_quiet( +@@ -907,6 +907,7 @@ def _run_quiet( success_retcodes=None, success_stdout=None, success_stderr=None, @@ -69,7 +69,7 @@ index a26220718a..07b6e100d2 100644 ): """ Helper for running commands quietly for minion startup -@@ -958,6 +959,7 @@ def _run_quiet( +@@ -933,6 +934,7 @@ def _run_quiet( success_retcodes=success_retcodes, success_stdout=success_stdout, success_stderr=success_stderr, @@ -77,7 +77,7 @@ index a26220718a..07b6e100d2 100644 )["stdout"] -@@ -980,6 +982,7 @@ def _run_all_quiet( +@@ -955,6 +957,7 @@ def _run_all_quiet( success_retcodes=None, success_stdout=None, success_stderr=None, @@ -85,7 +85,7 @@ index a26220718a..07b6e100d2 100644 ): """ -@@ -1012,6 +1015,7 @@ def _run_all_quiet( +@@ -987,6 +990,7 @@ def _run_all_quiet( success_retcodes=success_retcodes, success_stdout=success_stdout, success_stderr=success_stderr, @@ -94,10 +94,10 @@ index a26220718a..07b6e100d2 100644 diff --git a/tests/pytests/unit/grains/test_core.py b/tests/pytests/unit/grains/test_core.py -index 5c43dbdb09..7c4ea1f17f 100644 +index bc3947fa1b..84dd97d62f 100644 --- a/tests/pytests/unit/grains/test_core.py +++ b/tests/pytests/unit/grains/test_core.py -@@ -2823,3 +2823,120 @@ def test_get_server_id(): +@@ -2720,3 +2720,120 @@ def test_get_server_id(): with 
patch.dict(core.__opts__, {"id": "otherid"}): assert core.get_server_id() != expected @@ -219,6 +219,6 @@ index 5c43dbdb09..7c4ea1f17f 100644 + assert virtual_grains["virtual"] == "kvm" + assert "virtual_subtype" not in virtual_grains -- -2.37.3 +2.37.2 diff --git a/add-custom-suse-capabilities-as-grains.patch b/add-custom-suse-capabilities-as-grains.patch index 418b5aa..39e1b58 100644 --- a/add-custom-suse-capabilities-as-grains.patch +++ b/add-custom-suse-capabilities-as-grains.patch @@ -1,4 +1,4 @@ -From cb31b475c2ac02e06b167f30fc36fe49f7f5d4f6 Mon Sep 17 00:00:00 2001 +From 1c20e6e1acf21d301d6e53432afaa7cc42db2380 Mon Sep 17 00:00:00 2001 From: Alexander Graul Date: Tue, 18 Jan 2022 12:59:43 +0100 Subject: [PATCH] Add custom SUSE capabilities as Grains @@ -25,6 +25,6 @@ index 300052f1ee..f2504dbf19 100644 + '__suse_reserved_saltutil_states_support': True + } -- -2.37.3 +2.34.1 diff --git a/add-environment-variable-to-know-if-yum-is-invoked-f.patch b/add-environment-variable-to-know-if-yum-is-invoked-f.patch index da79922..76f7d79 100644 --- a/add-environment-variable-to-know-if-yum-is-invoked-f.patch +++ b/add-environment-variable-to-know-if-yum-is-invoked-f.patch @@ -1,4 +1,4 @@ -From 0cbc4e8f8ed5c8366ed6864216d70d58f5ae0a82 Mon Sep 17 00:00:00 2001 +From 6ba30d3900bc328efd3480c0ff3d9e9b126fc5cb Mon Sep 17 00:00:00 2001 From: Alexander Graul Date: Tue, 18 Jan 2022 12:57:21 +0100 Subject: [PATCH] Add environment variable to know if yum is invoked from @@ -9,7 +9,7 @@ Subject: [PATCH] Add environment variable to know if yum is invoked from 1 file changed, 17 insertions(+), 6 deletions(-) diff --git a/salt/modules/yumpkg.py b/salt/modules/yumpkg.py -index 3f855d255f..08dccafceb 100644 +index cf684e20f7..8d089c6aa4 100644 --- a/salt/modules/yumpkg.py +++ b/salt/modules/yumpkg.py @@ -965,7 +965,9 @@ def list_repo_pkgs(*args, **kwargs): @@ -23,7 +23,7 @@ index 3f855d255f..08dccafceb 100644 .splitlines()[0] .strip() ) -@@ -2433,7 +2435,9 @@ def 
list_holds(pattern=__HOLD_PATTERN, full=True): +@@ -2422,7 +2424,9 @@ def list_holds(pattern=__HOLD_PATTERN, full=True): """ _check_versionlock() @@ -34,7 +34,7 @@ index 3f855d255f..08dccafceb 100644 ret = [] for line in salt.utils.itertools.split(out, "\n"): match = _get_hold(line, pattern=pattern, full=full) -@@ -2501,7 +2505,10 @@ def group_list(): +@@ -2490,7 +2494,10 @@ def group_list(): } out = __salt__["cmd.run_stdout"]( @@ -46,7 +46,7 @@ index 3f855d255f..08dccafceb 100644 ) key = None for line in salt.utils.itertools.split(out, "\n"): -@@ -2572,7 +2579,9 @@ def group_info(name, expand=False, ignore_groups=None): +@@ -2561,7 +2568,9 @@ def group_info(name, expand=False, ignore_groups=None): ret[pkgtype] = set() cmd = [_yum(), "--quiet", "groupinfo", name] @@ -57,7 +57,7 @@ index 3f855d255f..08dccafceb 100644 g_info = {} for line in salt.utils.itertools.split(out, "\n"): -@@ -3301,7 +3310,9 @@ def download(*packages, **kwargs): +@@ -3278,7 +3287,9 @@ def download(*packages, **kwargs): cmd = ["yumdownloader", "-q", "--destdir={}".format(CACHE_DIR)] cmd.extend(packages) @@ -68,7 +68,7 @@ index 3f855d255f..08dccafceb 100644 ret = {} for dld_result in os.listdir(CACHE_DIR): if not dld_result.endswith(".rpm"): -@@ -3377,7 +3388,7 @@ def _get_patches(installed_only=False): +@@ -3354,7 +3365,7 @@ def _get_patches(installed_only=False): patches = {} cmd = [_yum(), "--quiet", "updateinfo", "list", "all"] @@ -78,6 +78,6 @@ index 3f855d255f..08dccafceb 100644 for line in salt.utils.itertools.split(ret, os.linesep): -- -2.37.3 +2.34.1 diff --git a/add-migrated-state-and-gpg-key-management-functions-.patch b/add-migrated-state-and-gpg-key-management-functions-.patch index 10ea5e3..e1552d7 100644 --- a/add-migrated-state-and-gpg-key-management-functions-.patch +++ b/add-migrated-state-and-gpg-key-management-functions-.patch @@ -1,4 +1,4 @@ -From 97753443c2d782b61fd51457d13309405e8f12d8 Mon Sep 17 00:00:00 2001 +From 82ceb569ea57fc14ff3e2fa1c3f7ef5b95bb5eb0 Mon Sep 17 
00:00:00 2001 From: Alexander Graul Date: Tue, 18 Jan 2022 18:40:40 +0100 Subject: [PATCH] Add "migrated" state and GPG key management functions @@ -20,7 +20,7 @@ same virtual package, based on the counterpart from rpm_lowpkg API. Convert test to pytests --- - salt/modules/aptpkg.py | 4 +- + salt/modules/aptpkg.py | 7 +- salt/modules/rpm_lowpkg.py | 151 +++++++ salt/modules/yumpkg.py | 88 ++++ salt/modules/zypperpkg.py | 88 ++++ @@ -28,37 +28,40 @@ Convert test to pytests tests/pytests/unit/modules/test_yumpkg.py | 44 +- tests/pytests/unit/modules/test_zypperpkg.py | 45 +- tests/pytests/unit/states/test_pkgrepo.py | 448 +++++++++++++++++++ - 8 files changed, 1071 insertions(+), 4 deletions(-) + 8 files changed, 1073 insertions(+), 5 deletions(-) diff --git a/salt/modules/aptpkg.py b/salt/modules/aptpkg.py -index 544143d286..21ccc2a73c 100644 +index 0d378355ab..558033c931 100644 --- a/salt/modules/aptpkg.py +++ b/salt/modules/aptpkg.py -@@ -2211,7 +2211,7 @@ def _parse_repo_keys_output(cmd_ret): +@@ -2036,7 +2036,7 @@ def _convert_if_int(value): + return value + + +-def get_repo_keys(): ++def get_repo_keys(**kwargs): + """ + .. versionadded:: 2017.7.0 + +@@ -2118,7 +2118,9 @@ def get_repo_keys(): return ret --def get_repo_keys(aptkey=True, keydir=None): -+def get_repo_keys(aptkey=True, keydir=None, **kwargs): +-def add_repo_key(path=None, text=None, keyserver=None, keyid=None, saltenv="base"): ++def add_repo_key( ++ path=None, text=None, keyserver=None, keyid=None, saltenv="base", **kwargs ++): """ .. versionadded:: 2017.7.0 -@@ -2319,6 +2319,7 @@ def add_repo_key( - aptkey=True, - keydir=None, - keyfile=None, -+ **kwargs - ): +@@ -2144,7 +2146,6 @@ def add_repo_key(path=None, text=None, keyserver=None, keyid=None, saltenv="base + salt '*' pkg.add_repo_key keyserver='keyserver.example' keyid='0000AAAA' """ - .. 
versionadded:: 2017.7.0 -@@ -2372,7 +2373,6 @@ def add_repo_key( - if not salt.utils.path.which("apt-key"): - aptkey = False cmd = ["apt-key"] - kwargs = {} - # If the keyid is provided or determined, check it against the existing - # repo key ids to determine whether it needs to be imported. + current_repo_keys = get_repo_keys() + diff --git a/salt/modules/rpm_lowpkg.py b/salt/modules/rpm_lowpkg.py index d65a46a703..c8e984c021 100644 --- a/salt/modules/rpm_lowpkg.py @@ -219,10 +222,10 @@ index d65a46a703..c8e984c021 100644 + cmd.extend(["-e", key]) + return __salt__["cmd.retcode"](cmd) == 0 diff --git a/salt/modules/yumpkg.py b/salt/modules/yumpkg.py -index 08dccafceb..46f0b1f613 100644 +index 8d089c6aa4..9737508377 100644 --- a/salt/modules/yumpkg.py +++ b/salt/modules/yumpkg.py -@@ -3494,3 +3494,91 @@ def services_need_restart(**kwargs): +@@ -3471,3 +3471,91 @@ def services_need_restart(**kwargs): services.add(service) return list(services) @@ -315,10 +318,10 @@ index 08dccafceb..46f0b1f613 100644 + """ + return __salt__["lowpkg.remove_gpg_key"](keyid, root) diff --git a/salt/modules/zypperpkg.py b/salt/modules/zypperpkg.py -index c2452d6dec..4e3006a8cd 100644 +index 43c4e91109..4fc045c313 100644 --- a/salt/modules/zypperpkg.py +++ b/salt/modules/zypperpkg.py -@@ -3142,3 +3142,91 @@ def services_need_restart(root=None, **kwargs): +@@ -3134,3 +3134,91 @@ def services_need_restart(root=None, **kwargs): services = zypper_output.split() return services @@ -411,10 +414,10 @@ index c2452d6dec..4e3006a8cd 100644 + """ + return __salt__["lowpkg.remove_gpg_key"](keyid, root) diff --git a/salt/states/pkgrepo.py b/salt/states/pkgrepo.py -index 358c927695..a777fe4a96 100644 +index f395dec1ed..00d3cbfcd8 100644 --- a/salt/states/pkgrepo.py +++ b/salt/states/pkgrepo.py -@@ -118,6 +118,7 @@ Using ``aptkey: False`` with ``keyserver`` and ``keyid``: +@@ -85,6 +85,7 @@ package managers are APT, DNF, YUM and Zypper. 
Here is some example SLS: """ @@ -422,7 +425,7 @@ index 358c927695..a777fe4a96 100644 import sys import salt.utils.data -@@ -714,3 +715,209 @@ def absent(name, **kwargs): +@@ -672,3 +673,209 @@ def absent(name, **kwargs): ret["comment"] = "Failed to remove repo {}".format(name) return ret @@ -633,7 +636,7 @@ index 358c927695..a777fe4a96 100644 + + return ret diff --git a/tests/pytests/unit/modules/test_yumpkg.py b/tests/pytests/unit/modules/test_yumpkg.py -index 99ec05b990..a495569c5d 100644 +index ea8135bcef..475e1d6094 100644 --- a/tests/pytests/unit/modules/test_yumpkg.py +++ b/tests/pytests/unit/modules/test_yumpkg.py @@ -7,7 +7,7 @@ import salt.modules.rpm_lowpkg as rpm @@ -645,7 +648,7 @@ index 99ec05b990..a495569c5d 100644 try: import pytest -@@ -1911,6 +1911,48 @@ def test_get_repo_with_non_existent_repo(list_repos_var): +@@ -1849,6 +1849,48 @@ def test_get_repo_with_non_existent_repo(list_repos_var): assert ret == expected, ret @@ -695,7 +698,7 @@ index 99ec05b990..a495569c5d 100644 """ Tests that the proper CLI options are added when obsoletes=False diff --git a/tests/pytests/unit/modules/test_zypperpkg.py b/tests/pytests/unit/modules/test_zypperpkg.py -index 70bd837c5f..351a173b81 100644 +index 4a0055e11c..eb1e63f6d7 100644 --- a/tests/pytests/unit/modules/test_zypperpkg.py +++ b/tests/pytests/unit/modules/test_zypperpkg.py @@ -8,7 +8,8 @@ import os @@ -708,10 +711,10 @@ index 70bd837c5f..351a173b81 100644 @pytest.fixture -@@ -211,3 +212,45 @@ def test_pkg_list_holds(): - ret = zypper.list_holds() - assert len(ret) == 1 - assert "bar-2:2.3.4-2.1.*" in ret +@@ -78,3 +79,45 @@ def test_normalize_name(): + assert result == "foo", result + result = zypper.normalize_name("foo.noarch") + assert result == "foo", result + + +def test_get_repo_keys(): @@ -1211,6 +1214,6 @@ index daa913bcc2..cbb12cfb9b 100644 + "comment": "There are keys or repositories to migrate or drop", + } -- -2.37.3 +2.34.1 diff --git 
a/add-missing-ansible-module-functions-to-whitelist-in.patch b/add-missing-ansible-module-functions-to-whitelist-in.patch new file mode 100644 index 0000000..2ff3939 --- /dev/null +++ b/add-missing-ansible-module-functions-to-whitelist-in.patch @@ -0,0 +1,40 @@ +From aec7965f19f55d3d33893833fd259606d3a7e641 Mon Sep 17 00:00:00 2001 +From: =?UTF-8?q?Pablo=20Su=C3=A1rez=20Hern=C3=A1ndez?= + +Date: Tue, 8 Feb 2022 11:53:47 +0000 +Subject: [PATCH] Add missing "ansible" module functions to whitelist in + Salt 3004 (bsc#1195625) (#485) + +* Add missing functions to ansible __load__ + +* Properly get separated copies from list +--- + salt/modules/ansiblegate.py | 11 ++++++++++- + 1 file changed, 10 insertions(+), 1 deletion(-) + +diff --git a/salt/modules/ansiblegate.py b/salt/modules/ansiblegate.py +index f33be6a00e..7c3a17861a 100644 +--- a/salt/modules/ansiblegate.py ++++ b/salt/modules/ansiblegate.py +@@ -45,7 +45,16 @@ hosts: + """ + DEFAULT_TIMEOUT = 1200 # seconds (20 minutes) + +-__load__ = __non_ansible_functions__ = ["help", "list_", "call", "playbooks"][:] ++__non_ansible_functions__ = [] ++ ++__load__ = __non_ansible_functions__[:] = [ ++ "help", ++ "list_", ++ "call", ++ "playbooks", ++ "discover_playbooks", ++ "targets", ++] + + + def _set_callables(modules): +-- +2.35.1 + + diff --git a/add-publish_batch-to-clearfuncs-exposed-methods.patch b/add-publish_batch-to-clearfuncs-exposed-methods.patch index c4e22cc..a572ef1 100644 --- a/add-publish_batch-to-clearfuncs-exposed-methods.patch +++ b/add-publish_batch-to-clearfuncs-exposed-methods.patch @@ -1,4 +1,4 @@ -From e6f6b011849536c5ff9a9bdef56e900ed5a7fb1d Mon Sep 17 00:00:00 2001 +From 2422d30358bcd0f96e399e623136f7984d136b38 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Pablo=20Su=C3=A1rez=20Hern=C3=A1ndez?= Date: Thu, 28 May 2020 09:37:08 +0100 @@ -9,10 +9,10 @@ Subject: [PATCH] Add publish_batch to ClearFuncs exposed methods 1 file changed, 1 insertion(+) diff --git a/salt/master.py b/salt/master.py -index 
705a1bc2fb..7f41ffe77b 100644 +index ab85c7f5c6..59bb19ce75 100644 --- a/salt/master.py +++ b/salt/master.py -@@ -1952,6 +1952,7 @@ class ClearFuncs(TransportMethods): +@@ -2042,6 +2042,7 @@ class ClearFuncs(TransportMethods): expose_methods = ( "ping", "publish", @@ -21,6 +21,6 @@ index 705a1bc2fb..7f41ffe77b 100644 "mk_token", "wheel", -- -2.37.3 +2.29.2 diff --git a/add-rpm_vercmp-python-library-for-version-comparison.patch b/add-rpm_vercmp-python-library-for-version-comparison.patch new file mode 100644 index 0000000..89c9ead --- /dev/null +++ b/add-rpm_vercmp-python-library-for-version-comparison.patch @@ -0,0 +1,402 @@ +From a15321796586b033d8fa8366074087ceddaa4d23 Mon Sep 17 00:00:00 2001 +From: Alexander Graul +Date: Wed, 19 Jan 2022 17:41:11 +0100 +Subject: [PATCH] Add rpm_vercmp python library for version comparison - + 3003.3 (#448) + +* Add rpm_vercmp python library for version comparison + +* Add rpm-vercmp to tiamat builds + +* Put GPG tests back to test_rpm_lowpkg + +Co-authored-by: Megan Wilhite +--- + changelog/60814.added | 1 + + tests/pytests/unit/modules/test_rpm_lowpkg.py | 326 ++++++++++++++---- + 2 files changed, 263 insertions(+), 64 deletions(-) + create mode 100644 changelog/60814.added + +diff --git a/changelog/60814.added b/changelog/60814.added +new file mode 100644 +index 0000000000..7a9ffe1b25 +--- /dev/null ++++ b/changelog/60814.added +@@ -0,0 +1 @@ ++Add the python rpm-vercmp library in the rpm_lowpkg.py module. 
+diff --git a/tests/pytests/unit/modules/test_rpm_lowpkg.py b/tests/pytests/unit/modules/test_rpm_lowpkg.py +index f19afa854e..c9d1ac2b1c 100644 +--- a/tests/pytests/unit/modules/test_rpm_lowpkg.py ++++ b/tests/pytests/unit/modules/test_rpm_lowpkg.py +@@ -3,6 +3,7 @@ + """ + + ++import datetime + import pytest + import salt.modules.cmdmod + import salt.modules.rpm_lowpkg as rpm +@@ -250,92 +251,57 @@ def test_version_cmp_rpm_all_libraries(rpm_lib): + assert 1 == rpm.version_cmp("3:2.9.1-8.el7.4", "3:2.9.1-7.el7.4") + + +-def test_version_cmp_rpm(): ++@patch("salt.modules.rpm_lowpkg.HAS_RPM", True) ++@patch("salt.modules.rpm_lowpkg.rpm.labelCompare", return_value=-1) ++@patch("salt.modules.rpm_lowpkg.log") ++def test_version_cmp_rpm(mock_log, mock_labelCompare): + """ + Test package version if RPM-Python is installed + + :return: + """ +- mock_label = MagicMock(return_value=-1) +- mock_log = MagicMock() +- patch_label = patch("salt.modules.rpm_lowpkg.rpm.labelCompare", mock_label) +- patch_log = patch("salt.modules.rpm_lowpkg.log", mock_log) +- patch_rpm = patch("salt.modules.rpm_lowpkg.HAS_RPM", True) +- with patch_label, patch_rpm, patch_log: +- assert -1 == rpm.version_cmp("1", "2") +- assert not mock_log.warning.called +- assert mock_label.called ++ assert -1 == rpm.version_cmp("1", "2") ++ assert not mock_log.warning.called ++ assert mock_labelCompare.called + + +-def test_version_cmp_rpmutils(): ++@patch("salt.modules.rpm_lowpkg.HAS_RPM", False) ++@patch("salt.modules.rpm_lowpkg.HAS_RPMUTILS", True) ++@patch("salt.modules.rpm_lowpkg.HAS_PY_RPM", False) ++@patch("salt.modules.rpm_lowpkg.rpmUtils", create=True) ++@patch("salt.modules.rpm_lowpkg.log") ++def test_version_cmp_rpmutils(mock_log, mock_rpmUtils): + """ + Test package version if rpmUtils.miscutils called + + :return: + """ +- mock_log = MagicMock() +- mock_rpmUtils = MagicMock() + mock_rpmUtils.miscutils = MagicMock() + mock_rpmUtils.miscutils.compareEVR = MagicMock(return_value=-1) +- patch_utils = 
patch("salt.modules.rpm_lowpkg.rpmUtils", mock_rpmUtils, create=True) +- patch_rpm = patch("salt.modules.rpm_lowpkg.HAS_RPM", False) +- patch_utils_lib = patch("salt.modules.rpm_lowpkg.HAS_RPMUTILS", True) +- patch_py_rpm = patch("salt.modules.rpm_lowpkg.HAS_PY_RPM", False) +- patch_log = patch("salt.modules.rpm_lowpkg.log", mock_log) +- +- with patch_utils, patch_rpm, patch_py_rpm, patch_utils_lib, patch_log: +- assert -1 == rpm.version_cmp("1", "2") +- assert mock_log.warning.called +- assert mock_rpmUtils.miscutils.compareEVR.called ++ assert -1 == rpm.version_cmp("1", "2") ++ assert mock_log.warning.called ++ assert mock_rpmUtils.miscutils.compareEVR.called ++ assert ( ++ mock_log.warning.mock_calls[0][1][0] ++ == "Please install a package that provides rpm.labelCompare for more accurate version comparisons." ++ ) + + +-def test_version_cmp_rpmdev_vercmp(): ++@patch("salt.modules.rpm_lowpkg.HAS_RPM", False) ++@patch("salt.modules.rpm_lowpkg.HAS_RPMUTILS", False) ++@patch("salt.modules.rpm_lowpkg.HAS_PY_RPM", False) ++@patch("salt.utils.path.which", return_value=True) ++@patch("salt.modules.rpm_lowpkg.log") ++def test_version_cmp_rpmdev_vercmp(mock_log, mock_which): + """ + Test package version if rpmdev-vercmp is installed + + :return: + """ + mock__salt__ = MagicMock(return_value={"retcode": 12}) +- mock_log = MagicMock() +- patch_rpm = patch("salt.modules.rpm_lowpkg.HAS_RPM", False) +- patch_rpmutils = patch("salt.modules.rpm_lowpkg.HAS_RPMUTILS", False) +- patch_py_rpm = patch("salt.modules.rpm_lowpkg.HAS_PY_RPM", False) +- patch_which = patch("salt.utils.path.which", return_value=True) +- patch_log = patch("salt.modules.rpm_lowpkg.log", mock_log) +- +- with patch_rpm, patch_rpmutils, patch_py_rpm, patch_which, patch_log: +- with patch.dict(rpm.__salt__, {"cmd.run_all": mock__salt__}): +- assert -1 == rpm.version_cmp("1", "2") +- assert mock__salt__.called +- assert mock_log.warning.called +- assert ( +- mock_log.warning.mock_calls[0][1][0] +- == "Please 
install a package that provides rpm.labelCompare for more accurate version comparisons." +- ) +- assert ( +- mock_log.warning.mock_calls[1][1][0] +- == "Installing the rpmdevtools package may surface dev tools in production." +- ) +- +- +-def test_version_cmp_python(): +- """ +- Test package version if falling back to python +- +- :return: +- """ +- mock_log = MagicMock() +- patch_rpm = patch("salt.modules.rpm_lowpkg.HAS_RPM", False) +- patch_rpmutils = patch("salt.modules.rpm_lowpkg.HAS_RPMUTILS", False) +- mock_version_cmp = MagicMock(return_value=-1) +- patch_py_rpm = patch("salt.modules.rpm_lowpkg.HAS_PY_RPM", False) +- patch_cmp = patch("salt.utils.versions.version_cmp", mock_version_cmp) +- patch_which = patch("salt.utils.path.which", return_value=False) +- patch_log = patch("salt.modules.rpm_lowpkg.log", mock_log) +- +- with patch_rpm, patch_rpmutils, patch_py_rpm, patch_cmp, patch_which, patch_log: ++ with patch.dict(rpm.__salt__, {"cmd.run_all": mock__salt__}): + assert -1 == rpm.version_cmp("1", "2") +- assert mock_version_cmp.called ++ assert mock__salt__.called + assert mock_log.warning.called + assert ( + mock_log.warning.mock_calls[0][1][0] +@@ -343,5 +309,237 @@ def test_version_cmp_python(): + ) + assert ( + mock_log.warning.mock_calls[1][1][0] +- == "Falling back on salt.utils.versions.version_cmp() for version comparisons" ++ == "Installing the rpmdevtools package may surface dev tools in production." 
+ ) ++ ++ ++@patch("salt.modules.rpm_lowpkg.HAS_RPM", False) ++@patch("salt.modules.rpm_lowpkg.HAS_RPMUTILS", False) ++@patch("salt.modules.rpm_lowpkg.HAS_PY_RPM", False) ++@patch("salt.utils.versions.version_cmp", return_value=-1) ++@patch("salt.utils.path.which", return_value=False) ++@patch("salt.modules.rpm_lowpkg.log") ++def test_version_cmp_python(mock_log, mock_which, mock_version_cmp): ++ """ ++ Test package version if falling back to python ++ ++ :return: ++ """ ++ assert -1 == rpm.version_cmp("1", "2") ++ assert mock_version_cmp.called ++ assert mock_log.warning.called ++ assert ( ++ mock_log.warning.mock_calls[0][1][0] ++ == "Please install a package that provides rpm.labelCompare for more accurate version comparisons." ++ ) ++ assert ( ++ mock_log.warning.mock_calls[1][1][0] ++ == "Falling back on salt.utils.versions.version_cmp() for version comparisons" ++ ) ++ ++ ++def test_list_gpg_keys_no_info(): ++ """ ++ Test list_gpg_keys with no extra information ++ """ ++ mock = MagicMock(return_value="\n".join(["gpg-pubkey-1", "gpg-pubkey-2"])) ++ with patch.dict(rpm.__salt__, {"cmd.run_stdout": mock}): ++ assert rpm.list_gpg_keys() == ["gpg-pubkey-1", "gpg-pubkey-2"] ++ assert not _called_with_root(mock) ++ ++ ++def test_list_gpg_keys_no_info_root(): ++ """ ++ Test list_gpg_keys with no extra information and root ++ """ ++ mock = MagicMock(return_value="\n".join(["gpg-pubkey-1", "gpg-pubkey-2"])) ++ with patch.dict(rpm.__salt__, {"cmd.run_stdout": mock}): ++ assert rpm.list_gpg_keys(root="/mnt") == ["gpg-pubkey-1", "gpg-pubkey-2"] ++ assert _called_with_root(mock) ++ ++ ++@patch("salt.modules.rpm_lowpkg.info_gpg_key") ++def test_list_gpg_keys_info(info_gpg_key): ++ """ ++ Test list_gpg_keys with extra information ++ """ ++ info_gpg_key.side_effect = lambda x, root: {"Description": "key for {}".format(x)} ++ mock = MagicMock(return_value="\n".join(["gpg-pubkey-1", "gpg-pubkey-2"])) ++ with patch.dict(rpm.__salt__, {"cmd.run_stdout": mock}): ++ assert 
rpm.list_gpg_keys(info=True) == { ++ "gpg-pubkey-1": {"Description": "key for gpg-pubkey-1"}, ++ "gpg-pubkey-2": {"Description": "key for gpg-pubkey-2"}, ++ } ++ assert not _called_with_root(mock) ++ ++ ++def test_info_gpg_key(): ++ """ ++ Test info_gpg_keys from a normal output ++ """ ++ info = """Name : gpg-pubkey ++Version : 3dbdc284 ++Release : 53674dd4 ++Architecture: (none) ++Install Date: Fri 08 Mar 2019 11:57:44 AM UTC ++Group : Public Keys ++Size : 0 ++License : pubkey ++Signature : (none) ++Source RPM : (none) ++Build Date : Mon 05 May 2014 10:37:40 AM UTC ++Build Host : localhost ++Packager : openSUSE Project Signing Key ++Summary : gpg(openSUSE Project Signing Key ) ++Description : ++-----BEGIN PGP PUBLIC KEY BLOCK----- ++Version: rpm-4.14.2.1 (NSS-3) ++ ++mQENBEkUTD8BCADWLy5d5IpJedHQQSXkC1VK/oAZlJEeBVpSZjMCn8LiHaI9Wq3G ++3Vp6wvsP1b3kssJGzVFNctdXt5tjvOLxvrEfRJuGfqHTKILByqLzkeyWawbFNfSQ ++93/8OunfSTXC1Sx3hgsNXQuOrNVKrDAQUqT620/jj94xNIg09bLSxsjN6EeTvyiO ++mtE9H1J03o9tY6meNL/gcQhxBvwuo205np0JojYBP0pOfN8l9hnIOLkA0yu4ZXig ++oKOVmf4iTjX4NImIWldT+UaWTO18NWcCrujtgHueytwYLBNV5N0oJIP2VYuLZfSD ++VYuPllv7c6O2UEOXJsdbQaVuzU1HLocDyipnABEBAAG0NG9wZW5TVVNFIFByb2pl ++Y3QgU2lnbmluZyBLZXkgPG9wZW5zdXNlQG9wZW5zdXNlLm9yZz6JATwEEwECACYC ++GwMGCwkIBwMCBBUCCAMEFgIDAQIeAQIXgAUCU2dN1AUJHR8ElQAKCRC4iy/UPb3C ++hGQrB/9teCZ3Nt8vHE0SC5NmYMAE1Spcjkzx6M4r4C70AVTMEQh/8BvgmwkKP/qI ++CWo2vC1hMXRgLg/TnTtFDq7kW+mHsCXmf5OLh2qOWCKi55Vitlf6bmH7n+h34Sha ++Ei8gAObSpZSF8BzPGl6v0QmEaGKM3O1oUbbB3Z8i6w21CTg7dbU5vGR8Yhi9rNtr ++hqrPS+q2yftjNbsODagaOUb85ESfQGx/LqoMePD+7MqGpAXjKMZqsEDP0TbxTwSk ++4UKnF4zFCYHPLK3y/hSH5SEJwwPY11l6JGdC1Ue8Zzaj7f//axUs/hTC0UZaEE+a ++5v4gbqOcigKaFs9Lc3Bj8b/lE10Y ++=i2TA ++-----END PGP PUBLIC KEY BLOCK----- ++ ++""" ++ mock = MagicMock(return_value=info) ++ with patch.dict(rpm.__salt__, {"cmd.run_stdout": mock}): ++ assert rpm.info_gpg_key("key") == { ++ "Name": "gpg-pubkey", ++ "Version": "3dbdc284", ++ "Release": "53674dd4", ++ "Architecture": None, ++ "Install Date": 
datetime.datetime(2019, 3, 8, 11, 57, 44), ++ "Group": "Public Keys", ++ "Size": 0, ++ "License": "pubkey", ++ "Signature": None, ++ "Source RPM": None, ++ "Build Date": datetime.datetime(2014, 5, 5, 10, 37, 40), ++ "Build Host": "localhost", ++ "Packager": "openSUSE Project Signing Key ", ++ "Summary": "gpg(openSUSE Project Signing Key )", ++ "Description": """-----BEGIN PGP PUBLIC KEY BLOCK----- ++Version: rpm-4.14.2.1 (NSS-3) ++ ++mQENBEkUTD8BCADWLy5d5IpJedHQQSXkC1VK/oAZlJEeBVpSZjMCn8LiHaI9Wq3G ++3Vp6wvsP1b3kssJGzVFNctdXt5tjvOLxvrEfRJuGfqHTKILByqLzkeyWawbFNfSQ ++93/8OunfSTXC1Sx3hgsNXQuOrNVKrDAQUqT620/jj94xNIg09bLSxsjN6EeTvyiO ++mtE9H1J03o9tY6meNL/gcQhxBvwuo205np0JojYBP0pOfN8l9hnIOLkA0yu4ZXig ++oKOVmf4iTjX4NImIWldT+UaWTO18NWcCrujtgHueytwYLBNV5N0oJIP2VYuLZfSD ++VYuPllv7c6O2UEOXJsdbQaVuzU1HLocDyipnABEBAAG0NG9wZW5TVVNFIFByb2pl ++Y3QgU2lnbmluZyBLZXkgPG9wZW5zdXNlQG9wZW5zdXNlLm9yZz6JATwEEwECACYC ++GwMGCwkIBwMCBBUCCAMEFgIDAQIeAQIXgAUCU2dN1AUJHR8ElQAKCRC4iy/UPb3C ++hGQrB/9teCZ3Nt8vHE0SC5NmYMAE1Spcjkzx6M4r4C70AVTMEQh/8BvgmwkKP/qI ++CWo2vC1hMXRgLg/TnTtFDq7kW+mHsCXmf5OLh2qOWCKi55Vitlf6bmH7n+h34Sha ++Ei8gAObSpZSF8BzPGl6v0QmEaGKM3O1oUbbB3Z8i6w21CTg7dbU5vGR8Yhi9rNtr ++hqrPS+q2yftjNbsODagaOUb85ESfQGx/LqoMePD+7MqGpAXjKMZqsEDP0TbxTwSk ++4UKnF4zFCYHPLK3y/hSH5SEJwwPY11l6JGdC1Ue8Zzaj7f//axUs/hTC0UZaEE+a ++5v4gbqOcigKaFs9Lc3Bj8b/lE10Y ++=i2TA ++-----END PGP PUBLIC KEY BLOCK-----""", ++ } ++ assert not _called_with_root(mock) ++ ++ ++def test_info_gpg_key_extended(): ++ """ ++ Test info_gpg_keys from an extended output ++ """ ++ info = """Name : gpg-pubkey ++Version : 3dbdc284 ++Release : 53674dd4 ++Architecture: (none) ++Install Date: Fri 08 Mar 2019 11:57:44 AM UTC ++Group : Public Keys ++Size : 0 ++License : pubkey ++Signature : (none) ++Source RPM : (none) ++Build Date : Mon 05 May 2014 10:37:40 AM UTC ++Build Host : localhost ++Packager : openSUSE Project Signing Key ++Summary : gpg(openSUSE Project Signing Key ) ++Description : ++-----BEGIN PGP PUBLIC KEY BLOCK----- ++Version: 
rpm-4.14.2.1 (NSS-3) ++ ++mQENBEkUTD8BCADWLy5d5IpJedHQQSXkC1VK/oAZlJEeBVpSZjMCn8LiHaI9Wq3G ++3Vp6wvsP1b3kssJGzVFNctdXt5tjvOLxvrEfRJuGfqHTKILByqLzkeyWawbFNfSQ ++93/8OunfSTXC1Sx3hgsNXQuOrNVKrDAQUqT620/jj94xNIg09bLSxsjN6EeTvyiO ++mtE9H1J03o9tY6meNL/gcQhxBvwuo205np0JojYBP0pOfN8l9hnIOLkA0yu4ZXig ++oKOVmf4iTjX4NImIWldT+UaWTO18NWcCrujtgHueytwYLBNV5N0oJIP2VYuLZfSD ++VYuPllv7c6O2UEOXJsdbQaVuzU1HLocDyipnABEBAAG0NG9wZW5TVVNFIFByb2pl ++Y3QgU2lnbmluZyBLZXkgPG9wZW5zdXNlQG9wZW5zdXNlLm9yZz6JATwEEwECACYC ++GwMGCwkIBwMCBBUCCAMEFgIDAQIeAQIXgAUCU2dN1AUJHR8ElQAKCRC4iy/UPb3C ++hGQrB/9teCZ3Nt8vHE0SC5NmYMAE1Spcjkzx6M4r4C70AVTMEQh/8BvgmwkKP/qI ++CWo2vC1hMXRgLg/TnTtFDq7kW+mHsCXmf5OLh2qOWCKi55Vitlf6bmH7n+h34Sha ++Ei8gAObSpZSF8BzPGl6v0QmEaGKM3O1oUbbB3Z8i6w21CTg7dbU5vGR8Yhi9rNtr ++hqrPS+q2yftjNbsODagaOUb85ESfQGx/LqoMePD+7MqGpAXjKMZqsEDP0TbxTwSk ++4UKnF4zFCYHPLK3y/hSH5SEJwwPY11l6JGdC1Ue8Zzaj7f//axUs/hTC0UZaEE+a ++5v4gbqOcigKaFs9Lc3Bj8b/lE10Y ++=i2TA ++-----END PGP PUBLIC KEY BLOCK----- ++ ++Distribution: (none) ++""" ++ mock = MagicMock(return_value=info) ++ with patch.dict(rpm.__salt__, {"cmd.run_stdout": mock}): ++ assert rpm.info_gpg_key("key") == { ++ "Name": "gpg-pubkey", ++ "Version": "3dbdc284", ++ "Release": "53674dd4", ++ "Architecture": None, ++ "Install Date": datetime.datetime(2019, 3, 8, 11, 57, 44), ++ "Group": "Public Keys", ++ "Size": 0, ++ "License": "pubkey", ++ "Signature": None, ++ "Source RPM": None, ++ "Build Date": datetime.datetime(2014, 5, 5, 10, 37, 40), ++ "Build Host": "localhost", ++ "Packager": "openSUSE Project Signing Key ", ++ "Summary": "gpg(openSUSE Project Signing Key )", ++ "Description": """-----BEGIN PGP PUBLIC KEY BLOCK----- ++Version: rpm-4.14.2.1 (NSS-3) ++ ++mQENBEkUTD8BCADWLy5d5IpJedHQQSXkC1VK/oAZlJEeBVpSZjMCn8LiHaI9Wq3G ++3Vp6wvsP1b3kssJGzVFNctdXt5tjvOLxvrEfRJuGfqHTKILByqLzkeyWawbFNfSQ ++93/8OunfSTXC1Sx3hgsNXQuOrNVKrDAQUqT620/jj94xNIg09bLSxsjN6EeTvyiO ++mtE9H1J03o9tY6meNL/gcQhxBvwuo205np0JojYBP0pOfN8l9hnIOLkA0yu4ZXig 
++oKOVmf4iTjX4NImIWldT+UaWTO18NWcCrujtgHueytwYLBNV5N0oJIP2VYuLZfSD ++VYuPllv7c6O2UEOXJsdbQaVuzU1HLocDyipnABEBAAG0NG9wZW5TVVNFIFByb2pl ++Y3QgU2lnbmluZyBLZXkgPG9wZW5zdXNlQG9wZW5zdXNlLm9yZz6JATwEEwECACYC ++GwMGCwkIBwMCBBUCCAMEFgIDAQIeAQIXgAUCU2dN1AUJHR8ElQAKCRC4iy/UPb3C ++hGQrB/9teCZ3Nt8vHE0SC5NmYMAE1Spcjkzx6M4r4C70AVTMEQh/8BvgmwkKP/qI ++CWo2vC1hMXRgLg/TnTtFDq7kW+mHsCXmf5OLh2qOWCKi55Vitlf6bmH7n+h34Sha ++Ei8gAObSpZSF8BzPGl6v0QmEaGKM3O1oUbbB3Z8i6w21CTg7dbU5vGR8Yhi9rNtr ++hqrPS+q2yftjNbsODagaOUb85ESfQGx/LqoMePD+7MqGpAXjKMZqsEDP0TbxTwSk ++4UKnF4zFCYHPLK3y/hSH5SEJwwPY11l6JGdC1Ue8Zzaj7f//axUs/hTC0UZaEE+a ++5v4gbqOcigKaFs9Lc3Bj8b/lE10Y ++=i2TA ++-----END PGP PUBLIC KEY BLOCK-----""", ++ "Distribution": None, ++ } ++ assert not _called_with_root(mock) ++ ++ ++def test_remove_gpg_key(): ++ """ ++ Test remove_gpg_key ++ """ ++ mock = MagicMock(return_value=0) ++ with patch.dict(rpm.__salt__, {"cmd.retcode": mock}): ++ assert rpm.remove_gpg_key("gpg-pubkey-1") ++ assert not _called_with_root(mock) +-- +2.34.1 + + diff --git a/add-salt-ssh-support-with-venv-salt-minion-3004-493.patch b/add-salt-ssh-support-with-venv-salt-minion-3004-493.patch index 58dd071..d860c0b 100644 --- a/add-salt-ssh-support-with-venv-salt-minion-3004-493.patch +++ b/add-salt-ssh-support-with-venv-salt-minion-3004-493.patch @@ -1,4 +1,4 @@ -From c683ceaf9321a646d32e3b2b5fca705563fe8e73 Mon Sep 17 00:00:00 2001 +From 8d1aba4e450922ec7ae4ce5fcf13dc5f7d2b8b7e Mon Sep 17 00:00:00 2001 From: Victor Zhestkov Date: Thu, 24 Feb 2022 16:52:24 +0300 Subject: [PATCH] Add salt-ssh support with venv-salt-minion - 3004 @@ -54,45 +54,49 @@ the child process as failed * Do not run pre flight script for raw_shell --- - salt/_logging/impl.py | 55 +++++++----- - salt/client/ssh/__init__.py | 157 ++++++++++++++++++++++++++++----- + salt/_logging/impl.py | 59 ++++++----- + salt/client/ssh/__init__.py | 174 ++++++++++++++++++++++++++++----- salt/client/ssh/client.py | 7 +- salt/client/ssh/shell.py | 8 ++ - 
salt/client/ssh/ssh_py_shim.py | 108 +++++++++++++---------- - salt/loader/__init__.py | 31 ++++++- + salt/client/ssh/ssh_py_shim.py | 108 +++++++++++--------- + salt/loader/__init__.py | 31 +++++- salt/netapi/__init__.py | 3 +- salt/roster/__init__.py | 6 +- tests/unit/test_loader.py | 2 +- - 9 files changed, 278 insertions(+), 99 deletions(-) + 9 files changed, 292 insertions(+), 106 deletions(-) diff --git a/salt/_logging/impl.py b/salt/_logging/impl.py -index cc18f49a9e..e050f43caf 100644 +index 779316ce0b..953490b284 100644 --- a/salt/_logging/impl.py +++ b/salt/_logging/impl.py -@@ -14,6 +14,7 @@ import re - import socket +@@ -7,6 +7,7 @@ + import logging + import re import sys - import traceback +import threading import types - import urllib.parse -@@ -104,6 +105,10 @@ DFLT_LOG_DATEFMT_LOGFILE = "%Y-%m-%d %H:%M:%S" - DFLT_LOG_FMT_CONSOLE = "[%(levelname)-8s] %(message)s" - DFLT_LOG_FMT_LOGFILE = "%(asctime)s,%(msecs)03d [%(name)-17s:%(lineno)-4d][%(levelname)-8s][%(process)d] %(message)s" + # Let's define these custom logging levels before importing the salt._logging.mixins +@@ -89,6 +90,10 @@ SORTED_LEVEL_NAMES = [l[0] for l in sorted(LOG_LEVELS.items(), key=lambda x: x[1 + + MODNAME_PATTERN = re.compile(r"(?P%%\(name\)(?:\-(?P[\d]+))?s)") +# LOG_LOCK is used to prevent deadlocks on using logging +# in combination with multiprocessing with salt-api +LOG_LOCK = threading.Lock() + - class SaltLogRecord(logging.LogRecord): - def __init__(self, *args, **kwargs): -@@ -270,27 +275,35 @@ class SaltLoggingClass(LOGGING_LOGGER_CLASS, metaclass=LoggingMixinMeta): + # ----- REMOVE ME ON REFACTOR COMPLETE ------------------------------------------------------------------------------> + class __NullLoggingHandler(TemporaryLoggingHandler): +@@ -283,31 +288,35 @@ class SaltLoggingClass( else: extra["exc_info_on_loglevel"] = exc_info_on_loglevel -- if sys.version_info < (3, 8): +- if sys.version_info < (3,): +- LOGGING_LOGGER_CLASS._log( +- self, level, msg, args, 
exc_info=exc_info, extra=extra +- ) +- elif sys.version_info < (3, 8): - LOGGING_LOGGER_CLASS._log( - self, - level, @@ -146,7 +150,7 @@ index cc18f49a9e..e050f43caf 100644 def makeRecord( self, diff --git a/salt/client/ssh/__init__.py b/salt/client/ssh/__init__.py -index ad2796bc87..6db2dfcbb0 100644 +index 37faa869bc..0066f4597b 100644 --- a/salt/client/ssh/__init__.py +++ b/salt/client/ssh/__init__.py @@ -6,11 +6,13 @@ import base64 @@ -162,8 +166,8 @@ index ad2796bc87..6db2dfcbb0 100644 +import psutil import queue import re - import shlex -@@ -20,6 +22,7 @@ import tarfile + import subprocess +@@ -19,6 +21,7 @@ import tarfile import tempfile import time import uuid @@ -171,16 +175,16 @@ index ad2796bc87..6db2dfcbb0 100644 import salt.client.ssh.shell import salt.client.ssh.wrapper -@@ -47,6 +50,7 @@ import salt.utils.url +@@ -44,6 +47,7 @@ import salt.utils.stringutils + import salt.utils.thin + import salt.utils.url import salt.utils.verify - from salt._logging import LOG_LEVELS - from salt._logging.mixins import MultiprocessingStateMixin +from salt._logging.impl import LOG_LOCK from salt.template import compile_template + from salt.utils.platform import is_junos, is_windows from salt.utils.process import Process - from salt.utils.zeromq import zmq -@@ -146,15 +150,26 @@ if [ "$SUDO" ] && [ "$SUDO_USER" ] - then SUDO="$SUDO -u $SUDO_USER" +@@ -146,15 +150,26 @@ elif [ "$SUDO" ] && [ -n "$SUDO_USER" ] + then SUDO="sudo " fi EX_PYTHON_INVALID={EX_THIN_PYTHON_INVALID} -PYTHON_CMDS="python3 /usr/libexec/platform-python python27 python2.7 python26 python2.6 python2 python" @@ -230,10 +234,10 @@ index ad2796bc87..6db2dfcbb0 100644 +# The file on a salt-ssh minion used to identify if Salt Bundle was deployed +VENV_HASH_FILE = "/var/tmp/venv-salt-minion/venv-hash.txt" + - if not salt.utils.platform.is_windows() and not salt.utils.platform.is_junos(): + if not is_windows() and not is_junos(): shim_file = os.path.join(os.path.dirname(__file__), "ssh_py_shim.py") if not 
os.path.exists(shim_file): -@@ -209,7 +223,7 @@ class SSH(MultiprocessingStateMixin): +@@ -209,7 +223,7 @@ class SSH: ROSTER_UPDATE_FLAG = "#__needs_update" @@ -242,7 +246,7 @@ index ad2796bc87..6db2dfcbb0 100644 self.__parsed_rosters = {SSH.ROSTER_UPDATE_FLAG: True} pull_sock = os.path.join(opts["sock_dir"], "master_event_pull.ipc") if os.path.exists(pull_sock) and zmq: -@@ -236,7 +250,9 @@ class SSH(MultiprocessingStateMixin): +@@ -236,7 +250,9 @@ class SSH: else "glob" ) self._expand_target() @@ -253,10 +257,10 @@ index ad2796bc87..6db2dfcbb0 100644 self.targets = self.roster.targets(self.opts["tgt"], self.tgt_type) if not self.targets: self._update_targets() -@@ -316,6 +332,13 @@ class SSH(MultiprocessingStateMixin): - extended_cfg=self.opts.get("ssh_ext_alternatives"), +@@ -317,6 +333,14 @@ class SSH: ) self.mods = mod_data(self.fsclient) + + self.cache = salt.cache.Cache(self.opts) + self.master_id = self.opts["id"] + self.max_pid_wait = int(self.opts.get("ssh_max_pid_wait", 600)) @@ -264,10 +268,11 @@ index ad2796bc87..6db2dfcbb0 100644 + self.opts["cachedir"], "salt-ssh.session.lock" + ) + self.ssh_session_grace_time = int(self.opts.get("ssh_session_grace_time", 3)) - - # __setstate__ and __getstate__ are only used on spawning platforms. 
- def __setstate__(self, state): -@@ -546,6 +569,8 @@ class SSH(MultiprocessingStateMixin): ++ + @property + def parse_tgt(self): + """ +@@ -531,6 +555,8 @@ class SSH: """ Run the routine in a "Thread", put a dict on the queue """ @@ -276,7 +281,7 @@ index ad2796bc87..6db2dfcbb0 100644 opts = copy.deepcopy(opts) single = Single( opts, -@@ -585,7 +610,7 @@ class SSH(MultiprocessingStateMixin): +@@ -570,7 +596,7 @@ class SSH: """ que = multiprocessing.Queue() running = {} @@ -285,7 +290,7 @@ index ad2796bc87..6db2dfcbb0 100644 returned = set() rets = set() init = False -@@ -594,11 +619,43 @@ class SSH(MultiprocessingStateMixin): +@@ -579,11 +605,43 @@ class SSH: log.error("No matching targets found in roster.") break if len(running) < self.opts.get("ssh_max_procs", 25) and not init: @@ -332,7 +337,7 @@ index ad2796bc87..6db2dfcbb0 100644 for default in self.defaults: if default not in self.targets[host]: self.targets[host][default] = self.defaults[default] -@@ -630,8 +687,38 @@ class SSH(MultiprocessingStateMixin): +@@ -615,8 +673,38 @@ class SSH: mine, ) routine = Process(target=self.handle_routine, args=args) @@ -372,7 +377,7 @@ index ad2796bc87..6db2dfcbb0 100644 continue ret = {} try: -@@ -662,12 +749,27 @@ class SSH(MultiprocessingStateMixin): +@@ -647,12 +735,27 @@ class SSH: ) ret = {"id": host, "ret": error} log.error(error) @@ -400,7 +405,24 @@ index ad2796bc87..6db2dfcbb0 100644 if len(rets) >= len(self.targets): break # Sleep when limit or all threads started -@@ -1036,14 +1138,24 @@ class Single: +@@ -916,6 +1019,7 @@ class Single: + self.context = {"master_opts": self.opts, "fileclient": self.fsclient} + + self.ssh_pre_flight = kwargs.get("ssh_pre_flight", None) ++ self.ssh_pre_flight_args = kwargs.get("ssh_pre_flight_args", None) + + if self.ssh_pre_flight: + self.ssh_pre_file = os.path.basename(self.ssh_pre_flight) +@@ -1007,7 +1111,7 @@ class Single: + + self.shell.send(self.ssh_pre_flight, script) + +- return self.execute_script(script) ++ return 
self.execute_script(script, script_args=self.ssh_pre_flight_args) + + def check_thin_dir(self): + """ +@@ -1020,14 +1124,24 @@ class Single: return False return True @@ -429,7 +451,7 @@ index ad2796bc87..6db2dfcbb0 100644 self.deploy_ext() return True -@@ -1071,8 +1183,9 @@ class Single: +@@ -1055,8 +1169,9 @@ class Single: Returns tuple of (stdout, stderr, retcode) """ stdout = stderr = retcode = None @@ -440,7 +462,7 @@ index ad2796bc87..6db2dfcbb0 100644 if not self.opts.get("ssh_run_pre_flight", False) and self.check_thin_dir(): log.info( "%s thin dir already exists. Not running ssh_pre_flight script", -@@ -1086,14 +1199,16 @@ class Single: +@@ -1070,14 +1185,16 @@ class Single: stdout, stderr, retcode = self.run_ssh_pre_flight() if retcode != 0: log.error( @@ -459,11 +481,38 @@ index ad2796bc87..6db2dfcbb0 100644 cmd_str = " ".join([self._escape_arg(arg) for arg in self.argv]) stdout, stderr, retcode = self.shell.exec_cmd(cmd_str) +@@ -1335,15 +1452,24 @@ ARGS = {arguments}\n'''.format( + + return cmd + +- def execute_script(self, script, extension="py", pre_dir=""): ++ def execute_script(self, script, extension="py", pre_dir="", script_args=None): + """ + execute a script on the minion then delete + """ ++ args = "" ++ if script_args: ++ args = " {}".format( ++ " ".join([str(el) for el in script_args]) ++ if isinstance(script_args, (list, tuple)) ++ else script_args ++ ) + if extension == "ps1": + ret = self.shell.exec_cmd('"powershell {}"'.format(script)) + else: + if not self.winrm: +- ret = self.shell.exec_cmd("/bin/sh '{}{}'".format(pre_dir, script)) ++ ret = self.shell.exec_cmd( ++ "/bin/sh '{}{}'{}".format(pre_dir, script, args) ++ ) + else: + ret = saltwinshell.call_python(self, script) + diff --git a/salt/client/ssh/client.py b/salt/client/ssh/client.py -index be9247cb15..0b67598fc6 100644 +index 245e1529c6..a45deeb325 100644 --- a/salt/client/ssh/client.py +++ b/salt/client/ssh/client.py -@@ -108,7 +108,7 @@ class SSHClient: +@@ -107,7 +107,7 @@ 
class SSHClient: return sane_kwargs def _prep_ssh( @@ -472,7 +521,7 @@ index be9247cb15..0b67598fc6 100644 ): """ Prepare the arguments -@@ -123,7 +123,7 @@ class SSHClient: +@@ -122,7 +122,7 @@ class SSHClient: opts["selected_target_option"] = tgt_type opts["tgt"] = tgt opts["arg"] = arg @@ -481,7 +530,7 @@ index be9247cb15..0b67598fc6 100644 def cmd_iter( self, -@@ -160,7 +160,7 @@ class SSHClient: +@@ -159,7 +159,7 @@ class SSHClient: final.update(ret) return final @@ -490,7 +539,7 @@ index be9247cb15..0b67598fc6 100644 """ Execute a salt-ssh call synchronously. -@@ -193,6 +193,7 @@ class SSHClient: +@@ -192,6 +192,7 @@ class SSHClient: low.get("timeout"), low.get("tgt_type"), low.get("kwarg"), @@ -499,10 +548,10 @@ index be9247cb15..0b67598fc6 100644 ) diff --git a/salt/client/ssh/shell.py b/salt/client/ssh/shell.py -index cfa82d13c2..bc1ad034df 100644 +index 7461618a2e..6b54a20abd 100644 --- a/salt/client/ssh/shell.py +++ b/salt/client/ssh/shell.py -@@ -464,6 +464,14 @@ class Shell: +@@ -442,6 +442,14 @@ class Shell: if stdout: old_stdout = stdout time.sleep(0.01) @@ -650,10 +699,10 @@ index b77749f495..293ea1b7fa 100644 "--local", "--metadata", diff --git a/salt/loader/__init__.py b/salt/loader/__init__.py -index 72a5e54401..32f8a7702c 100644 +index f7815acc03..a0f2220476 100644 --- a/salt/loader/__init__.py +++ b/salt/loader/__init__.py -@@ -9,6 +9,7 @@ import inspect +@@ -8,6 +8,7 @@ import contextlib import logging import os import re @@ -689,18 +738,14 @@ index 72a5e54401..32f8a7702c 100644 def static_loader( opts, -@@ -725,7 +738,7 @@ def fileserver(opts, backends, loaded_base_name=None): +@@ -597,16 +610,19 @@ def fileserver(opts, backends): ) --def roster(opts, runner=None, utils=None, whitelist=None, loaded_base_name=None): -+def roster(opts, runner=None, utils=None, whitelist=None, loaded_base_name=None, context=None): +-def roster(opts, runner=None, utils=None, whitelist=None): ++def roster(opts, runner=None, utils=None, whitelist=None, 
context=None): """ Returns the roster modules - -@@ -736,12 +749,15 @@ def roster(opts, runner=None, utils=None, whitelist=None, loaded_base_name=None) - :param str loaded_base_name: The imported modules namespace when imported - by the salt loader. """ + if context is None: + context = {} @@ -713,9 +758,9 @@ index 72a5e54401..32f8a7702c 100644 - pack={"__runner__": runner, "__utils__": utils}, + pack={"__runner__": runner, "__utils__": utils, "__context__": context}, extra_module_dirs=utils.module_dirs if utils else None, - loaded_base_name=loaded_base_name, ) -@@ -933,7 +949,14 @@ def render( + +@@ -744,7 +760,14 @@ def render(opts, functions, states=None, proxy=None, context=None): ) rend = FilterDictWrapper(ret, ".render") @@ -732,10 +777,10 @@ index 72a5e54401..32f8a7702c 100644 ): err = ( diff --git a/salt/netapi/__init__.py b/salt/netapi/__init__.py -index 7127dc2b3c..4a80697648 100644 +index 81954acb96..5d2ff994a6 100644 --- a/salt/netapi/__init__.py +++ b/salt/netapi/__init__.py -@@ -79,6 +79,7 @@ class NetapiClient: +@@ -46,6 +46,7 @@ class NetapiClient: self.loadauth = salt.auth.LoadAuth(apiopts) self.key = salt.daemons.masterapi.access_keys(apiopts) self.ckminions = salt.utils.minions.CkMinions(apiopts) @@ -743,7 +788,7 @@ index 7127dc2b3c..4a80697648 100644 def _is_master_running(self): """ -@@ -238,7 +239,7 @@ class NetapiClient: +@@ -205,7 +206,7 @@ class NetapiClient: with salt.client.ssh.client.SSHClient( mopts=self.opts, disable_custom_roster=True ) as client: @@ -753,7 +798,7 @@ index 7127dc2b3c..4a80697648 100644 def runner(self, fun, timeout=None, full_return=False, **kwargs): """ diff --git a/salt/roster/__init__.py b/salt/roster/__init__.py -index fc7339d785..ea23d550d7 100644 +index b45afffd24..4b6182b2dd 100644 --- a/salt/roster/__init__.py +++ b/salt/roster/__init__.py @@ -59,7 +59,7 @@ class Roster: @@ -777,7 +822,7 @@ index fc7339d785..ea23d550d7 100644 def _gen_back(self): """ diff --git a/tests/unit/test_loader.py 
b/tests/unit/test_loader.py -index 66ba3d4e05..412d412398 100644 +index 2319f815d3..e83f86cd01 100644 --- a/tests/unit/test_loader.py +++ b/tests/unit/test_loader.py @@ -1696,7 +1696,7 @@ class LazyLoaderRefreshFileMappingTest(TestCase): @@ -790,6 +835,6 @@ index 66ba3d4e05..412d412398 100644 self.LOADER_CLASS = LazyLoaderMock -- -2.37.3 +2.35.1 diff --git a/add-sleep-on-exception-handling-on-minion-connection.patch b/add-sleep-on-exception-handling-on-minion-connection.patch index 83a3ca8..146f6c2 100644 --- a/add-sleep-on-exception-handling-on-minion-connection.patch +++ b/add-sleep-on-exception-handling-on-minion-connection.patch @@ -1,4 +1,4 @@ -From b772fc7540cea4088ecac0bdc0e24d2be84bfcad Mon Sep 17 00:00:00 2001 +From aafa76ddd04114f699d760577681db75579685d7 Mon Sep 17 00:00:00 2001 From: Victor Zhestkov <35733135+vzhestkov@users.noreply.github.com> Date: Thu, 18 Feb 2021 14:49:38 +0300 Subject: [PATCH] Add sleep on exception handling on minion connection @@ -12,7 +12,7 @@ Subject: [PATCH] Add sleep on exception handling on minion connection 1 file changed, 6 insertions(+) diff --git a/salt/minion.py b/salt/minion.py -index 16452facf4..780b397e83 100644 +index 4da665a130..dacff1e0a9 100644 --- a/salt/minion.py +++ b/salt/minion.py @@ -1123,6 +1123,9 @@ class MinionManager(MinionBase): @@ -25,7 +25,7 @@ index 16452facf4..780b397e83 100644 while True: try: if minion.opts.get("beacons_before_connect", False): -@@ -1161,6 +1164,9 @@ class MinionManager(MinionBase): +@@ -1158,6 +1161,9 @@ class MinionManager(MinionBase): minion.opts["master"], exc_info=True, ) @@ -36,6 +36,6 @@ index 16452facf4..780b397e83 100644 # Multi Master Tune In def tune_in(self): -- -2.37.3 +2.29.2 diff --git a/add-standalone-configuration-file-for-enabling-packa.patch b/add-standalone-configuration-file-for-enabling-packa.patch index 8cf92a5..5eb1546 100644 --- a/add-standalone-configuration-file-for-enabling-packa.patch +++ b/add-standalone-configuration-file-for-enabling-packa.patch @@ 
-1,9 +1,9 @@ -From 875fb95ae468042005cd0db463f13a9315c1e756 Mon Sep 17 00:00:00 2001 +From 8ad65d6fa39edc7fc1967e2df1f3db0aa7df4d11 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Pablo=20Su=C3=A1rez=20Hern=C3=A1ndez?= Date: Wed, 22 May 2019 13:00:46 +0100 -Subject: [PATCH] Add standalone configuration file for enabling package - formulas +Subject: [PATCH] Add standalone configuration file for enabling + package formulas --- conf/suse/standalone-formulas-configuration.conf | 4 ++++ @@ -21,6 +21,6 @@ index 0000000000..94d05fb2ee + - /usr/share/salt-formulas/states + - /srv/salt -- -2.37.3 +2.29.2 diff --git a/add-support-for-gpgautoimport-539.patch b/add-support-for-gpgautoimport-539.patch index 804c62c..3ead610 100644 --- a/add-support-for-gpgautoimport-539.patch +++ b/add-support-for-gpgautoimport-539.patch @@ -1,4 +1,4 @@ -From f92891c007ee55d9ccc2d7b3da53e4e0a6fc94c3 Mon Sep 17 00:00:00 2001 +From fbd5163bd0d5409a1823e9fb8e0cb623c22d6036 Mon Sep 17 00:00:00 2001 From: Michael Calmer Date: Fri, 8 Jul 2022 10:15:37 +0200 Subject: [PATCH] add support for gpgautoimport (#539) @@ -18,10 +18,10 @@ Subject: [PATCH] add support for gpgautoimport (#539) 2 files changed, 140 insertions(+), 31 deletions(-) diff --git a/salt/modules/zypperpkg.py b/salt/modules/zypperpkg.py -index cbfbc4b78d..2c36e2968a 100644 +index 39d26f0e93..b622105e15 100644 --- a/salt/modules/zypperpkg.py +++ b/salt/modules/zypperpkg.py -@@ -589,7 +589,7 @@ def list_upgrades(refresh=True, root=None, **kwargs): +@@ -591,7 +591,7 @@ def list_upgrades(refresh=True, root=None, **kwargs): salt '*' pkg.list_upgrades """ if refresh: @@ -30,7 +30,7 @@ index cbfbc4b78d..2c36e2968a 100644 ret = dict() cmd = ["list-updates"] -@@ -703,7 +703,7 @@ def info_available(*names, **kwargs): +@@ -705,7 +705,7 @@ def info_available(*names, **kwargs): # Refresh db before extracting the latest package if kwargs.get("refresh", True): @@ -39,7 +39,7 @@ index cbfbc4b78d..2c36e2968a 100644 pkg_info = [] batch = names[:] -@@ -1393,7 
+1393,6 @@ def mod_repo(repo, **kwargs): +@@ -1395,7 +1395,6 @@ def mod_repo(repo, **kwargs): cmd_opt.append("--name='{}'".format(kwargs.get("humanname"))) if kwargs.get("gpgautoimport") is True: @@ -47,7 +47,7 @@ index cbfbc4b78d..2c36e2968a 100644 call_refresh = True if cmd_opt: -@@ -1405,8 +1404,8 @@ def mod_repo(repo, **kwargs): +@@ -1407,8 +1406,8 @@ def mod_repo(repo, **kwargs): # when used with "zypper ar --refresh" or "zypper mr --refresh" # --gpg-auto-import-keys is not doing anything # so we need to specifically refresh here with --gpg-auto-import-keys @@ -58,7 +58,7 @@ index cbfbc4b78d..2c36e2968a 100644 elif not added and not cmd_opt: comment = "Specified arguments did not result in modification of repo" -@@ -1417,7 +1416,7 @@ def mod_repo(repo, **kwargs): +@@ -1419,7 +1418,7 @@ def mod_repo(repo, **kwargs): return repo @@ -67,7 +67,7 @@ index cbfbc4b78d..2c36e2968a 100644 """ Trigger a repository refresh by calling ``zypper refresh``. Refresh will run with ``--force`` if the "force=True" flag is passed on the CLI or -@@ -1428,6 +1427,17 @@ def refresh_db(force=None, root=None): +@@ -1430,6 +1429,17 @@ def refresh_db(force=None, root=None): {'': Bool} @@ -85,7 +85,7 @@ index cbfbc4b78d..2c36e2968a 100644 root operate on a different root directory. 
-@@ -1448,11 +1458,22 @@ def refresh_db(force=None, root=None): +@@ -1450,11 +1460,22 @@ def refresh_db(force=None, root=None): salt.utils.pkg.clear_rtag(__opts__) ret = {} refresh_opts = ["refresh"] @@ -109,7 +109,7 @@ index cbfbc4b78d..2c36e2968a 100644 for line in out.splitlines(): if not line: -@@ -1637,7 +1658,7 @@ def install( +@@ -1639,7 +1660,7 @@ def install( 'arch': ''}}} """ if refresh: @@ -118,7 +118,7 @@ index cbfbc4b78d..2c36e2968a 100644 try: pkg_params, pkg_type = __salt__["pkg_resource.parse_targets"]( -@@ -1932,7 +1953,7 @@ def upgrade( +@@ -1934,7 +1955,7 @@ def upgrade( cmd_update.insert(0, "--no-gpg-checks") if refresh: @@ -127,7 +127,7 @@ index cbfbc4b78d..2c36e2968a 100644 if dryrun: cmd_update.append("--dry-run") -@@ -2759,7 +2780,7 @@ def search(criteria, refresh=False, **kwargs): +@@ -2844,7 +2865,7 @@ def search(criteria, refresh=False, **kwargs): root = kwargs.get("root", None) if refresh: @@ -136,7 +136,7 @@ index cbfbc4b78d..2c36e2968a 100644 cmd = ["search"] if kwargs.get("match") == "exact": -@@ -2910,7 +2931,7 @@ def download(*packages, **kwargs): +@@ -2995,7 +3016,7 @@ def download(*packages, **kwargs): refresh = kwargs.get("refresh", False) if refresh: @@ -145,7 +145,7 @@ index cbfbc4b78d..2c36e2968a 100644 pkg_ret = {} for dld_result in ( -@@ -3062,7 +3083,7 @@ def list_patches(refresh=False, root=None, **kwargs): +@@ -3147,7 +3168,7 @@ def list_patches(refresh=False, root=None, **kwargs): salt '*' pkg.list_patches """ if refresh: @@ -154,7 +154,7 @@ index cbfbc4b78d..2c36e2968a 100644 return _get_patches(root=root) -@@ -3156,7 +3177,7 @@ def resolve_capabilities(pkgs, refresh=False, root=None, **kwargs): +@@ -3241,7 +3262,7 @@ def resolve_capabilities(pkgs, refresh=False, root=None, **kwargs): salt '*' pkg.resolve_capabilities resolve_capabilities=True w3m_ssl """ if refresh: @@ -364,6 +364,6 @@ index fea6eeb004..3f1560a385 100644 def test_wildcard_to_query_match_all(self): -- -2.37.3 +2.36.1 diff --git 
a/add-support-for-name-pkgs-and-diff_attr-parameters-t.patch b/add-support-for-name-pkgs-and-diff_attr-parameters-t.patch index f6d9d17..90d257d 100644 --- a/add-support-for-name-pkgs-and-diff_attr-parameters-t.patch +++ b/add-support-for-name-pkgs-and-diff_attr-parameters-t.patch @@ -1,8 +1,8 @@ -From 0165c300ec0f7ac70b274b81a8857c2e6d71552d Mon Sep 17 00:00:00 2001 +From c162e36fc52ca2f10b25354f1e430e13113f2976 Mon Sep 17 00:00:00 2001 From: Alexander Graul Date: Thu, 7 Jul 2022 11:26:34 +0200 -Subject: [PATCH] Add support for name, pkgs and diff_attr parameters to - zypperpkg.upgrade()/yumpkg.upgrade() - backport 3004 (#538) +Subject: [PATCH] Add support for name, pkgs and diff_attr parameters + to zypperpkg.upgrade()/yumpkg.upgrade() - backport 3004 (#538) * Migrate zypper.upgrade tests to pytest @@ -37,9 +37,9 @@ Fixes: https://github.com/saltstack/salt/issues/62031 changelog/62032.fixed | 1 + salt/modules/yumpkg.py | 7 +- salt/modules/zypperpkg.py | 76 ++- - tests/pytests/unit/modules/test_zypperpkg.py | 277 ++++++++++- + tests/pytests/unit/modules/test_zypperpkg.py | 278 ++++++++++- tests/unit/modules/test_zypperpkg.py | 482 ------------------- - 7 files changed, 355 insertions(+), 490 deletions(-) + 7 files changed, 356 insertions(+), 490 deletions(-) create mode 100644 changelog/62030.fixed create mode 100644 changelog/62031.added create mode 100644 changelog/62032.fixed @@ -66,7 +66,7 @@ index 0000000000..ceb3cc89b9 @@ -0,0 +1 @@ +Fix attr=all handling in pkg.list_pkgs() (yum/zypper). 
diff --git a/salt/modules/yumpkg.py b/salt/modules/yumpkg.py -index f52e084346..fcfc5d4045 100644 +index 3138ac2e59..0013282507 100644 --- a/salt/modules/yumpkg.py +++ b/salt/modules/yumpkg.py @@ -735,7 +735,7 @@ def list_pkgs(versions_as_list=False, **kwargs): @@ -78,7 +78,7 @@ index f52e084346..fcfc5d4045 100644 attr = salt.utils.args.split_input(attr) contextkey = "pkg.list_pkgs" -@@ -1844,6 +1844,7 @@ def upgrade( +@@ -1835,6 +1835,7 @@ def upgrade( normalize=True, minimal=False, obsoletes=True, @@ -86,7 +86,7 @@ index f52e084346..fcfc5d4045 100644 **kwargs ): """ -@@ -2000,7 +2001,7 @@ def upgrade( +@@ -1991,7 +1992,7 @@ def upgrade( if salt.utils.data.is_true(refresh): refresh_db(**kwargs) @@ -95,7 +95,7 @@ index f52e084346..fcfc5d4045 100644 targets = [] if name or pkgs: -@@ -2032,7 +2033,7 @@ def upgrade( +@@ -2023,7 +2024,7 @@ def upgrade( cmd.extend(targets) result = _call_yum(cmd) __context__.pop("pkg.list_pkgs", None) @@ -105,10 +105,10 @@ index f52e084346..fcfc5d4045 100644 if result["retcode"] != 0: diff --git a/salt/modules/zypperpkg.py b/salt/modules/zypperpkg.py -index 4e3006a8cd..cbfbc4b78d 100644 +index ac6c36a09f..39d26f0e93 100644 --- a/salt/modules/zypperpkg.py +++ b/salt/modules/zypperpkg.py -@@ -939,7 +939,7 @@ def list_pkgs(versions_as_list=False, root=None, includes=None, **kwargs): +@@ -941,7 +941,7 @@ def list_pkgs(versions_as_list=False, root=None, includes=None, **kwargs): return {} attr = kwargs.get("attr") @@ -117,7 +117,7 @@ index 4e3006a8cd..cbfbc4b78d 100644 attr = salt.utils.args.split_input(attr) includes = includes if includes else [] -@@ -1791,6 +1791,8 @@ def install( +@@ -1793,6 +1793,8 @@ def install( def upgrade( @@ -126,7 +126,7 @@ index 4e3006a8cd..cbfbc4b78d 100644 refresh=True, dryrun=False, dist_upgrade=False, -@@ -1800,6 +1802,7 @@ def upgrade( +@@ -1802,6 +1804,7 @@ def upgrade( skip_verify=False, no_recommends=False, root=None, @@ -134,7 +134,7 @@ index 4e3006a8cd..cbfbc4b78d 100644 **kwargs ): # pylint: 
disable=unused-argument """ -@@ -1819,6 +1822,27 @@ def upgrade( +@@ -1821,6 +1824,27 @@ def upgrade( Run a full system upgrade, a zypper upgrade @@ -162,7 +162,7 @@ index 4e3006a8cd..cbfbc4b78d 100644 refresh force a refresh if set to True (default). If set to False it depends on zypper if a refresh is -@@ -1850,6 +1874,24 @@ def upgrade( +@@ -1852,6 +1876,24 @@ def upgrade( root Operate on a different root directory. @@ -187,7 +187,7 @@ index 4e3006a8cd..cbfbc4b78d 100644 Returns a dictionary containing the changes: .. code-block:: python -@@ -1857,11 +1899,27 @@ def upgrade( +@@ -1859,11 +1901,27 @@ def upgrade( {'': {'old': '', 'new': ''}} @@ -215,7 +215,7 @@ index 4e3006a8cd..cbfbc4b78d 100644 salt '*' pkg.upgrade dist_upgrade=True fromrepo='["MyRepoName"]' novendorchange=True salt '*' pkg.upgrade dist_upgrade=True dryrun=True """ -@@ -1897,12 +1955,24 @@ def upgrade( +@@ -1899,12 +1957,24 @@ def upgrade( allowvendorchange, novendorchange ).noraise.call(*cmd_update + ["--debug-solver"]) @@ -243,7 +243,7 @@ index 4e3006a8cd..cbfbc4b78d 100644 if __zypper__.exit_code not in __zypper__.SUCCESS_EXIT_CODES: diff --git a/tests/pytests/unit/modules/test_zypperpkg.py b/tests/pytests/unit/modules/test_zypperpkg.py -index 351a173b81..84dc7a10b4 100644 +index bfc1558c9a..e02bba9a07 100644 --- a/tests/pytests/unit/modules/test_zypperpkg.py +++ b/tests/pytests/unit/modules/test_zypperpkg.py @@ -4,17 +4,31 @@ @@ -280,10 +280,11 @@ index 351a173b81..84dc7a10b4 100644 def test_list_pkgs_no_context(): -@@ -254,3 +268,262 @@ def test_del_repo_key(): - with patch.dict(zypper.__salt__, salt_mock): - assert zypper.del_repo_key(keyid="keyid", root="/mnt") - salt_mock["lowpkg.remove_gpg_key"].assert_called_once_with("keyid", "/mnt") +@@ -254,3 +268,263 @@ def test_pkg_list_holds(): + ret = zypper.list_holds() + assert len(ret) == 1 + assert "bar-2:2.3.4-2.1.*" in ret ++ + +@pytest.mark.parametrize( + "zypper_version,lowpkg_version_cmp,expected_inst_avc,expected_dup_avc", @@ -1051,6 
+1052,6 @@ index 39f28f2198..fea6eeb004 100644 """ Test whether or not an upgrade is available for a given package. -- -2.37.3 +2.36.1 diff --git a/adds-explicit-type-cast-for-port.patch b/adds-explicit-type-cast-for-port.patch new file mode 100644 index 0000000..2681ce4 --- /dev/null +++ b/adds-explicit-type-cast-for-port.patch @@ -0,0 +1,32 @@ +From 3beb3379dafe1adf9c1a43694f7b71938be3f583 Mon Sep 17 00:00:00 2001 +From: Jochen Breuer +Date: Wed, 1 Apr 2020 16:13:23 +0200 +Subject: [PATCH] Adds explicit type cast for port + +If a port was passed as a string, the execution logic was broken +and a wrong set of remotes was returned. + +The type casting to int solves this issue. +--- + salt/utils/network.py | 4 ++++ + 1 file changed, 4 insertions(+) + +diff --git a/salt/utils/network.py b/salt/utils/network.py +index 5fc9a34ca4..0dd20c5599 100644 +--- a/salt/utils/network.py ++++ b/salt/utils/network.py +@@ -1703,6 +1703,10 @@ def _netlink_tool_remote_on(port, which_end): + chunks = line.split() + remote_host, remote_port = chunks[4].rsplit(":", 1) + ++ if which_end == "remote_port" and int(remote_port) != int(port): ++ continue ++ if which_end == "local_port" and int(local_port) != int(port): ++ continue + remotes.add(remote_host.strip("[]")) + + if valid is False: +-- +2.33.0 + + diff --git a/align-amazon-ec2-nitro-grains-with-upstream-pr-bsc-1.patch b/align-amazon-ec2-nitro-grains-with-upstream-pr-bsc-1.patch deleted file mode 100644 index a56ff4e..0000000 --- a/align-amazon-ec2-nitro-grains-with-upstream-pr-bsc-1.patch +++ /dev/null @@ -1,124 +0,0 @@ -From d1e9af256fa67cd792ce11e6e9c1e24a1fe2054f Mon Sep 17 00:00:00 2001 -From: Victor Zhestkov -Date: Fri, 28 Oct 2022 13:19:46 +0300 -Subject: [PATCH] Align Amazon EC2 (Nitro) grains with upstream PR - (bsc#1203685) - -* Set virtual to Nitro for Amazon EC2 kvm instances - -* Add few mocks to prevent false failing - -possible in some specific environments - -* Add one more possible test case returning Nitro ---- - 
salt/grains/core.py | 8 +++++++- - tests/pytests/unit/grains/test_core.py | 27 +++++++++++++++++++++++++- - 2 files changed, 33 insertions(+), 2 deletions(-) - -diff --git a/salt/grains/core.py b/salt/grains/core.py -index 76f3767ddf..f359c07432 100644 ---- a/salt/grains/core.py -+++ b/salt/grains/core.py -@@ -860,6 +860,10 @@ def _virtual(osdata): - grains["virtual"] = "container" - grains["virtual_subtype"] = "LXC" - break -+ elif "amazon" in output: -+ grains["virtual"] = "Nitro" -+ grains["virtual_subtype"] = "Amazon EC2" -+ break - elif command == "virt-what": - for line in output.splitlines(): - if line in ("kvm", "qemu", "uml", "xen"): -@@ -1174,7 +1178,7 @@ def _virtual(osdata): - grains["virtual"] = "virtual" - - # Try to detect if the instance is running on Amazon EC2 -- if grains["virtual"] in ("qemu", "kvm", "xen"): -+ if grains["virtual"] in ("qemu", "kvm", "xen", "amazon"): - dmidecode = salt.utils.path.which("dmidecode") - if dmidecode: - ret = __salt__["cmd.run_all"]( -@@ -1182,6 +1186,8 @@ def _virtual(osdata): - ) - output = ret["stdout"] - if "Manufacturer: Amazon EC2" in output: -+ if grains["virtual"] != "xen": -+ grains["virtual"] = "Nitro" - grains["virtual_subtype"] = "Amazon EC2" - product = re.match( - r".*Product Name: ([^\r\n]*).*", output, flags=re.DOTALL -diff --git a/tests/pytests/unit/grains/test_core.py b/tests/pytests/unit/grains/test_core.py -index c06cdb2db0..6f3bef69f2 100644 ---- a/tests/pytests/unit/grains/test_core.py -+++ b/tests/pytests/unit/grains/test_core.py -@@ -2888,6 +2888,11 @@ def test_virtual_set_virtual_ec2(): - "/usr/bin/systemd-detect-virt", - None, - None, -+ # Check with systemd-detect-virt returning amazon and no dmidecode available -+ None, -+ "/usr/bin/systemd-detect-virt", -+ None, -+ None, - ] - ) - cmd_run_all_mock = MagicMock( -@@ -2946,9 +2951,22 @@ def test_virtual_set_virtual_ec2(): - }, - # Check with systemd-detect-virt when no dmidecode available - {"retcode": 0, "stderr": "", "stdout": "kvm"}, -+ 
# Check with systemd-detect-virt returning amazon and no dmidecode available -+ {"retcode": 0, "stderr": "", "stdout": "amazon"}, - ] - ) - -+ def _mock_is_file(filename): -+ if filename in ( -+ "/proc/1/cgroup", -+ "/proc/cpuinfo", -+ "/sys/devices/virtual/dmi/id/product_name", -+ "/proc/xen/xsd_kva", -+ "/proc/xen/capabilities", -+ ): -+ return False -+ return True -+ - with patch("salt.utils.path.which", which_mock), patch.dict( - core.__salt__, - { -@@ -2957,6 +2975,8 @@ def test_virtual_set_virtual_ec2(): - "cmd.retcode": salt.modules.cmdmod.retcode, - "smbios.get": salt.modules.smbios.get, - }, -+ ), patch("os.path.isfile", _mock_is_file), patch( -+ "os.path.isdir", return_value=False - ): - - virtual_grains = core._virtual(osdata.copy()) -@@ -2966,7 +2986,7 @@ def test_virtual_set_virtual_ec2(): - - virtual_grains = core._virtual(osdata.copy()) - -- assert virtual_grains["virtual"] == "kvm" -+ assert virtual_grains["virtual"] == "Nitro" - assert virtual_grains["virtual_subtype"] == "Amazon EC2 (m5.large)" - - virtual_grains = core._virtual(osdata.copy()) -@@ -2974,6 +2994,11 @@ def test_virtual_set_virtual_ec2(): - assert virtual_grains["virtual"] == "kvm" - assert "virtual_subtype" not in virtual_grains - -+ virtual_grains = core._virtual(osdata.copy()) -+ -+ assert virtual_grains["virtual"] == "Nitro" -+ assert virtual_grains["virtual_subtype"] == "Amazon EC2" -+ - - @pytest.mark.skip_on_windows - def test_linux_proc_files_with_non_utf8_chars(): --- -2.37.3 - - diff --git a/allow-vendor-change-option-with-zypper.patch b/allow-vendor-change-option-with-zypper.patch index 58ac76b..c701677 100644 --- a/allow-vendor-change-option-with-zypper.patch +++ b/allow-vendor-change-option-with-zypper.patch @@ -1,4 +1,4 @@ -From 51836a4c37f05262e708f058f323c1fbc2123ade Mon Sep 17 00:00:00 2001 +From 07d1b742f16799d3df9d7eeb04bbce5d814e519d Mon Sep 17 00:00:00 2001 From: Martin Seidl Date: Tue, 27 Oct 2020 16:12:29 +0100 Subject: [PATCH] Allow vendor change option with 
zypper @@ -64,7 +64,7 @@ Co-authored-by: Pablo Suárez Hernández 2 files changed, 462 insertions(+), 61 deletions(-) diff --git a/salt/modules/zypperpkg.py b/salt/modules/zypperpkg.py -index 6b19c65db3..c2452d6dec 100644 +index 1777bec031..7216e25b86 100644 --- a/salt/modules/zypperpkg.py +++ b/salt/modules/zypperpkg.py @@ -35,6 +35,8 @@ import salt.utils.stringutils @@ -76,7 +76,7 @@ index 6b19c65db3..c2452d6dec 100644 from salt.utils.versions import LooseVersion log = logging.getLogger(__name__) -@@ -135,6 +137,13 @@ class _Zypper: +@@ -128,6 +130,13 @@ class _Zypper: self.__systemd_scope = False self.__root = None @@ -90,7 +90,7 @@ index 6b19c65db3..c2452d6dec 100644 # Call status self.__called = False -@@ -179,6 +188,8 @@ class _Zypper: +@@ -172,6 +181,8 @@ class _Zypper: self.__no_raise = True elif item == "refreshable": self.__refresh = True @@ -99,7 +99,7 @@ index 6b19c65db3..c2452d6dec 100644 elif item == "call": return self.__call else: -@@ -219,6 +230,33 @@ class _Zypper: +@@ -212,6 +223,33 @@ class _Zypper: def pid(self): return self.__call_result.get("pid", "") @@ -133,7 +133,7 @@ index 6b19c65db3..c2452d6dec 100644 def _is_error(self): """ Is this is an error code? 
-@@ -335,6 +373,15 @@ class _Zypper: +@@ -326,6 +364,15 @@ class _Zypper: if self.__systemd_scope: cmd.extend(["systemd-run", "--scope"]) cmd.extend(self.__cmd) @@ -149,7 +149,7 @@ index 6b19c65db3..c2452d6dec 100644 log.debug("Calling Zypper: %s", " ".join(cmd)) self.__call_result = __salt__["cmd.run_all"](cmd, **kwargs) if self._check_result(): -@@ -1444,6 +1491,8 @@ def install( +@@ -1435,6 +1482,8 @@ def install( no_recommends=False, root=None, inclusion_detection=False, @@ -158,7 +158,7 @@ index 6b19c65db3..c2452d6dec 100644 **kwargs ): """ -@@ -1491,6 +1540,13 @@ def install( +@@ -1482,6 +1531,13 @@ def install( skip_verify Skip the GPG verification check (e.g., ``--no-gpg-checks``) @@ -172,7 +172,7 @@ index 6b19c65db3..c2452d6dec 100644 version Can be either a version number, or the combination of a comparison operator (<, >, <=, >=, =) and a version number (ex. '>1.2.3-4'). -@@ -1656,6 +1712,7 @@ def install( +@@ -1647,6 +1703,7 @@ def install( cmd_install.append( kwargs.get("resolve_capabilities") and "--capability" or "--name" ) @@ -180,7 +180,7 @@ index 6b19c65db3..c2452d6dec 100644 if not refresh: cmd_install.insert(0, "--no-refresh") -@@ -1692,6 +1749,7 @@ def install( +@@ -1683,6 +1740,7 @@ def install( systemd_scope=systemd_scope, root=root, ) @@ -188,7 +188,7 @@ index 6b19c65db3..c2452d6dec 100644 .call(*cmd) .splitlines() ): -@@ -1704,7 +1762,9 @@ def install( +@@ -1695,7 +1753,9 @@ def install( while downgrades: cmd = cmd_install + ["--force"] + downgrades[:500] downgrades = downgrades[500:] @@ -199,7 +199,7 @@ index 6b19c65db3..c2452d6dec 100644 _clean_cache() new = ( -@@ -1735,7 +1795,8 @@ def upgrade( +@@ -1726,7 +1786,8 @@ def upgrade( dryrun=False, dist_upgrade=False, fromrepo=None, @@ -209,7 +209,7 @@ index 6b19c65db3..c2452d6dec 100644 skip_verify=False, no_recommends=False, root=None, -@@ -1774,7 +1835,11 @@ def upgrade( +@@ -1765,7 +1826,11 @@ def upgrade( Specify a list of package repositories to upgrade from. 
Default: None novendorchange @@ -222,7 +222,7 @@ index 6b19c65db3..c2452d6dec 100644 skip_verify Skip the GPG verification check (e.g., ``--no-gpg-checks``) -@@ -1821,31 +1886,21 @@ def upgrade( +@@ -1812,31 +1877,21 @@ def upgrade( cmd_update.extend(["--from" if dist_upgrade else "--repo", repo]) log.info("Targeting repos: %s", fromrepo) @@ -267,10 +267,10 @@ index 6b19c65db3..c2452d6dec 100644 new = list_pkgs(root=root) ret = salt.utils.data.compare_dicts(old, new) diff --git a/tests/unit/modules/test_zypperpkg.py b/tests/unit/modules/test_zypperpkg.py -index 671adc2779..39f28f2198 100644 +index 0ba5595d65..78fe226914 100644 --- a/tests/unit/modules/test_zypperpkg.py +++ b/tests/unit/modules/test_zypperpkg.py -@@ -135,6 +135,7 @@ class ZypperTestCase(TestCase, LoaderModuleMockMixin): +@@ -137,6 +137,7 @@ class ZypperTestCase(TestCase, LoaderModuleMockMixin): stdout_xml_snippet = '' sniffer = RunSniffer(stdout=stdout_xml_snippet) @@ -278,7 +278,7 @@ index 671adc2779..39f28f2198 100644 with patch.dict("salt.modules.zypperpkg.__salt__", {"cmd.run_all": sniffer}): self.assertEqual(zypper.__zypper__.call("foo"), stdout_xml_snippet) self.assertEqual(len(sniffer.calls), 1) -@@ -609,13 +610,373 @@ class ZypperTestCase(TestCase, LoaderModuleMockMixin): +@@ -592,13 +593,373 @@ class ZypperTestCase(TestCase, LoaderModuleMockMixin): {"vim": "7.4.326-2.62", "fakepkg": ""}, ) @@ -653,7 +653,7 @@ index 671adc2779..39f28f2198 100644 "salt.modules.zypperpkg.refresh_db", MagicMock(return_value=True) ), patch( "salt.modules.zypperpkg._systemd_scope", MagicMock(return_value=False) -@@ -654,16 +1015,6 @@ class ZypperTestCase(TestCase, LoaderModuleMockMixin): +@@ -637,16 +998,6 @@ class ZypperTestCase(TestCase, LoaderModuleMockMixin): self.assertDictEqual(ret, {"vim": {"old": "1.1", "new": "1.1,1.2"}}) zypper_mock.assert_any_call("update", "--auto-agree-with-licenses") @@ -670,7 +670,7 @@ index 671adc2779..39f28f2198 100644 with patch( "salt.modules.zypperpkg.list_pkgs", 
MagicMock(side_effect=[{"vim": "1.1"}, {"vim": "1.1"}]), -@@ -679,6 +1030,22 @@ class ZypperTestCase(TestCase, LoaderModuleMockMixin): +@@ -662,6 +1013,22 @@ class ZypperTestCase(TestCase, LoaderModuleMockMixin): "--debug-solver", ) @@ -693,7 +693,7 @@ index 671adc2779..39f28f2198 100644 with patch( "salt.modules.zypperpkg.list_pkgs", MagicMock(side_effect=[{"vim": "1.1"}, {"vim": "1.1"}]), -@@ -697,7 +1064,6 @@ class ZypperTestCase(TestCase, LoaderModuleMockMixin): +@@ -680,7 +1047,6 @@ class ZypperTestCase(TestCase, LoaderModuleMockMixin): "Dummy", "--from", "Dummy2", @@ -701,7 +701,7 @@ index 671adc2779..39f28f2198 100644 ) zypper_mock.assert_any_call( "dist-upgrade", -@@ -707,7 +1073,6 @@ class ZypperTestCase(TestCase, LoaderModuleMockMixin): +@@ -690,7 +1056,6 @@ class ZypperTestCase(TestCase, LoaderModuleMockMixin): "Dummy", "--from", "Dummy2", @@ -709,7 +709,7 @@ index 671adc2779..39f28f2198 100644 "--debug-solver", ) -@@ -727,33 +1092,13 @@ class ZypperTestCase(TestCase, LoaderModuleMockMixin): +@@ -710,33 +1075,13 @@ class ZypperTestCase(TestCase, LoaderModuleMockMixin): "Dummy2", ) @@ -744,7 +744,7 @@ index 671adc2779..39f28f2198 100644 "salt.modules.zypperpkg.refresh_db", MagicMock(return_value=True) ), patch( "salt.modules.zypperpkg._systemd_scope", MagicMock(return_value=False) -@@ -812,12 +1157,13 @@ Repository 'DUMMY' not found by its alias, number, or URI. +@@ -795,12 +1140,13 @@ Repository 'DUMMY' not found by its alias, number, or URI. self.pid = 1234 self.exit_code = 555 self.noraise = MagicMock() @@ -759,7 +759,7 @@ index 671adc2779..39f28f2198 100644 "salt.modules.zypperpkg.__zypper__", FailingZypperDummy() ) as zypper_mock, patch( "salt.modules.zypperpkg.refresh_db", MagicMock(return_value=True) -@@ -834,7 +1180,7 @@ Repository 'DUMMY' not found by its alias, number, or URI. +@@ -817,7 +1163,7 @@ Repository 'DUMMY' not found by its alias, number, or URI. 
self.assertEqual(cmd_exc.exception.info["changes"], {}) self.assertEqual(cmd_exc.exception.info["result"]["stdout"], zypper_out) zypper_mock.noraise.call.assert_called_with( @@ -769,6 +769,6 @@ index 671adc2779..39f28f2198 100644 def test_upgrade_available(self): -- -2.37.3 +2.33.0 diff --git a/async-batch-implementation.patch b/async-batch-implementation.patch index 434ef94..9cebed7 100644 --- a/async-batch-implementation.patch +++ b/async-batch-implementation.patch @@ -1,4 +1,4 @@ -From f16fb8885eeddad179be9e2290d1523cb8f82641 Mon Sep 17 00:00:00 2001 +From c25ee8158000770cb667b914de62f802467c204e Mon Sep 17 00:00:00 2001 From: Mihai Dinca Date: Fri, 16 Nov 2018 17:05:29 +0100 Subject: [PATCH] Async batch implementation @@ -71,21 +71,20 @@ Async batch implementation fix (#320) Remove deprecated usage of NO_MOCK and NO_MOCK_REASON --- - salt/auth/__init__.py | 2 + - salt/cli/batch.py | 109 ++++++-- - salt/cli/batch_async.py | 315 +++++++++++++++++++++ - salt/cli/support/profiles/__init__.py | 5 +- - salt/client/__init__.py | 51 +--- - salt/master.py | 20 ++ - salt/transport/ipc.py | 9 +- - salt/utils/event.py | 8 +- - tests/unit/cli/test_batch_async.py | 386 ++++++++++++++++++++++++++ - 9 files changed, 841 insertions(+), 64 deletions(-) + salt/auth/__init__.py | 2 + + salt/cli/batch.py | 109 ++++++-- + salt/cli/batch_async.py | 315 +++++++++++++++++++++++ + salt/client/__init__.py | 51 ++-- + salt/master.py | 20 ++ + salt/transport/ipc.py | 9 +- + salt/utils/event.py | 8 +- + tests/unit/cli/test_batch_async.py | 386 +++++++++++++++++++++++++++++ + 8 files changed, 839 insertions(+), 61 deletions(-) create mode 100644 salt/cli/batch_async.py create mode 100644 tests/unit/cli/test_batch_async.py diff --git a/salt/auth/__init__.py b/salt/auth/__init__.py -index 0c64755235..8dc096d9c5 100644 +index 3b73c2ec08..6f300fe7c4 100644 --- a/salt/auth/__init__.py +++ b/salt/auth/__init__.py @@ -49,6 +49,8 @@ AUTH_INTERNAL_KEYWORDS = frozenset( @@ -98,7 +97,7 @@ index 
0c64755235..8dc096d9c5 100644 ) diff --git a/salt/cli/batch.py b/salt/cli/batch.py -index 8e1547c61d..fcd3f571d5 100644 +index 2a692e13f8..828a1ded5b 100644 --- a/salt/cli/batch.py +++ b/salt/cli/batch.py @@ -13,9 +13,88 @@ import salt.exceptions @@ -569,22 +568,11 @@ index 0000000000..09aa85258b + self.event = None + self.ioloop = None + gc.collect() -diff --git a/salt/cli/support/profiles/__init__.py b/salt/cli/support/profiles/__init__.py -index b86aef30b8..4ae6d07b13 100644 ---- a/salt/cli/support/profiles/__init__.py -+++ b/salt/cli/support/profiles/__init__.py -@@ -1,4 +1,3 @@ --# coding=utf-8 --''' -+""" - Profiles for salt-support. --''' -+""" diff --git a/salt/client/__init__.py b/salt/client/__init__.py -index eaf156e060..2427516ca1 100644 +index 8ea8818d01..482d3ac7bd 100644 --- a/salt/client/__init__.py +++ b/salt/client/__init__.py -@@ -586,47 +586,23 @@ class LocalClient: +@@ -584,47 +584,23 @@ class LocalClient: {'dave': {...}} {'stewart': {...}} """ @@ -643,9 +631,9 @@ index eaf156e060..2427516ca1 100644 - if key not in opts: - opts[key] = val batch = salt.cli.batch.Batch(opts, eauth=eauth, quiet=True) - for ret, _ in batch.run(): + for ret in batch.run(): yield ret -@@ -1819,6 +1795,7 @@ class LocalClient: +@@ -1812,6 +1788,7 @@ class LocalClient: "key": self.key, "tgt_type": tgt_type, "ret": ret, @@ -654,18 +642,18 @@ index eaf156e060..2427516ca1 100644 } diff --git a/salt/master.py b/salt/master.py -index 9c06a52c1c..705a1bc2fb 100644 +index 37fe52159f..795aeef647 100644 --- a/salt/master.py +++ b/salt/master.py @@ -19,6 +19,7 @@ import time + import salt.acl import salt.auth - import salt.channel.server +import salt.cli.batch_async import salt.client import salt.client.ssh.client import salt.crypt -@@ -2145,6 +2146,22 @@ class ClearFuncs(TransportMethods): +@@ -2167,6 +2168,22 @@ class ClearFuncs(TransportMethods): return False return self.loadauth.get_tok(clear_load["token"]) @@ -688,7 +676,7 @@ index 9c06a52c1c..705a1bc2fb 100644 def 
publish(self, clear_load): """ This method sends out publications to the minions, it can only be used -@@ -2264,6 +2281,9 @@ class ClearFuncs(TransportMethods): +@@ -2284,6 +2301,9 @@ class ClearFuncs(TransportMethods): ), }, } @@ -699,10 +687,10 @@ index 9c06a52c1c..705a1bc2fb 100644 if jid is None: return {"enc": "clear", "load": {"error": "Master failed to assign jid"}} diff --git a/salt/transport/ipc.py b/salt/transport/ipc.py -index ca13a498e3..3a3f0c7a5f 100644 +index 29210d7522..3f430ba796 100644 --- a/salt/transport/ipc.py +++ b/salt/transport/ipc.py -@@ -659,6 +659,7 @@ class IPCMessageSubscriber(IPCClient): +@@ -650,6 +650,7 @@ class IPCMessageSubscriber(IPCClient): self._read_stream_future = None self._saved_data = [] self._read_in_progress = Lock() @@ -710,7 +698,7 @@ index ca13a498e3..3a3f0c7a5f 100644 @salt.ext.tornado.gen.coroutine def _read(self, timeout, callback=None): -@@ -764,8 +765,12 @@ class IPCMessageSubscriber(IPCClient): +@@ -749,8 +750,12 @@ class IPCMessageSubscriber(IPCClient): return self._saved_data.pop(0) return self.io_loop.run_sync(lambda: self._read(timeout)) @@ -724,7 +712,7 @@ index ca13a498e3..3a3f0c7a5f 100644 """ Asynchronously read messages and invoke a callback when they are ready. 
-@@ -783,7 +788,7 @@ class IPCMessageSubscriber(IPCClient): +@@ -768,7 +773,7 @@ class IPCMessageSubscriber(IPCClient): except Exception as exc: # pylint: disable=broad-except log.error("Exception occurred while Subscriber connecting: %s", exc) yield salt.ext.tornado.gen.sleep(1) @@ -734,10 +722,10 @@ index ca13a498e3..3a3f0c7a5f 100644 def close(self): """ diff --git a/salt/utils/event.py b/salt/utils/event.py -index d14b9ada40..e8d1aaa5f5 100644 +index 3c91daa2b4..fd23197747 100644 --- a/salt/utils/event.py +++ b/salt/utils/event.py -@@ -944,6 +944,10 @@ class SaltEvent: +@@ -920,6 +920,10 @@ class SaltEvent: # Minion fired a bad retcode, fire an event self._fire_ret_load_specific_fun(load) @@ -748,7 +736,7 @@ index d14b9ada40..e8d1aaa5f5 100644 def set_event_handler(self, event_handler): """ Invoke the event_handler callback each time an event arrives. -@@ -952,8 +956,10 @@ class SaltEvent: +@@ -928,8 +932,10 @@ class SaltEvent: if not self.cpub: self.connect_pub() @@ -1153,6 +1141,6 @@ index 0000000000..c0b708de76 + self.batch.schedule_next() + self.assertEqual(len(self.batch.event.io_loop.spawn_callback.mock_calls), 0) -- -2.37.3 +2.33.0 diff --git a/avoid-excessive-syslogging-by-watchdog-cronjob-58.patch b/avoid-excessive-syslogging-by-watchdog-cronjob-58.patch index 9284c2f..a5ad4b8 100644 --- a/avoid-excessive-syslogging-by-watchdog-cronjob-58.patch +++ b/avoid-excessive-syslogging-by-watchdog-cronjob-58.patch @@ -1,4 +1,4 @@ -From 100bf2d977c15fda21de7d1d5da2f2c61bed2afd Mon Sep 17 00:00:00 2001 +From 3c83bab3da101223c99af1f9ee2f3bf5e97be3f8 Mon Sep 17 00:00:00 2001 From: Hubert Mantel Date: Mon, 27 Nov 2017 13:55:13 +0100 Subject: [PATCH] avoid excessive syslogging by watchdog cronjob (#58) @@ -21,6 +21,6 @@ index 2e418094ed..73a91ebd62 100755 /usr/bin/salt-daemon-watcher --with-init & disown fi -- -2.37.3 +2.29.2 diff --git a/backport-syndic-auth-fixes.patch b/backport-syndic-auth-fixes.patch new file mode 100644 index 0000000..c04903b --- /dev/null +++ 
b/backport-syndic-auth-fixes.patch @@ -0,0 +1,355 @@ +From 54ab69e74beb83710d0bf6049039d13e260d5517 Mon Sep 17 00:00:00 2001 +From: Alexander Graul +Date: Tue, 13 Sep 2022 11:26:21 +0200 +Subject: [PATCH] Backport Syndic auth fixes + +[3004.2] Syndic Fixes + +(cherry picked from commit 643bd4b572ca97466e085ecd1d84da45b1684332) + +Co-authored-by: Megan Wilhite +--- + changelog/61868.fixed | 1 + + salt/transport/mixins/auth.py | 2 +- + salt/transport/tcp.py | 2 +- + salt/transport/zeromq.py | 2 +- + tests/pytests/unit/transport/test_tcp.py | 149 +++++++++++++++++++- + tests/pytests/unit/transport/test_zeromq.py | 73 +++++++++- + 6 files changed, 224 insertions(+), 5 deletions(-) + create mode 100644 changelog/61868.fixed + +diff --git a/changelog/61868.fixed b/changelog/61868.fixed +new file mode 100644 +index 0000000000..0169c48e99 +--- /dev/null ++++ b/changelog/61868.fixed +@@ -0,0 +1 @@ ++Make sure the correct key is being used when verifying or validating communication, eg. when a Salt syndic is involved use syndic_master.pub and when a Salt minion is involved use minion_master.pub. +diff --git a/salt/transport/mixins/auth.py b/salt/transport/mixins/auth.py +index 1e2e8e6b7b..e5c6a5345f 100644 +--- a/salt/transport/mixins/auth.py ++++ b/salt/transport/mixins/auth.py +@@ -43,7 +43,7 @@ class AESPubClientMixin: + ) + + # Verify that the signature is valid +- master_pubkey_path = os.path.join(self.opts["pki_dir"], "minion_master.pub") ++ master_pubkey_path = os.path.join(self.opts["pki_dir"], self.auth.mpub) + if not salt.crypt.verify_signature( + master_pubkey_path, payload["load"], payload.get("sig") + ): +diff --git a/salt/transport/tcp.py b/salt/transport/tcp.py +index f00b3c40eb..2821be82c7 100644 +--- a/salt/transport/tcp.py ++++ b/salt/transport/tcp.py +@@ -295,7 +295,7 @@ class AsyncTCPReqChannel(salt.transport.client.ReqChannel): + signed_msg = pcrypt.loads(ret[dictkey]) + + # Validate the master's signature. 
+- master_pubkey_path = os.path.join(self.opts["pki_dir"], "minion_master.pub") ++ master_pubkey_path = os.path.join(self.opts["pki_dir"], self.auth.mpub) + if not salt.crypt.verify_signature( + master_pubkey_path, signed_msg["data"], signed_msg["sig"] + ): +diff --git a/salt/transport/zeromq.py b/salt/transport/zeromq.py +index aa06298ee1..8199378239 100644 +--- a/salt/transport/zeromq.py ++++ b/salt/transport/zeromq.py +@@ -255,7 +255,7 @@ class AsyncZeroMQReqChannel(salt.transport.client.ReqChannel): + signed_msg = pcrypt.loads(ret[dictkey]) + + # Validate the master's signature. +- master_pubkey_path = os.path.join(self.opts["pki_dir"], "minion_master.pub") ++ master_pubkey_path = os.path.join(self.opts["pki_dir"], self.auth.mpub) + if not salt.crypt.verify_signature( + master_pubkey_path, signed_msg["data"], signed_msg["sig"] + ): +diff --git a/tests/pytests/unit/transport/test_tcp.py b/tests/pytests/unit/transport/test_tcp.py +index 3b6e175472..e41edcc37e 100644 +--- a/tests/pytests/unit/transport/test_tcp.py ++++ b/tests/pytests/unit/transport/test_tcp.py +@@ -1,13 +1,53 @@ + import contextlib ++import os + import socket + + import attr + import pytest + import salt.exceptions ++import salt.transport.mixins.auth + import salt.transport.tcp + from salt.ext.tornado import concurrent, gen, ioloop + from saltfactories.utils.ports import get_unused_localhost_port +-from tests.support.mock import MagicMock, patch ++from tests.support.mock import MagicMock, PropertyMock, create_autospec, patch ++ ++ ++@pytest.fixture ++def fake_keys(): ++ with patch("salt.crypt.AsyncAuth.get_keys", autospec=True): ++ yield ++ ++ ++@pytest.fixture ++def fake_crypto(): ++ with patch("salt.transport.tcp.PKCS1_OAEP", create=True) as fake_crypto: ++ yield fake_crypto ++ ++ ++@pytest.fixture ++def fake_authd(): ++ @salt.ext.tornado.gen.coroutine ++ def return_nothing(): ++ raise salt.ext.tornado.gen.Return() ++ ++ with patch( ++ "salt.crypt.AsyncAuth.authenticated", 
new_callable=PropertyMock ++ ) as mock_authed, patch( ++ "salt.crypt.AsyncAuth.authenticate", ++ autospec=True, ++ return_value=return_nothing(), ++ ), patch( ++ "salt.crypt.AsyncAuth.gen_token", autospec=True, return_value=42 ++ ): ++ mock_authed.return_value = False ++ yield ++ ++ ++@pytest.fixture ++def fake_crypticle(): ++ with patch("salt.crypt.Crypticle") as fake_crypticle: ++ fake_crypticle.generate_key_string.return_value = "fakey fake" ++ yield fake_crypticle + + + @pytest.fixture +@@ -405,3 +445,110 @@ def test_client_reconnect_backoff(client_socket): + client.io_loop.run_sync(client._connect) + finally: + client.close() ++ ++ ++async def test_when_async_req_channel_with_syndic_role_should_use_syndic_master_pub_file_to_verify_master_sig( ++ fake_keys, fake_crypto, fake_crypticle ++): ++ # Syndics use the minion pki dir, but they also create a syndic_master.pub ++ # file for comms with the Salt master ++ expected_pubkey_path = os.path.join("/etc/salt/pki/minion", "syndic_master.pub") ++ fake_crypto.new.return_value.decrypt.return_value = "decrypted_return_value" ++ mockloop = MagicMock() ++ opts = { ++ "master_uri": "tcp://127.0.0.1:4506", ++ "interface": "127.0.0.1", ++ "ret_port": 4506, ++ "ipv6": False, ++ "sock_dir": ".", ++ "pki_dir": "/etc/salt/pki/minion", ++ "id": "syndic", ++ "__role": "syndic", ++ "keysize": 4096, ++ } ++ client = salt.transport.tcp.AsyncTCPReqChannel(opts, io_loop=mockloop) ++ ++ dictkey = "pillar" ++ target = "minion" ++ ++ # Mock auth and message client. 
++ client.auth._authenticate_future = MagicMock() ++ client.auth._authenticate_future.done.return_value = True ++ client.auth._authenticate_future.exception.return_value = None ++ client.auth._crypticle = MagicMock() ++ client.message_client = create_autospec(client.message_client) ++ ++ @salt.ext.tornado.gen.coroutine ++ def mocksend(msg, timeout=60, tries=3): ++ raise salt.ext.tornado.gen.Return({"pillar": "data", "key": "value"}) ++ ++ client.message_client.send = mocksend ++ ++ # Note the 'ver' value in 'load' does not represent the the 'version' sent ++ # in the top level of the transport's message. ++ load = { ++ "id": target, ++ "grains": {}, ++ "saltenv": "base", ++ "pillarenv": "base", ++ "pillar_override": True, ++ "extra_minion_data": {}, ++ "ver": "2", ++ "cmd": "_pillar", ++ } ++ fake_nonce = 42 ++ with patch( ++ "salt.crypt.verify_signature", autospec=True, return_value=True ++ ) as fake_verify, patch( ++ "salt.payload.loads", ++ autospec=True, ++ return_value={"key": "value", "nonce": fake_nonce, "pillar": "data"}, ++ ), patch( ++ "uuid.uuid4", autospec=True ++ ) as fake_uuid: ++ fake_uuid.return_value.hex = fake_nonce ++ ret = await client.crypted_transfer_decode_dictentry( ++ load, ++ dictkey="pillar", ++ ) ++ ++ assert fake_verify.mock_calls[0].args[0] == expected_pubkey_path ++ ++ ++async def test_mixin_should_use_correct_path_when_syndic( ++ fake_keys, fake_authd, fake_crypticle ++): ++ mockloop = MagicMock() ++ expected_pubkey_path = os.path.join("/etc/salt/pki/minion", "syndic_master.pub") ++ opts = { ++ "master_uri": "tcp://127.0.0.1:4506", ++ "interface": "127.0.0.1", ++ "ret_port": 4506, ++ "ipv6": False, ++ "sock_dir": ".", ++ "pki_dir": "/etc/salt/pki/minion", ++ "id": "syndic", ++ "__role": "syndic", ++ "keysize": 4096, ++ "sign_pub_messages": True, ++ } ++ ++ with patch( ++ "salt.crypt.verify_signature", autospec=True, return_value=True ++ ) as fake_verify, patch( ++ "salt.utils.msgpack.loads", ++ autospec=True, ++ return_value={"enc": 
"aes", "load": "", "sig": "fake_signature"}, ++ ): ++ client = salt.transport.tcp.AsyncTCPPubChannel(opts, io_loop=mockloop) ++ client.message_client = MagicMock() ++ client.message_client.on_recv.side_effect = lambda x: x(b"some_data") ++ await client.connect() ++ client.auth._crypticle = fake_crypticle ++ ++ @client.on_recv ++ def test_recv_function(*args, **kwargs): ++ ... ++ ++ await test_recv_function ++ assert fake_verify.mock_calls[0].args[0] == expected_pubkey_path +diff --git a/tests/pytests/unit/transport/test_zeromq.py b/tests/pytests/unit/transport/test_zeromq.py +index 1f0515c91a..c3093f4b19 100644 +--- a/tests/pytests/unit/transport/test_zeromq.py ++++ b/tests/pytests/unit/transport/test_zeromq.py +@@ -23,7 +23,7 @@ import salt.utils.process + import salt.utils.stringutils + from salt.master import SMaster + from salt.transport.zeromq import AsyncReqMessageClientPool +-from tests.support.mock import MagicMock, patch ++from tests.support.mock import MagicMock, create_autospec, patch + + try: + from M2Crypto import RSA +@@ -608,6 +608,7 @@ async def test_req_chan_decode_data_dict_entry_v2(pki_dir): + auth = client.auth + auth._crypticle = salt.crypt.Crypticle(opts, AES_KEY) + client.auth = MagicMock() ++ client.auth.mpub = auth.mpub + client.auth.authenticated = True + client.auth.get_keys = auth.get_keys + client.auth.crypticle.dumps = auth.crypticle.dumps +@@ -672,6 +673,7 @@ async def test_req_chan_decode_data_dict_entry_v2_bad_nonce(pki_dir): + auth = client.auth + auth._crypticle = salt.crypt.Crypticle(opts, AES_KEY) + client.auth = MagicMock() ++ client.auth.mpub = auth.mpub + client.auth.authenticated = True + client.auth.get_keys = auth.get_keys + client.auth.crypticle.dumps = auth.crypticle.dumps +@@ -735,6 +737,7 @@ async def test_req_chan_decode_data_dict_entry_v2_bad_signature(pki_dir): + auth = client.auth + auth._crypticle = salt.crypt.Crypticle(opts, AES_KEY) + client.auth = MagicMock() ++ client.auth.mpub = auth.mpub + 
client.auth.authenticated = True + client.auth.get_keys = auth.get_keys + client.auth.crypticle.dumps = auth.crypticle.dumps +@@ -814,6 +817,7 @@ async def test_req_chan_decode_data_dict_entry_v2_bad_key(pki_dir): + auth = client.auth + auth._crypticle = salt.crypt.Crypticle(opts, AES_KEY) + client.auth = MagicMock() ++ client.auth.mpub = auth.mpub + client.auth.authenticated = True + client.auth.get_keys = auth.get_keys + client.auth.crypticle.dumps = auth.crypticle.dumps +@@ -1273,3 +1277,70 @@ async def test_req_chan_auth_v2_new_minion_without_master_pub(pki_dir, io_loop): + assert "sig" in ret + ret = client.auth.handle_signin_response(signin_payload, ret) + assert ret == "retry" ++ ++ ++async def test_when_async_req_channel_with_syndic_role_should_use_syndic_master_pub_file_to_verify_master_sig( ++ pki_dir, ++): ++ # Syndics use the minion pki dir, but they also create a syndic_master.pub ++ # file for comms with the Salt master ++ expected_pubkey_path = str(pki_dir.join("minion").join("syndic_master.pub")) ++ mockloop = MagicMock() ++ opts = { ++ "master_uri": "tcp://127.0.0.1:4506", ++ "interface": "127.0.0.1", ++ "ret_port": 4506, ++ "ipv6": False, ++ "sock_dir": ".", ++ "pki_dir": str(pki_dir.join("minion")), ++ "id": "syndic", ++ "__role": "syndic", ++ "keysize": 4096, ++ } ++ master_opts = dict(opts, pki_dir=str(pki_dir.join("master"))) ++ server = salt.transport.zeromq.ZeroMQReqServerChannel(master_opts) ++ client = salt.transport.zeromq.AsyncZeroMQReqChannel(opts, io_loop=mockloop) ++ ++ dictkey = "pillar" ++ target = "minion" ++ pillar_data = {"pillar1": "data1"} ++ ++ # Mock auth and message client. 
++ client.auth._authenticate_future = MagicMock() ++ client.auth._authenticate_future.done.return_value = True ++ client.auth._authenticate_future.exception.return_value = None ++ client.auth._crypticle = salt.crypt.Crypticle(opts, AES_KEY) ++ client.message_client = create_autospec(client.message_client) ++ ++ @salt.ext.tornado.gen.coroutine ++ def mocksend(msg, timeout=60, tries=3): ++ client.message_client.msg = msg ++ load = client.auth.crypticle.loads(msg["load"]) ++ ret = server._encrypt_private( ++ pillar_data, dictkey, target, nonce=load["nonce"], sign_messages=True ++ ) ++ raise salt.ext.tornado.gen.Return(ret) ++ ++ client.message_client.send = mocksend ++ ++ # Note the 'ver' value in 'load' does not represent the the 'version' sent ++ # in the top level of the transport's message. ++ load = { ++ "id": target, ++ "grains": {}, ++ "saltenv": "base", ++ "pillarenv": "base", ++ "pillar_override": True, ++ "extra_minion_data": {}, ++ "ver": "2", ++ "cmd": "_pillar", ++ } ++ with patch( ++ "salt.crypt.verify_signature", autospec=True, return_value=True ++ ) as fake_verify: ++ ret = await client.crypted_transfer_decode_dictentry( ++ load, ++ dictkey="pillar", ++ ) ++ ++ assert fake_verify.mock_calls[0].args[0] == expected_pubkey_path +-- +2.37.3 + diff --git a/batch.py-avoid-exception-when-minion-does-not-respon.patch b/batch.py-avoid-exception-when-minion-does-not-respon.patch new file mode 100644 index 0000000..6471863 --- /dev/null +++ b/batch.py-avoid-exception-when-minion-does-not-respon.patch @@ -0,0 +1,46 @@ +From 03f0aa44f6963e09a92dd3ea2090ef9ee463cb94 Mon Sep 17 00:00:00 2001 +From: =?UTF-8?q?Pablo=20Su=C3=A1rez=20Hern=C3=A1ndez?= + +Date: Wed, 5 Jun 2019 15:15:04 +0100 +Subject: [PATCH] batch.py: avoid exception when minion does not + respond (bsc#1135507) +MIME-Version: 1.0 +Content-Type: text/plain; charset=UTF-8 +Content-Transfer-Encoding: 8bit + +We have several issues reporting that salt is throwing exception when +the minion does not respond. 
This change avoid the exception adding a +default data to the minion when it fails to respond. This patch based +on the patch suggested by @roskens. + +Issues #46876 #48509 #50238 +bsc#1135507 + +Signed-off-by: José Guilherme Vanz +--- + salt/cli/batch.py | 8 ++++++++ + 1 file changed, 8 insertions(+) + +diff --git a/salt/cli/batch.py b/salt/cli/batch.py +index 2bc5444aef..6285a45434 100644 +--- a/salt/cli/batch.py ++++ b/salt/cli/batch.py +@@ -348,6 +348,14 @@ class Batch: + if self.opts.get("failhard") and data["retcode"] > 0: + failhard = True + ++ # avoid an exception if the minion does not respond. ++ if data.get("failed") is True: ++ log.debug("Minion %s failed to respond: data=%s", minion, data) ++ data = { ++ "ret": "Minion did not return. [Failed]", ++ "retcode": salt.defaults.exitcodes.EX_GENERIC, ++ } ++ + if self.opts.get("raw"): + ret[minion] = data + yield data +-- +2.29.2 + + diff --git a/bsc-1176024-fix-file-directory-user-and-group-owners.patch b/bsc-1176024-fix-file-directory-user-and-group-owners.patch index e2fad68..a6f6811 100644 --- a/bsc-1176024-fix-file-directory-user-and-group-owners.patch +++ b/bsc-1176024-fix-file-directory-user-and-group-owners.patch @@ -1,4 +1,4 @@ -From 58329533d8b3239d978c15ecb76934987880897f Mon Sep 17 00:00:00 2001 +From 60b8f6cdaab10a12973a074678608b86a34e23b7 Mon Sep 17 00:00:00 2001 From: Victor Zhestkov <35733135+vzhestkov@users.noreply.github.com> Date: Tue, 6 Oct 2020 12:36:41 +0300 Subject: [PATCH] bsc#1176024: Fix file/directory user and group @@ -17,10 +17,10 @@ Co-authored-by: Victor Zhestkov 2 files changed, 27 insertions(+), 11 deletions(-) diff --git a/salt/modules/file.py b/salt/modules/file.py -index 95bd69a588..d475e3c2e3 100644 +index 989a7ad92d..b830b390d3 100644 --- a/salt/modules/file.py +++ b/salt/modules/file.py -@@ -247,7 +247,7 @@ def group_to_gid(group): +@@ -252,7 +252,7 @@ def group_to_gid(group): try: if isinstance(group, int): return group @@ -29,7 +29,7 @@ index 95bd69a588..d475e3c2e3 
100644 except KeyError: return "" -@@ -338,7 +338,7 @@ def user_to_uid(user): +@@ -344,7 +344,7 @@ def user_to_uid(user): try: if isinstance(user, int): return user @@ -38,7 +38,7 @@ index 95bd69a588..d475e3c2e3 100644 except KeyError: return "" -@@ -5043,7 +5043,10 @@ def check_perms( +@@ -4977,7 +4977,10 @@ def check_perms( if ( salt.utils.platform.is_windows() and user_to_uid(user) != user_to_uid(perms["luser"]) @@ -50,7 +50,7 @@ index 95bd69a588..d475e3c2e3 100644 perms["cuser"] = user if group: -@@ -5052,7 +5055,10 @@ def check_perms( +@@ -4986,7 +4989,10 @@ def check_perms( if ( salt.utils.platform.is_windows() and group_to_gid(group) != group_to_gid(perms["lgroup"]) @@ -62,7 +62,7 @@ index 95bd69a588..d475e3c2e3 100644 perms["cgroup"] = group if "cuser" in perms or "cgroup" in perms: -@@ -5083,7 +5089,8 @@ def check_perms( +@@ -5017,7 +5023,8 @@ def check_perms( and user != "" ) or ( not salt.utils.platform.is_windows() @@ -72,7 +72,7 @@ index 95bd69a588..d475e3c2e3 100644 and user != "" ): if __opts__["test"] is True: -@@ -5101,18 +5108,19 @@ def check_perms( +@@ -5035,18 +5042,19 @@ def check_perms( salt.utils.platform.is_windows() and group_to_gid(group) != group_to_gid(get_group(name, follow_symlinks=follow_symlinks)) @@ -97,10 +97,10 @@ index 95bd69a588..d475e3c2e3 100644 # Mode changes if needed diff --git a/salt/states/file.py b/salt/states/file.py -index 9f33a8de23..50ceef1158 100644 +index 9e24e389d8..89c70eb454 100644 --- a/salt/states/file.py +++ b/salt/states/file.py -@@ -863,9 +863,17 @@ def _check_dir_meta(name, user, group, mode, follow_symlinks=False): +@@ -989,9 +989,17 @@ def _check_dir_meta(name, user, group, mode, follow_symlinks=False): if not stats: changes["directory"] = "new" return changes @@ -121,6 +121,6 @@ index 9f33a8de23..50ceef1158 100644 # Normalize the dir mode smode = salt.utils.files.normalize_mode(stats["mode"]) -- -2.37.3 +2.29.2 diff --git a/change-the-delimeters-to-prevent-possible-tracebacks.patch 
b/change-the-delimeters-to-prevent-possible-tracebacks.patch index 72ab82a..fba2d14 100644 --- a/change-the-delimeters-to-prevent-possible-tracebacks.patch +++ b/change-the-delimeters-to-prevent-possible-tracebacks.patch @@ -1,8 +1,8 @@ -From e011015f0eaa8e6453b57c208ab2a43a15824e36 Mon Sep 17 00:00:00 2001 +From e28385eb37932809a11ec81c81834a51e094f507 Mon Sep 17 00:00:00 2001 From: Victor Zhestkov Date: Thu, 1 Sep 2022 14:42:24 +0300 -Subject: [PATCH] Change the delimeters to prevent possible tracebacks on - some packages with dpkg_lowpkg +Subject: [PATCH] Change the delimeters to prevent possible tracebacks + on some packages with dpkg_lowpkg * Use another separator on query to dpkg-query @@ -75,6 +75,6 @@ index d00fc46c66..a97519f489 100644 ), } -- -2.37.3 +2.37.2 diff --git a/check-if-dpkgnotify-is-executable-bsc-1186674-376.patch b/check-if-dpkgnotify-is-executable-bsc-1186674-376.patch new file mode 100644 index 0000000..3a69fe6 --- /dev/null +++ b/check-if-dpkgnotify-is-executable-bsc-1186674-376.patch @@ -0,0 +1,23 @@ +From b477b00447b49fc2f221cfb6d2c491bcd1970119 Mon Sep 17 00:00:00 2001 +From: Victor Zhestkov <35733135+vzhestkov@users.noreply.github.com> +Date: Tue, 1 Jun 2021 13:04:43 +0300 +Subject: [PATCH] Check if dpkgnotify is executable (bsc#1186674) + (#376) + +It prevents fails on removing salt-minion package +when the dpkg configuration is still active +--- + scripts/suse/dpkg/99dpkgnotify | 2 +- + 1 file changed, 1 insertion(+), 1 deletion(-) + +diff --git a/scripts/suse/dpkg/99dpkgnotify b/scripts/suse/dpkg/99dpkgnotify +index 8013387a57..f89815f605 100644 +--- a/scripts/suse/dpkg/99dpkgnotify ++++ b/scripts/suse/dpkg/99dpkgnotify +@@ -1 +1 @@ +-DPkg::Post-Invoke {"/usr/bin/dpkgnotify";}; ++DPkg::Post-Invoke {"if [ -x /usr/bin/dpkgnotify ]; then /usr/bin/dpkgnotify; fi;";}; +-- +2.31.1 + + diff --git a/clarify-pkg.installed-pkg_verify-documentation.patch b/clarify-pkg.installed-pkg_verify-documentation.patch deleted file mode 100644 index 
90f9a24..0000000 --- a/clarify-pkg.installed-pkg_verify-documentation.patch +++ /dev/null @@ -1,51 +0,0 @@ -From 5ed2295489fc13e48b981c323c846bde927cb800 Mon Sep 17 00:00:00 2001 -From: Alexander Graul -Date: Fri, 21 Oct 2022 14:39:21 +0200 -Subject: [PATCH] Clarify pkg.installed pkg_verify documentation - -There have been misunderstandings what the pkg_verify parameter does and -bug reports that it does not work, based on the wrong assumption that -this parameter changes the installation of new packages. The docstring -also stated that it was only provided by `yum`, but `zypper` also -provides this feature (actually it is `rpm` itself in both cases that -does the verification check) - -Related issue: https://github.com/saltstack/salt/issues/44878 - -(cherry picked from commit 2ed5f3c29d3b4313d904b7c081e5a29bf5e309c7) ---- - salt/states/pkg.py | 17 +++++++++-------- - 1 file changed, 9 insertions(+), 8 deletions(-) - -diff --git a/salt/states/pkg.py b/salt/states/pkg.py -index cda966a1e8..13532521d5 100644 ---- a/salt/states/pkg.py -+++ b/salt/states/pkg.py -@@ -1277,14 +1277,15 @@ def installed( - - .. versionadded:: 2014.7.0 - -- For requested packages that are already installed and would not be -- targeted for upgrade or downgrade, use pkg.verify to determine if any -- of the files installed by the package have been altered. If files have -- been altered, the reinstall option of pkg.install is used to force a -- reinstall. Types to ignore can be passed to pkg.verify. Additionally, -- ``verify_options`` can be used to modify further the behavior of -- pkg.verify. See examples below. Currently, this option is supported -- for the following pkg providers: :mod:`yumpkg `. -+ Use pkg.verify to check if already installed packages require -+ reinstallion. 
Requested packages that are already installed and not -+ targeted for up- or downgrade are verified with pkg.verify to determine -+ if any file installed by the package have been modified or if package -+ dependencies are not fulfilled. ``ignore_types`` and ``verify_options`` -+ can be passed to pkg.verify. See examples below. Currently, this option -+ is supported for the following pkg providers: -+ :mod:`yum `, -+ :mod:`zypperpkg `. - - Examples: - --- -2.37.3 - - diff --git a/debian-info_installed-compatibility-50453.patch b/debian-info_installed-compatibility-50453.patch index 7486718..2d1d26d 100644 --- a/debian-info_installed-compatibility-50453.patch +++ b/debian-info_installed-compatibility-50453.patch @@ -1,4 +1,4 @@ -From 8cd50907edeb4a1128681c30e52b2b30cf7f937d Mon Sep 17 00:00:00 2001 +From 7720401d74ed6eafe860aab297aee0c8e22bc00f Mon Sep 17 00:00:00 2001 From: Alexander Graul Date: Tue, 25 Jan 2022 17:08:57 +0100 Subject: [PATCH] Debian info_installed compatibility (#50453) @@ -61,10 +61,10 @@ https://github.com/openSUSE/salt/commit/d0ef24d113bdaaa29f180031b5da384cffe08c64 3 files changed, 166 insertions(+), 18 deletions(-) diff --git a/salt/modules/aptpkg.py b/salt/modules/aptpkg.py -index 0cbd611b71..4a2281c47f 100644 +index 8d9f1b9f52..3c3fbf4970 100644 --- a/salt/modules/aptpkg.py +++ b/salt/modules/aptpkg.py -@@ -3458,6 +3458,15 @@ def info_installed(*names, **kwargs): +@@ -3035,6 +3035,15 @@ def info_installed(*names, **kwargs): .. versionadded:: 2016.11.3 @@ -80,7 +80,7 @@ index 0cbd611b71..4a2281c47f 100644 CLI Example: .. 
code-block:: bash -@@ -3468,11 +3477,19 @@ def info_installed(*names, **kwargs): +@@ -3045,11 +3054,19 @@ def info_installed(*names, **kwargs): """ kwargs = salt.utils.args.clean_kwargs(**kwargs) failhard = kwargs.pop("failhard", True) @@ -101,7 +101,7 @@ index 0cbd611b71..4a2281c47f 100644 t_nfo = dict() if pkg_nfo.get("status", "ii")[1] != "i": continue # return only packages that are really installed -@@ -3493,7 +3510,10 @@ def info_installed(*names, **kwargs): +@@ -3070,7 +3087,10 @@ def info_installed(*names, **kwargs): else: t_nfo[key] = value @@ -281,10 +281,10 @@ index 6a88573a8f..afbd619490 100644 return ret diff --git a/tests/pytests/unit/modules/test_aptpkg.py b/tests/pytests/unit/modules/test_aptpkg.py -index 6ef27e2d29..8e404a673c 100644 +index 6c5ed29848..51b7ffbe4d 100644 --- a/tests/pytests/unit/modules/test_aptpkg.py +++ b/tests/pytests/unit/modules/test_aptpkg.py -@@ -359,6 +359,58 @@ def test_info_installed(lowpkg_info_var): +@@ -336,6 +336,58 @@ def test_info_installed(lowpkg_info_var): assert len(aptpkg.info_installed()) == 1 @@ -344,6 +344,6 @@ index 6ef27e2d29..8e404a673c 100644 """ Test - Return the name of the package that owns the file. -- -2.37.3 +2.34.1 diff --git a/detect-module.run-syntax.patch b/detect-module.run-syntax.patch deleted file mode 100644 index e7f58ae..0000000 --- a/detect-module.run-syntax.patch +++ /dev/null @@ -1,28 +0,0 @@ -From dd147ab110e71ea0f1091923c9230ade01f226d4 Mon Sep 17 00:00:00 2001 -From: Victor Zhestkov -Date: Fri, 28 Oct 2022 13:19:23 +0300 -Subject: [PATCH] Detect module.run syntax - -* Detect module run syntax version - -* Update module.run docs and add changelog - -* Add test for module.run without any args - -Co-authored-by: Daniel A. 
Wozniak ---- - changelog/58763.fixed | 1 + - 1 file changed, 1 insertion(+) - create mode 100644 changelog/58763.fixed - -diff --git a/changelog/58763.fixed b/changelog/58763.fixed -new file mode 100644 -index 0000000000..53ee8304c0 ---- /dev/null -+++ b/changelog/58763.fixed -@@ -0,0 +1 @@ -+Detect new and legacy styles of calling module.run and support them both. --- -2.37.3 - - diff --git a/dnfnotify-pkgset-plugin-implementation-3002.2-450.patch b/dnfnotify-pkgset-plugin-implementation-3002.2-450.patch index 44dcdc4..a94cafb 100644 --- a/dnfnotify-pkgset-plugin-implementation-3002.2-450.patch +++ b/dnfnotify-pkgset-plugin-implementation-3002.2-450.patch @@ -1,4 +1,4 @@ -From e7bc5c7fc89877e9cbf203d8fb70855df0b626e1 Mon Sep 17 00:00:00 2001 +From b1c213f171538890b3b61def25e4777bccfa64fe Mon Sep 17 00:00:00 2001 From: Victor Zhestkov <35733135+vzhestkov@users.noreply.github.com> Date: Mon, 8 Nov 2021 18:09:53 +0300 Subject: [PATCH] dnfnotify pkgset plugin implementation - 3002.2 (#450) @@ -125,6 +125,6 @@ index 0000000000..6e9df85f71 + digest.update(buff) + return digest.hexdigest() -- -2.37.3 +2.33.1 diff --git a/do-not-crash-when-unexpected-cmd-output-at-listing-p.patch b/do-not-crash-when-unexpected-cmd-output-at-listing-p.patch new file mode 100644 index 0000000..b75a5dc --- /dev/null +++ b/do-not-crash-when-unexpected-cmd-output-at-listing-p.patch @@ -0,0 +1,79 @@ +From b151f2c1c6b6599b6387ec6e2d32a56e031e3d48 Mon Sep 17 00:00:00 2001 +From: Alexander Graul +Date: Tue, 18 Jan 2022 19:12:25 +0100 +Subject: [PATCH] Do not crash when unexpected cmd output at listing + patches (bsc#1181290) + +Add unit tests to cover unexpected output when listing patches +--- + tests/pytests/unit/modules/test_yumpkg.py | 53 +++++++++++++++++++++++ + 1 file changed, 53 insertions(+) + +diff --git a/tests/pytests/unit/modules/test_yumpkg.py b/tests/pytests/unit/modules/test_yumpkg.py +index 475e1d6094..3b35272550 100644 +--- a/tests/pytests/unit/modules/test_yumpkg.py ++++ 
b/tests/pytests/unit/modules/test_yumpkg.py +@@ -433,6 +433,59 @@ def test_list_patches(): + assert _patch in patches["my-fake-patch-installed-1234"]["summary"] + + ++def test_list_patches_with_unexpected_output(): ++ """ ++ Test patches listin with unexpected output from updateinfo list ++ ++ :return: ++ """ ++ yum_out = [ ++ "Update notice RHBA-2014:0722 (from rhel7-dev-rhel7-rpm-x86_64) is broken, or a bad duplicate, skipping.", ++ "You should report this problem to the owner of the rhel7-dev-rhel7-rpm-x86_64 repository.", ++ 'To help pinpoint the issue, please attach the output of "yum updateinfo --verbose" to the report.', ++ "Update notice RHSA-2014:1971 (from rhel7-dev-rhel7-rpm-x86_64) is broken, or a bad duplicate, skipping.", ++ "Update notice RHSA-2015:1981 (from rhel7-dev-rhel7-rpm-x86_64) is broken, or a bad duplicate, skipping.", ++ "Update notice RHSA-2015:0067 (from rhel7-dev-rhel7-rpm-x86_64) is broken, or a bad duplicate, skipping", ++ "i my-fake-patch-not-installed-1234 recommended spacewalk-usix-2.7.5.2-2.2.noarch", ++ " my-fake-patch-not-installed-1234 recommended spacewalksd-5.0.26.2-21.2.x86_64", ++ "i my-fake-patch-not-installed-1234 recommended suseRegisterInfo-3.1.1-18.2.x86_64", ++ "i my-fake-patch-installed-1234 recommended my-package-one-1.1-0.1.x86_64", ++ "i my-fake-patch-installed-1234 recommended my-package-two-1.1-0.1.x86_64", ++ ] ++ ++ expected_patches = { ++ "my-fake-patch-not-installed-1234": { ++ "installed": False, ++ "summary": [ ++ "spacewalk-usix-2.7.5.2-2.2.noarch", ++ "spacewalksd-5.0.26.2-21.2.x86_64", ++ "suseRegisterInfo-3.1.1-18.2.x86_64", ++ ], ++ }, ++ "my-fake-patch-installed-1234": { ++ "installed": True, ++ "summary": [ ++ "my-package-one-1.1-0.1.x86_64", ++ "my-package-two-1.1-0.1.x86_64", ++ ], ++ }, ++ } ++ ++ with patch.dict(yumpkg.__grains__, {"osarch": "x86_64"}), patch.dict( ++ yumpkg.__salt__, ++ {"cmd.run_stdout": MagicMock(return_value=os.linesep.join(yum_out))}, ++ ): ++ patches = 
yumpkg.list_patches() ++ assert not patches["my-fake-patch-not-installed-1234"]["installed"] ++ assert len(patches["my-fake-patch-not-installed-1234"]["summary"]) == 3 ++ for _patch in expected_patches["my-fake-patch-not-installed-1234"]["summary"]: ++ assert _patch in patches["my-fake-patch-not-installed-1234"]["summary"] ++ assert patches["my-fake-patch-installed-1234"]["installed"] ++ assert len(patches["my-fake-patch-installed-1234"]["summary"]) == 2 ++ for _patch in expected_patches["my-fake-patch-installed-1234"]["summary"]: ++ assert _patch in patches["my-fake-patch-installed-1234"]["summary"] ++ ++ + def test_latest_version_with_options(): + with patch.object(yumpkg, "list_pkgs", MagicMock(return_value={})): + +-- +2.34.1 + + diff --git a/do-not-load-pip-state-if-there-is-no-3rd-party-depen.patch b/do-not-load-pip-state-if-there-is-no-3rd-party-depen.patch index 5a6592f..63e7b3c 100644 --- a/do-not-load-pip-state-if-there-is-no-3rd-party-depen.patch +++ b/do-not-load-pip-state-if-there-is-no-3rd-party-depen.patch @@ -1,4 +1,4 @@ -From d97cbae7eb3cb0030d355a4ae3fb35745fed5da0 Mon Sep 17 00:00:00 2001 +From e0b91c626c10b29d328fa92415393cd57bb4c962 Mon Sep 17 00:00:00 2001 From: Bo Maryniuk Date: Fri, 21 Sep 2018 17:31:39 +0200 Subject: [PATCH] Do not load pip state if there is no 3rd party @@ -10,7 +10,7 @@ Safe import 3rd party dependency 1 file changed, 12 insertions(+), 1 deletion(-) diff --git a/salt/modules/pip.py b/salt/modules/pip.py -index 7135a9145f..da26416662 100644 +index f68cafaeaf..14cfafed4b 100644 --- a/salt/modules/pip.py +++ b/salt/modules/pip.py @@ -96,6 +96,12 @@ import salt.utils.url @@ -26,7 +26,7 @@ index 7135a9145f..da26416662 100644 # This needs to be named logger so we don't shadow it in pip.install logger = logging.getLogger(__name__) # pylint: disable=invalid-name -@@ -114,7 +120,12 @@ def __virtual__(): +@@ -113,7 +119,12 @@ def __virtual__(): entire filesystem. 
If it's not installed in a conventional location, the user is required to provide the location of pip each time it is used. """ @@ -41,6 +41,6 @@ index 7135a9145f..da26416662 100644 def _pip_bin_env(cwd, bin_env): -- -2.37.3 +2.33.0 diff --git a/don-t-use-shell-sbin-nologin-in-requisites.patch b/don-t-use-shell-sbin-nologin-in-requisites.patch index 2d734f0..f773381 100644 --- a/don-t-use-shell-sbin-nologin-in-requisites.patch +++ b/don-t-use-shell-sbin-nologin-in-requisites.patch @@ -1,4 +1,4 @@ -From 009f51315366827653011d2e9b80aa88416a8bf0 Mon Sep 17 00:00:00 2001 +From 9a8ca020a3cacbcfbbc33f209cd0ea6c3da3f788 Mon Sep 17 00:00:00 2001 From: Alexander Graul Date: Tue, 17 Aug 2021 11:52:00 +0200 Subject: [PATCH] Don't use shell="/sbin/nologin" in requisites @@ -13,10 +13,10 @@ Fixes: bsc#1188259 1 file changed, 7 insertions(+), 2 deletions(-) diff --git a/salt/state.py b/salt/state.py -index 2385975b42..db228228a7 100644 +index 64c5225728..c6742101b2 100644 --- a/salt/state.py +++ b/salt/state.py -@@ -921,9 +921,14 @@ class State: +@@ -889,9 +889,14 @@ class State: cmd_opts[run_cmd_arg] = low_data.get(run_cmd_arg) if "shell" in low_data: @@ -34,6 +34,6 @@ index 2385975b42..db228228a7 100644 if "onlyif" in low_data: _ret = self._run_check_onlyif(low_data, cmd_opts) -- -2.37.3 +2.32.0 diff --git a/drop-serial-from-event.unpack-in-cli.batch_async.patch b/drop-serial-from-event.unpack-in-cli.batch_async.patch index d92ab44..c50c5e7 100644 --- a/drop-serial-from-event.unpack-in-cli.batch_async.patch +++ b/drop-serial-from-event.unpack-in-cli.batch_async.patch @@ -1,4 +1,4 @@ -From d4c70bdcb8d871bf3a7e15299b69b9687b7d0d94 Mon Sep 17 00:00:00 2001 +From f6ad8b59662333327b04aa8f6465f6f3bceaa152 Mon Sep 17 00:00:00 2001 From: Alexander Graul Date: Mon, 31 Jan 2022 10:24:26 +0100 Subject: [PATCH] Drop serial from event.unpack in cli.batch_async @@ -29,6 +29,6 @@ index 09aa85258b..1012ce37cc 100644 if mtag.startswith(pattern[:-1]): minion = data["id"] -- -2.37.3 +2.34.1 diff 
--git a/early-feature-support-config.patch b/early-feature-support-config.patch index b7f509f..f1d422b 100644 --- a/early-feature-support-config.patch +++ b/early-feature-support-config.patch @@ -1,4 +1,4 @@ -From 3ccce128163f6cd9a9360d3b28729702a5d260c1 Mon Sep 17 00:00:00 2001 +From f24c61d3c1ede64c0ef5c11efeb7d2293e714550 Mon Sep 17 00:00:00 2001 From: Alexander Graul Date: Tue, 18 Jan 2022 16:40:45 +0100 Subject: [PATCH] early feature: support-config @@ -561,10 +561,10 @@ Check last function by full name create mode 100644 tests/unit/modules/test_saltsupport.py diff --git a/doc/ref/modules/all/index.rst b/doc/ref/modules/all/index.rst -index fef851215a..a36ab6b2e9 100644 +index 43dfb700f9..73958181dd 100644 --- a/doc/ref/modules/all/index.rst +++ b/doc/ref/modules/all/index.rst -@@ -416,6 +416,7 @@ execution modules +@@ -415,6 +415,7 @@ execution modules salt_version saltcheck saltcloudmod @@ -573,10 +573,10 @@ index fef851215a..a36ab6b2e9 100644 schedule scp_mod diff --git a/doc/ref/states/all/index.rst b/doc/ref/states/all/index.rst -index da7be43039..c968315970 100644 +index 914b63d0fb..e40b32a6e8 100644 --- a/doc/ref/states/all/index.rst +++ b/doc/ref/states/all/index.rst -@@ -282,6 +282,7 @@ state modules +@@ -280,6 +280,7 @@ state modules rvm salt_proxy saltmod @@ -1740,13 +1740,13 @@ index 0000000000..391acdb606 + info: List of all available groups + output: table diff --git a/salt/loader/lazy.py b/salt/loader/lazy.py -index 8a5d0dd267..e7d692859c 100644 +index 48c70d01c0..220641059c 100644 --- a/salt/loader/lazy.py +++ b/salt/loader/lazy.py -@@ -968,8 +968,10 @@ class LazyLoader(salt.utils.lazy.LazyDict): +@@ -950,8 +950,10 @@ class LazyLoader(salt.utils.lazy.LazyDict): mod_names = [module_name] + list(virtual_aliases) - for attr in funcs_to_load: + for attr in getattr(mod, "__load__", dir(mod)): - if attr.startswith("_"): - # private functions are skipped + if attr.startswith("_") and attr != "__call__": @@ -2168,10 +2168,10 @@ index 
0000000000..e800e3bf1f + + return __virtualname__ diff --git a/salt/scripts.py b/salt/scripts.py -index 7f6d80de59..1276d2c8b2 100644 +index 93eab0f702..b1fea566a9 100644 --- a/salt/scripts.py +++ b/salt/scripts.py -@@ -583,3 +583,18 @@ def salt_unity(): +@@ -574,3 +574,18 @@ def salt_unity(): sys.argv.pop(1) s_fun = getattr(sys.modules[__name__], "salt_{}".format(cmd)) s_fun() @@ -2191,10 +2191,10 @@ index 7f6d80de59..1276d2c8b2 100644 + _install_signal_handlers(client) + client.run() diff --git a/salt/state.py b/salt/state.py -index d6d2c90168..3196f3c635 100644 +index 91927d9ec6..fa5a578dc6 100644 --- a/salt/state.py +++ b/salt/state.py -@@ -1592,7 +1592,9 @@ class State: +@@ -1577,7 +1577,9 @@ class State: names = [] if state.startswith("__"): continue @@ -2205,7 +2205,7 @@ index d6d2c90168..3196f3c635 100644 if orchestration_jid is not None: chunk["__orchestration_jid__"] = orchestration_jid if "__sls__" in body: -@@ -2273,9 +2275,16 @@ class State: +@@ -2176,9 +2178,16 @@ class State: ret = self.call_parallel(cdata, low) else: self.format_slots(cdata) @@ -2225,7 +2225,7 @@ index d6d2c90168..3196f3c635 100644 self.states.inject_globals = {} if ( "check_cmd" in low -@@ -3362,10 +3371,31 @@ class State: +@@ -3252,10 +3261,31 @@ class State: running.update(errors) return running @@ -2489,7 +2489,7 @@ index 0000000000..fb0c9e0372 + """ + return __virtualname__ diff --git a/salt/utils/args.py b/salt/utils/args.py -index 536aea3816..04a8a14054 100644 +index ba50aff126..4e5ca0eedf 100644 --- a/salt/utils/args.py +++ b/salt/utils/args.py @@ -15,6 +15,7 @@ import salt.utils.jid @@ -2500,7 +2500,7 @@ index 536aea3816..04a8a14054 100644 log = logging.getLogger(__name__) -@@ -399,7 +400,7 @@ def format_call( +@@ -437,7 +438,7 @@ def format_call( ret = initial_ret is not None and initial_ret or {} ret["args"] = [] @@ -2510,10 +2510,10 @@ index 536aea3816..04a8a14054 100644 aspec = get_function_argspec(fun, is_class_method=is_class_method) diff --git 
a/salt/utils/decorators/__init__.py b/salt/utils/decorators/__init__.py -index 1f62d5f3d6..1906cc2ecc 100644 +index 20803771ed..0aba77e194 100644 --- a/salt/utils/decorators/__init__.py +++ b/salt/utils/decorators/__init__.py -@@ -866,3 +866,27 @@ def ensure_unicode_args(function): +@@ -867,3 +867,27 @@ def ensure_unicode_args(function): return function(*args, **kwargs) return wrapped @@ -2542,7 +2542,7 @@ index 1f62d5f3d6..1906cc2ecc 100644 + + return f diff --git a/salt/utils/parsers.py b/salt/utils/parsers.py -index 911b2cbb04..dc125de7d7 100644 +index 28660397d4..c0820e5df0 100644 --- a/salt/utils/parsers.py +++ b/salt/utils/parsers.py @@ -17,6 +17,7 @@ import optparse @@ -2561,7 +2561,7 @@ index 911b2cbb04..dc125de7d7 100644 import salt.utils.platform import salt.utils.process import salt.utils.stringutils -@@ -2026,6 +2028,118 @@ class SyndicOptionParser( +@@ -2088,6 +2090,118 @@ class SyndicOptionParser( return opts @@ -2698,25 +2698,25 @@ index 0000000000..4e0e79f3ea +if __name__ == "__main__": + salt_support() diff --git a/setup.py b/setup.py -index bd11ff95f7..d633af35ec 100755 +index af8e448007..2f6dfd6064 100755 --- a/setup.py +++ b/setup.py -@@ -1165,6 +1165,7 @@ class SaltDistribution(distutils.dist.Distribution): +@@ -1253,6 +1253,7 @@ class SaltDistribution(distutils.dist.Distribution): "scripts/salt-master", "scripts/salt-minion", "scripts/salt-proxy", + "scripts/salt-support", "scripts/salt-ssh", "scripts/salt-syndic", - "scripts/spm", -@@ -1216,6 +1217,7 @@ class SaltDistribution(distutils.dist.Distribution): + "scripts/salt-unity", +@@ -1299,6 +1300,7 @@ class SaltDistribution(distutils.dist.Distribution): "salt-key = salt.scripts:salt_key", "salt-master = salt.scripts:salt_master", "salt-minion = salt.scripts:salt_minion", + "salt-support = salt.scripts:salt_support", "salt-ssh = salt.scripts:salt_ssh", "salt-syndic = salt.scripts:salt_syndic", - "spm = salt.scripts:salt_spm", + "salt-unity = salt.scripts:salt_unity", diff --git 
a/tests/unit/cli/test_support.py b/tests/unit/cli/test_support.py new file mode 100644 index 0000000000..dc0e99bb3d @@ -3779,6 +3779,6 @@ index 0000000000..f9ce7be29a + "00:00:00.000 - The real TTYs became " "pseudo TTYs and vice versa" + ] -- -2.37.3 +2.34.1 diff --git a/enable-passing-a-unix_socket-for-mysql-returners-bsc.patch b/enable-passing-a-unix_socket-for-mysql-returners-bsc.patch index c81b44d..f05d98d 100644 --- a/enable-passing-a-unix_socket-for-mysql-returners-bsc.patch +++ b/enable-passing-a-unix_socket-for-mysql-returners-bsc.patch @@ -1,4 +1,4 @@ -From 465ea094039e771ea2b4ccda012d0dc12f7aa022 Mon Sep 17 00:00:00 2001 +From c8f4092f117bd93293e0957422555d3ae7bae999 Mon Sep 17 00:00:00 2001 From: Maximilian Meister Date: Thu, 3 May 2018 15:52:23 +0200 Subject: [PATCH] enable passing a unix_socket for mysql returners @@ -63,6 +63,6 @@ index 6fd4fdef2c..7a7e9a3284 100644 try: -- -2.37.3 +2.34.1 diff --git a/enhance-logging-when-inotify-beacon-is-missing-pyino.patch b/enhance-logging-when-inotify-beacon-is-missing-pyino.patch new file mode 100644 index 0000000..7b81993 --- /dev/null +++ b/enhance-logging-when-inotify-beacon-is-missing-pyino.patch @@ -0,0 +1,30 @@ +From cde0f9385e1afb9fa97fe2c86cfa77ae3b899aa0 Mon Sep 17 00:00:00 2001 +From: =?UTF-8?q?Pablo=20Su=C3=A1rez=20Hern=C3=A1ndez?= + +Date: Fri, 18 Jun 2021 13:09:22 +0100 +Subject: [PATCH] Enhance logging when inotify beacon is missing + pyinotify (bsc#1186310) + +--- + salt/beacons/inotify.py | 4 +++- + 1 file changed, 3 insertions(+), 1 deletion(-) + +diff --git a/salt/beacons/inotify.py b/salt/beacons/inotify.py +index fa2f73c35f..a6b7548f97 100644 +--- a/salt/beacons/inotify.py ++++ b/salt/beacons/inotify.py +@@ -49,7 +49,9 @@ log = logging.getLogger(__name__) + def __virtual__(): + if HAS_PYINOTIFY: + return __virtualname__ +- return False ++ err_msg = "pyinotify library is missing" ++ log.error("Unable to load inotify beacon: {}".format(err_msg)) ++ return False, err_msg + + + def 
_get_mask(mask): +-- +2.31.1 + + diff --git a/enhance-openscap-module-add-xccdf_eval-call-386.patch b/enhance-openscap-module-add-xccdf_eval-call-386.patch index def9667..b899270 100644 --- a/enhance-openscap-module-add-xccdf_eval-call-386.patch +++ b/enhance-openscap-module-add-xccdf_eval-call-386.patch @@ -1,4 +1,4 @@ -From 93d7bdab18bdc657c8103a7b5f569458a97c8ca0 Mon Sep 17 00:00:00 2001 +From 933345d049a0207e730ca518dc5f016b0c05d761 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Pablo=20Su=C3=A1rez=20Hern=C3=A1ndez?= Date: Wed, 7 Jul 2021 15:41:48 +0100 @@ -420,6 +420,6 @@ index 045c37f7c9..301c1869ec 100644 + }, + ) -- -2.37.3 +2.34.1 diff --git a/fix-62092-catch-zmq.error.zmqerror-to-set-hwm-for-zm.patch b/fix-62092-catch-zmq.error.zmqerror-to-set-hwm-for-zm.patch new file mode 100644 index 0000000..6c25a31 --- /dev/null +++ b/fix-62092-catch-zmq.error.zmqerror-to-set-hwm-for-zm.patch @@ -0,0 +1,35 @@ +From df474d3cc0a5f02591fea093f9efc324c6feef46 Mon Sep 17 00:00:00 2001 +From: =?UTF-8?q?Pablo=20Su=C3=A1rez=20Hern=C3=A1ndez?= + +Date: Thu, 7 Jul 2022 11:38:09 +0100 +Subject: [PATCH] Fix #62092: Catch zmq.error.ZMQError to set HWM for + zmq >= 3 (#543) + +It looks like before release 23.0.0, when trying to access zmq.HWM it +was raising ``AttributeError``, which is now wrapped under pyzmq's own +``zmq.error.ZMQError``. +Simply caching that, should then set the HWM correctly for zmq >= 3 +and therefore fix #62092. 
+ +Co-authored-by: Mircea Ulinic +--- + salt/transport/zeromq.py | 2 +- + 1 file changed, 1 insertion(+), 1 deletion(-) + +diff --git a/salt/transport/zeromq.py b/salt/transport/zeromq.py +index 9e61b23255..aa06298ee1 100644 +--- a/salt/transport/zeromq.py ++++ b/salt/transport/zeromq.py +@@ -898,7 +898,7 @@ class ZeroMQPubServerChannel(salt.transport.server.PubServerChannel): + try: + pub_sock.setsockopt(zmq.HWM, self.opts.get("pub_hwm", 1000)) + # in zmq >= 3.0, there are separate send and receive HWM settings +- except AttributeError: ++ except (AttributeError, zmq.error.ZMQError): + # Set the High Water Marks. For more information on HWM, see: + # http://api.zeromq.org/4-1:zmq-setsockopt + pub_sock.setsockopt(zmq.SNDHWM, self.opts.get("pub_hwm", 1000)) +-- +2.36.1 + + diff --git a/fix-bsc-1065792.patch b/fix-bsc-1065792.patch index 7d3d817..19767e6 100644 --- a/fix-bsc-1065792.patch +++ b/fix-bsc-1065792.patch @@ -1,4 +1,4 @@ -From 559920223a010b70a4e469143b3390d8d3a7a4e2 Mon Sep 17 00:00:00 2001 +From 22fe4809712dbc59ba2d8c3c2045f531f81bc517 Mon Sep 17 00:00:00 2001 From: Bo Maryniuk Date: Thu, 14 Dec 2017 16:21:40 +0100 Subject: [PATCH] Fix bsc#1065792 @@ -8,7 +8,7 @@ Subject: [PATCH] Fix bsc#1065792 1 file changed, 1 insertion(+) diff --git a/salt/states/service.py b/salt/states/service.py -index 93c7c4fb07..0d8a4efa03 100644 +index 536e64a430..27595f7703 100644 --- a/salt/states/service.py +++ b/salt/states/service.py @@ -78,6 +78,7 @@ def __virtual__(): @@ -20,6 +20,6 @@ index 93c7c4fb07..0d8a4efa03 100644 return __virtualname__ else: -- -2.37.3 +2.33.0 diff --git a/fix-crash-when-calling-manage.not_alive-runners.patch b/fix-crash-when-calling-manage.not_alive-runners.patch new file mode 100644 index 0000000..f046128 --- /dev/null +++ b/fix-crash-when-calling-manage.not_alive-runners.patch @@ -0,0 +1,81 @@ +From 3ecb98a9bd7a8d35cff6d0a5f34b7fea96f89da7 Mon Sep 17 00:00:00 2001 +From: =?UTF-8?q?Pablo=20Su=C3=A1rez=20Hern=C3=A1ndez?= + +Date: Fri, 8 Oct 2021 
12:47:53 +0100 +Subject: [PATCH] Fix crash when calling manage.not_alive runners + +Fix unit tests for netlink_tool_remote_on + +Drop wrong test +--- + salt/utils/network.py | 1 + + tests/unit/utils/test_network.py | 17 ++++------------- + 2 files changed, 5 insertions(+), 13 deletions(-) + +diff --git a/salt/utils/network.py b/salt/utils/network.py +index 0dd20c5599..f0f5f1e8ce 100644 +--- a/salt/utils/network.py ++++ b/salt/utils/network.py +@@ -1701,6 +1701,7 @@ def _netlink_tool_remote_on(port, which_end): + elif "ESTAB" not in line: + continue + chunks = line.split() ++ local_host, local_port = chunks[3].rsplit(":", 1) + remote_host, remote_port = chunks[4].rsplit(":", 1) + + if which_end == "remote_port" and int(remote_port) != int(port): +diff --git a/tests/unit/utils/test_network.py b/tests/unit/utils/test_network.py +index 637d5e9811..3060aba0aa 100644 +--- a/tests/unit/utils/test_network.py ++++ b/tests/unit/utils/test_network.py +@@ -110,18 +110,14 @@ USER COMMAND PID FD PROTO LOCAL ADDRESS FOREIGN ADDRESS + salt-master python2.781106 35 tcp4 127.0.0.1:61115 127.0.0.1:4506 + """ + +-NETLINK_SS = """ +-State Recv-Q Send-Q Local Address:Port Peer Address:Port +-ESTAB 0 0 127.0.0.1:56726 127.0.0.1:4505 +-ESTAB 0 0 ::ffff:1.2.3.4:5678 ::ffff:1.2.3.4:4505 +-""" +- + LINUX_NETLINK_SS_OUTPUT = """\ + State Recv-Q Send-Q Local Address:Port Peer Address:Port + TIME-WAIT 0 0 [::1]:8009 [::1]:40368 + LISTEN 0 128 127.0.0.1:5903 0.0.0.0:* + ESTAB 0 0 [::ffff:127.0.0.1]:4506 [::ffff:127.0.0.1]:32315 + ESTAB 0 0 192.168.122.1:4506 192.168.122.177:24545 ++ESTAB 0 0 127.0.0.1:56726 127.0.0.1:4505 ++ESTAB 0 0 ::ffff:1.2.3.4:5678 ::ffff:1.2.3.4:4505 + """ + + IPV4_SUBNETS = { +@@ -633,11 +629,11 @@ class NetworkTestCase(TestCase): + with patch( + "subprocess.check_output", return_value=LINUX_NETLINK_SS_OUTPUT + ): +- remotes = network._netlink_tool_remote_on("4506", "local") ++ remotes = network._netlink_tool_remote_on("4506", "local_port") + self.assertEqual(remotes, 
{"192.168.122.177", "::ffff:127.0.0.1"}) + + def test_netlink_tool_remote_on_b(self): +- with patch("subprocess.check_output", return_value=NETLINK_SS): ++ with patch("subprocess.check_output", return_value=LINUX_NETLINK_SS_OUTPUT): + remotes = network._netlink_tool_remote_on("4505", "remote_port") + self.assertEqual(remotes, {"127.0.0.1", "::ffff:1.2.3.4"}) + +@@ -1274,11 +1270,6 @@ class NetworkTestCase(TestCase): + ): + self.assertEqual(network.get_fqhostname(), host) + +- def test_netlink_tool_remote_on(self): +- with patch("subprocess.check_output", return_value=NETLINK_SS): +- remotes = network._netlink_tool_remote_on("4505", "remote") +- self.assertEqual(remotes, {"127.0.0.1", "::ffff:1.2.3.4"}) +- + def test_is_fqdn(self): + """ + Test is_fqdn function passes possible FQDN names. +-- +2.33.0 + + diff --git a/fix-exception-in-yumpkg.remove-for-not-installed-pac.patch b/fix-exception-in-yumpkg.remove-for-not-installed-pac.patch new file mode 100644 index 0000000..89d783c --- /dev/null +++ b/fix-exception-in-yumpkg.remove-for-not-installed-pac.patch @@ -0,0 +1,76 @@ +From 40d9cde9b90965e60520f36dbe189fb64d15559d Mon Sep 17 00:00:00 2001 +From: Victor Zhestkov <35733135+vzhestkov@users.noreply.github.com> +Date: Thu, 24 Jun 2021 13:17:13 +0300 +Subject: [PATCH] Fix exception in yumpkg.remove for not installed + package (#380) + +--- + salt/modules/yumpkg.py | 2 ++ + tests/pytests/unit/modules/test_yumpkg.py | 37 +++++++++++++++++++++++ + 2 files changed, 39 insertions(+) + +diff --git a/salt/modules/yumpkg.py b/salt/modules/yumpkg.py +index 9737508377..9f8f548e5f 100644 +--- a/salt/modules/yumpkg.py ++++ b/salt/modules/yumpkg.py +@@ -2123,6 +2123,8 @@ def remove(name=None, pkgs=None, **kwargs): # pylint: disable=W0613 + pkg_params.update(pkg_matches) + + for target in pkg_params: ++ if target not in old: ++ continue + version_to_remove = pkg_params[target] + + # Check if package version set to be removed is actually installed: +diff --git 
a/tests/pytests/unit/modules/test_yumpkg.py b/tests/pytests/unit/modules/test_yumpkg.py +index 3b35272550..c3456f7e29 100644 +--- a/tests/pytests/unit/modules/test_yumpkg.py ++++ b/tests/pytests/unit/modules/test_yumpkg.py +@@ -1284,6 +1284,43 @@ def test_install_error_reporting(): + assert exc_info.value.info == expected, exc_info.value.info + + ++def test_remove_not_installed(): ++ """ ++ Tests that no exception raised on removing not installed package ++ """ ++ name = "foo" ++ list_pkgs_mock = MagicMock(return_value={}) ++ cmd_mock = MagicMock( ++ return_value={"pid": 12345, "retcode": 0, "stdout": "", "stderr": ""} ++ ) ++ salt_mock = { ++ "cmd.run_all": cmd_mock, ++ "lowpkg.version_cmp": rpm.version_cmp, ++ "pkg_resource.parse_targets": MagicMock( ++ return_value=({name: None}, "repository") ++ ), ++ } ++ with patch.object(yumpkg, "list_pkgs", list_pkgs_mock), patch( ++ "salt.utils.systemd.has_scope", MagicMock(return_value=False) ++ ), patch.dict(yumpkg.__salt__, salt_mock): ++ ++ # Test yum ++ with patch.dict(yumpkg.__context__, {"yum_bin": "yum"}), patch.dict( ++ yumpkg.__grains__, {"os": "CentOS", "osrelease": 7} ++ ): ++ yumpkg.remove(name) ++ cmd_mock.assert_not_called() ++ ++ # Test dnf ++ yumpkg.__context__.pop("yum_bin") ++ cmd_mock.reset_mock() ++ with patch.dict(yumpkg.__context__, {"yum_bin": "dnf"}), patch.dict( ++ yumpkg.__grains__, {"os": "Fedora", "osrelease": 27} ++ ): ++ yumpkg.remove(name) ++ cmd_mock.assert_not_called() ++ ++ + def test_upgrade_with_options(): + with patch.object(yumpkg, "list_pkgs", MagicMock(return_value={})), patch( + "salt.utils.systemd.has_scope", MagicMock(return_value=False) +-- +2.34.1 + + diff --git a/fix-for-cve-2022-22967-bsc-1200566.patch b/fix-for-cve-2022-22967-bsc-1200566.patch new file mode 100644 index 0000000..51c4d99 --- /dev/null +++ b/fix-for-cve-2022-22967-bsc-1200566.patch @@ -0,0 +1,75 @@ +From a9c292fdf9ae53b86109337165214d8aadb155e7 Mon Sep 17 00:00:00 2001 +From: Wayne Werner +Date: Fri, 1 Apr 
2022 14:21:57 -0500 +Subject: [PATCH] Fix for CVE-2022-22967 (bsc#1200566) + +--- + changelog/pam_auth.security | 1 + + salt/auth/pam.py | 2 +- + tests/pytests/unit/auth/test_pam.py | 32 +++++++++++++++++++++++++++++ + 3 files changed, 34 insertions(+), 1 deletion(-) + create mode 100644 changelog/pam_auth.security + create mode 100644 tests/pytests/unit/auth/test_pam.py + +diff --git a/changelog/pam_auth.security b/changelog/pam_auth.security +new file mode 100644 +index 0000000000..52943680f4 +--- /dev/null ++++ b/changelog/pam_auth.security +@@ -0,0 +1 @@ ++Fixed PAM auth to reject auth attempt if user account is locked. +diff --git a/salt/auth/pam.py b/salt/auth/pam.py +index a9dde95149..d91883b743 100644 +--- a/salt/auth/pam.py ++++ b/salt/auth/pam.py +@@ -209,7 +209,7 @@ def authenticate(username, password): + + retval = PAM_AUTHENTICATE(handle, 0) + if retval == 0: +- PAM_ACCT_MGMT(handle, 0) ++ retval = PAM_ACCT_MGMT(handle, 0) + PAM_END(handle, 0) + return retval == 0 + +diff --git a/tests/pytests/unit/auth/test_pam.py b/tests/pytests/unit/auth/test_pam.py +new file mode 100644 +index 0000000000..f5f49e65d8 +--- /dev/null ++++ b/tests/pytests/unit/auth/test_pam.py +@@ -0,0 +1,32 @@ ++import pytest ++import salt.auth.pam ++from tests.support.mock import patch ++ ++ ++@pytest.fixture ++def configure_loader_modules(): ++ return {salt.auth.pam: {}} ++ ++ ++@pytest.fixture ++def mock_pam(): ++ with patch("salt.auth.pam.CALLOC", autospec=True), patch( ++ "salt.auth.pam.pointer", autospec=True ++ ), patch("salt.auth.pam.PamHandle", autospec=True), patch( ++ "salt.auth.pam.PAM_START", autospec=True, return_value=0 ++ ), patch( ++ "salt.auth.pam.PAM_AUTHENTICATE", autospec=True, return_value=0 ++ ), patch( ++ "salt.auth.pam.PAM_END", autospec=True ++ ): ++ yield ++ ++ ++def test_cve_if_pam_acct_mgmt_returns_nonzero_authenticate_should_be_false(mock_pam): ++ with patch("salt.auth.pam.PAM_ACCT_MGMT", autospec=True, return_value=42): ++ assert 
salt.auth.pam.authenticate(username="fnord", password="fnord") is False ++ ++ ++def test_if_pam_acct_mgmt_returns_zero_authenticate_should_be_true(mock_pam): ++ with patch("salt.auth.pam.PAM_ACCT_MGMT", autospec=True, return_value=0): ++ assert salt.auth.pam.authenticate(username="fnord", password="fnord") is True +-- +2.36.1 + + diff --git a/fix-for-suse-expanded-support-detection.patch b/fix-for-suse-expanded-support-detection.patch index 21a1690..e9349fd 100644 --- a/fix-for-suse-expanded-support-detection.patch +++ b/fix-for-suse-expanded-support-detection.patch @@ -1,4 +1,4 @@ -From 51bcdcfdec73368d1517150eafda21e4af51dd7a Mon Sep 17 00:00:00 2001 +From 369a732537937dd6865152a87f04777539b27fcd Mon Sep 17 00:00:00 2001 From: Jochen Breuer Date: Thu, 6 Sep 2018 17:15:18 +0200 Subject: [PATCH] Fix for SUSE Expanded Support detection @@ -14,10 +14,10 @@ This change also adds a check for redhat-release and then marks the 1 file changed, 9 insertions(+) diff --git a/salt/grains/core.py b/salt/grains/core.py -index debbeb257d..b55ab4e472 100644 +index 436c058eb6..00bd0565bf 100644 --- a/salt/grains/core.py +++ b/salt/grains/core.py -@@ -2058,6 +2058,15 @@ def os_data(): +@@ -1990,6 +1990,15 @@ def os_data(): log.trace("Parsing distrib info from /etc/centos-release") # CentOS Linux grains["lsb_distrib_id"] = "CentOS" @@ -34,6 +34,6 @@ index debbeb257d..b55ab4e472 100644 for line in ifile: # Need to pull out the version and codename -- -2.37.3 +2.29.2 diff --git a/fix-inspector-module-export-function-bsc-1097531-481.patch b/fix-inspector-module-export-function-bsc-1097531-481.patch new file mode 100644 index 0000000..6aeaaf0 --- /dev/null +++ b/fix-inspector-module-export-function-bsc-1097531-481.patch @@ -0,0 +1,68 @@ +From 554b13dec6a9770b7fbf287b3bf9af91a2cdabde Mon Sep 17 00:00:00 2001 +From: Victor Zhestkov +Date: Fri, 28 Jan 2022 16:44:25 +0300 +Subject: [PATCH] Fix inspector module export function (bsc#1097531) + (#481) + +--- + salt/modules/inspectlib/fsdb.py | 
8 ++++---- + salt/modules/inspectlib/query.py | 2 +- + 2 files changed, 5 insertions(+), 5 deletions(-) + +diff --git a/salt/modules/inspectlib/fsdb.py b/salt/modules/inspectlib/fsdb.py +index 489fde5684..b834b8f678 100644 +--- a/salt/modules/inspectlib/fsdb.py ++++ b/salt/modules/inspectlib/fsdb.py +@@ -137,7 +137,7 @@ class CsvDB: + return self._tables.keys() + + def _load_table(self, table_name): +- with gzip.open(os.path.join(self.db_path, table_name), "rb") as table: ++ with gzip.open(os.path.join(self.db_path, table_name), "rt") as table: + return OrderedDict( + [tuple(elm.split(":")) for elm in next(csv.reader(table))] + ) +@@ -184,7 +184,7 @@ class CsvDB: + """ + get_type = lambda item: str(type(item)).split("'")[1] + if not os.path.exists(os.path.join(self.db_path, obj._TABLE)): +- with gzip.open(os.path.join(self.db_path, obj._TABLE), "wb") as table_file: ++ with gzip.open(os.path.join(self.db_path, obj._TABLE), "wt") as table_file: + csv.writer(table_file).writerow( + [ + "{col}:{type}".format(col=elm[0], type=get_type(elm[1])) +@@ -212,7 +212,7 @@ class CsvDB: + db_obj = self.get(obj.__class__, eq=fields) + if db_obj and distinct: + raise Exception("Object already in the database.") +- with gzip.open(os.path.join(self.db_path, obj._TABLE), "a") as table: ++ with gzip.open(os.path.join(self.db_path, obj._TABLE), "at") as table: + csv.writer(table).writerow(self._validate_object(obj)) + + def update(self, obj, matches=None, mt=None, lt=None, eq=None): +@@ -318,7 +318,7 @@ class CsvDB: + :return: + """ + objects = [] +- with gzip.open(os.path.join(self.db_path, obj._TABLE), "rb") as table: ++ with gzip.open(os.path.join(self.db_path, obj._TABLE), "rt") as table: + header = None + for data in csv.reader(table): + if not header: +diff --git a/salt/modules/inspectlib/query.py b/salt/modules/inspectlib/query.py +index 079cc29172..8027176a13 100644 +--- a/salt/modules/inspectlib/query.py ++++ b/salt/modules/inspectlib/query.py +@@ -74,7 +74,7 @@ class SysInfo: 
+ for dev, dev_data in salt.utils.fsutils._blkid().items(): + dev = self._get_disk_size(dev) + device = dev.pop("device") +- dev["type"] = dev_data["type"] ++ dev["type"] = dev_data.get("type", "UNKNOWN") + data[device] = dev + + return data +-- +2.34.1 + + diff --git a/fix-ip6_interface-grain-to-not-leak-secondary-ipv4-a.patch b/fix-ip6_interface-grain-to-not-leak-secondary-ipv4-a.patch new file mode 100644 index 0000000..5e6bdb7 --- /dev/null +++ b/fix-ip6_interface-grain-to-not-leak-secondary-ipv4-a.patch @@ -0,0 +1,32 @@ +From 0571b8a6d0f4728e604bab9a8ef6f2123546671b Mon Sep 17 00:00:00 2001 +From: =?UTF-8?q?Pablo=20Su=C3=A1rez=20Hern=C3=A1ndez?= + +Date: Fri, 15 Oct 2021 13:08:53 +0100 +Subject: [PATCH] Fix ip6_interface grain to not leak secondary IPv4 + addrs + +--- + salt/grains/core.py | 6 +++++- + 1 file changed, 5 insertions(+), 1 deletion(-) + +diff --git a/salt/grains/core.py b/salt/grains/core.py +index f79110124f..88f1d2c053 100644 +--- a/salt/grains/core.py ++++ b/salt/grains/core.py +@@ -2537,7 +2537,11 @@ def ip6_interfaces(): + iface_ips.append(inet["address"]) + for secondary in ifaces[face].get("secondary", []): + if "address" in secondary: +- iface_ips.append(secondary["address"]) ++ try: ++ socket.inet_pton(socket.AF_INET6, secondary["address"]) ++ iface_ips.append(secondary["address"]) ++ except OSError: ++ pass + ret[face] = iface_ips + return {"ip6_interfaces": ret} + +-- +2.33.0 + + diff --git a/fix-issue-2068-test.patch b/fix-issue-2068-test.patch index 2a45430..0c1eee7 100644 --- a/fix-issue-2068-test.patch +++ b/fix-issue-2068-test.patch @@ -1,4 +1,4 @@ -From f87d92122cb141e8f6f4d1c5a6ed5685e1b3900c Mon Sep 17 00:00:00 2001 +From db77ad3e24daf3bc014dc3d85a49aa1bb33ae1ae Mon Sep 17 00:00:00 2001 From: Bo Maryniuk Date: Wed, 9 Jan 2019 16:08:19 +0100 Subject: [PATCH] Fix issue #2068 test @@ -13,7 +13,7 @@ Minor update: more correct is-dict check. 
1 file changed, 6 insertions(+), 3 deletions(-) diff --git a/salt/state.py b/salt/state.py -index 3196f3c635..2385975b42 100644 +index b1bce4e0cd..cc6db7e1b2 100644 --- a/salt/state.py +++ b/salt/state.py @@ -12,6 +12,7 @@ The data sent to the state calls is as follows: @@ -24,7 +24,7 @@ index 3196f3c635..2385975b42 100644 import copy import datetime import fnmatch -@@ -3380,16 +3381,18 @@ class State: +@@ -3206,16 +3207,18 @@ class State: """ for chunk in high: state = high[chunk] @@ -47,6 +47,6 @@ index 3196f3c635..2385975b42 100644 def call_high(self, high, orchestration_jid=None): """ -- -2.37.3 +2.29.2 diff --git a/fix-issues-with-salt-ssh-s-extra-filerefs.patch b/fix-issues-with-salt-ssh-s-extra-filerefs.patch new file mode 100644 index 0000000..7573a38 --- /dev/null +++ b/fix-issues-with-salt-ssh-s-extra-filerefs.patch @@ -0,0 +1,24 @@ +From a268bfee70fabffc6d8fb6c297cd255fb3483ae1 Mon Sep 17 00:00:00 2001 +From: "Daniel A. Wozniak" +Date: Thu, 7 Oct 2021 17:22:37 -0700 +Subject: [PATCH] Fix issues with salt-ssh's extra-filerefs + +Verify salt-ssh can import from map files in states + +Add changelog for 60003.fixed +--- + changelog/60003.fixed | 1 + + 1 file changed, 1 insertion(+) + create mode 100644 changelog/60003.fixed + +diff --git a/changelog/60003.fixed b/changelog/60003.fixed +new file mode 100644 +index 0000000000..6fafbf5108 +--- /dev/null ++++ b/changelog/60003.fixed +@@ -0,0 +1 @@ ++Validate we can import map files in states +-- +2.34.1 + + diff --git a/fix-jinja2-contextfuntion-base-on-version-bsc-119874.patch b/fix-jinja2-contextfuntion-base-on-version-bsc-119874.patch new file mode 100644 index 0000000..4e03076 --- /dev/null +++ b/fix-jinja2-contextfuntion-base-on-version-bsc-119874.patch @@ -0,0 +1,83 @@ +From 65494338f5a9bdaa0be27afab3da3a03a92d8cda Mon Sep 17 00:00:00 2001 +From: =?UTF-8?q?Pablo=20Su=C3=A1rez=20Hern=C3=A1ndez?= + +Date: Fri, 8 Jul 2022 13:35:50 +0100 +Subject: [PATCH] fix: jinja2 contextfuntion base on version + 
(bsc#1198744) (#520) + +--- + salt/utils/jinja.py | 16 ++++++++++++++-- + tests/unit/utils/test_jinja.py | 8 +++++++- + 2 files changed, 21 insertions(+), 3 deletions(-) + +diff --git a/salt/utils/jinja.py b/salt/utils/jinja.py +index 0cb70bf64a..6b5b0d4e81 100644 +--- a/salt/utils/jinja.py ++++ b/salt/utils/jinja.py +@@ -25,7 +25,7 @@ import salt.utils.json + import salt.utils.stringutils + import salt.utils.url + import salt.utils.yaml +-from jinja2 import BaseLoader, Markup, TemplateNotFound, nodes ++from jinja2 import BaseLoader, TemplateNotFound, nodes + from jinja2.environment import TemplateModule + from jinja2.exceptions import TemplateRuntimeError + from jinja2.ext import Extension +@@ -34,6 +34,12 @@ from salt.utils.decorators.jinja import jinja_filter, jinja_global, jinja_test + from salt.utils.odict import OrderedDict + from salt.utils.versions import LooseVersion + ++try: ++ from markupsafe import Markup ++except ImportError: ++ # jinja < 3.1 ++ from jinja2 import Markup ++ + log = logging.getLogger(__name__) + + __all__ = ["SaltCacheLoader", "SerializerExtension"] +@@ -706,7 +712,13 @@ def method_call(obj, f_name, *f_args, **f_kwargs): + return getattr(obj, f_name, lambda *args, **kwargs: None)(*f_args, **f_kwargs) + + +-@jinja2.contextfunction ++try: ++ contextfunction = jinja2.contextfunction ++except AttributeError: ++ contextfunction = jinja2.pass_context ++ ++ ++@contextfunction + def show_full_context(ctx): + return salt.utils.data.simple_types_filter( + {key: value for key, value in ctx.items()} +diff --git a/tests/unit/utils/test_jinja.py b/tests/unit/utils/test_jinja.py +index 6502831aff..6bbcf9ef6f 100644 +--- a/tests/unit/utils/test_jinja.py ++++ b/tests/unit/utils/test_jinja.py +@@ -22,7 +22,7 @@ import salt.utils.files + import salt.utils.json + import salt.utils.stringutils + import salt.utils.yaml +-from jinja2 import DictLoader, Environment, Markup, exceptions ++from jinja2 import DictLoader, Environment, exceptions + from 
salt.exceptions import SaltRenderError + from salt.utils.decorators.jinja import JinjaFilter + from salt.utils.jinja import ( +@@ -46,6 +46,12 @@ try: + except ImportError: + HAS_TIMELIB = False + ++try: ++ from markupsafe import Markup ++except ImportError: ++ # jinja < 3.1 ++ from jinja2 import Markup ++ + BLINESEP = salt.utils.stringutils.to_bytes(os.linesep) + + +-- +2.36.1 + + diff --git a/fix-missing-minion-returns-in-batch-mode-360.patch b/fix-missing-minion-returns-in-batch-mode-360.patch index e14ac15..f379bb0 100644 --- a/fix-missing-minion-returns-in-batch-mode-360.patch +++ b/fix-missing-minion-returns-in-batch-mode-360.patch @@ -1,4 +1,4 @@ -From 3ed9869ee6847472846072d62cbc57dcb9104c90 Mon Sep 17 00:00:00 2001 +From 83fbfcbf49c98624029f1d215b7ad4d247128d39 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Ond=C5=99ej=20Hole=C4=8Dek?= Date: Mon, 10 May 2021 16:23:19 +0200 Subject: [PATCH] Fix missing minion returns in batch mode (#360) @@ -12,10 +12,10 @@ Co-authored-by: Denis V. Meltsaykin 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/salt/client/__init__.py b/salt/client/__init__.py -index 2427516ca1..86888adc19 100644 +index ddb437604b..78f4d99e84 100644 --- a/salt/client/__init__.py +++ b/salt/client/__init__.py -@@ -972,7 +972,7 @@ class LocalClient: +@@ -920,7 +920,7 @@ class LocalClient: self._clean_up_subscriptions(pub_data["jid"]) finally: @@ -25,6 +25,6 @@ index 2427516ca1..86888adc19 100644 def cmd_full_return( -- -2.37.3 +2.31.1 diff --git a/fix-multiple-security-issues-bsc-1197417.patch b/fix-multiple-security-issues-bsc-1197417.patch new file mode 100644 index 0000000..902aa91 --- /dev/null +++ b/fix-multiple-security-issues-bsc-1197417.patch @@ -0,0 +1,2946 @@ +From a5a3839eae2aed3e2fe98c314e770560eed2ed70 Mon Sep 17 00:00:00 2001 +From: =?UTF-8?q?Pablo=20Su=C3=A1rez=20Hern=C3=A1ndez?= + +Date: Wed, 23 Mar 2022 12:09:36 +0000 +Subject: [PATCH] Fix multiple security issues (bsc#1197417) + +* Sign authentication replies to prevent MiTM 
(CVE-2020-22935) +* Sign pillar data to prevent MiTM attacks. (CVE-2022-22934) +* Prevent job and fileserver replays (CVE-2022-22936) +* Fixed targeting bug, especially visible when using syndic and user auth. (CVE-2022-22941) +--- + salt/crypt.py | 275 +++-- + salt/master.py | 57 +- + salt/minion.py | 1 + + salt/pillar/__init__.py | 4 + + salt/transport/mixins/auth.py | 115 +- + salt/transport/tcp.py | 103 +- + salt/transport/zeromq.py | 91 +- + salt/utils/minions.py | 19 +- + salt/utils/network.py | 4 +- + tests/integration/files/ssh/known_hosts | 2 + + tests/integration/modules/test_ssh.py | 9 +- + .../states/test_ssh_known_hosts.py | 3 +- + .../transport/server/test_req_channel.py | 16 +- + .../zeromq/test_pub_server_channel.py | 57 +- + tests/pytests/unit/test_crypt.py | 151 +++ + tests/pytests/unit/test_minion.py | 1 + + tests/pytests/unit/transport/test_tcp.py | 20 +- + tests/pytests/unit/transport/test_zeromq.py | 1037 +++++++++++++++++ + tests/pytests/unit/utils/test_minions.py | 59 + + tests/pytests/unit/utils/test_network.py | 8 + + tests/unit/transport/test_ipc.py | 2 + + 21 files changed, 1779 insertions(+), 255 deletions(-) + create mode 100644 tests/pytests/unit/utils/test_network.py + +diff --git a/salt/crypt.py b/salt/crypt.py +index 776ffaba58..76870216fd 100644 +--- a/salt/crypt.py ++++ b/salt/crypt.py +@@ -17,6 +17,7 @@ import stat + import sys + import time + import traceback ++import uuid + import weakref + + import salt.defaults.exitcodes +@@ -262,7 +263,11 @@ def verify_signature(pubkey_path, message, signature): + md = EVP.MessageDigest("sha1") + md.update(salt.utils.stringutils.to_bytes(message)) + digest = md.final() +- return pubkey.verify(digest, signature) ++ try: ++ return pubkey.verify(digest, signature) ++ except RSA.RSAError as exc: ++ log.debug("Signature verification failed: %s", exc.args[0]) ++ return False + else: + verifier = PKCS1_v1_5.new(pubkey) + return verifier.verify( +@@ -696,9 +701,17 @@ class AsyncAuth: + 
self._authenticate_future.set_exception(error) + else: + key = self.__key(self.opts) +- AsyncAuth.creds_map[key] = creds +- self._creds = creds +- self._crypticle = Crypticle(self.opts, creds["aes"]) ++ if key not in AsyncAuth.creds_map: ++ log.debug("%s Got new master aes key.", self) ++ AsyncAuth.creds_map[key] = creds ++ self._creds = creds ++ self._crypticle = Crypticle(self.opts, creds["aes"]) ++ elif self._creds["aes"] != creds["aes"]: ++ log.debug("%s The master's aes key has changed.", self) ++ AsyncAuth.creds_map[key] = creds ++ self._creds = creds ++ self._crypticle = Crypticle(self.opts, creds["aes"]) ++ + self._authenticate_future.set_result( + True + ) # mark the sign-in as complete +@@ -729,7 +742,6 @@ class AsyncAuth: + with the publication port and the shared AES key. + + """ +- auth = {} + + auth_timeout = self.opts.get("auth_timeout", None) + if auth_timeout is not None: +@@ -741,10 +753,6 @@ class AsyncAuth: + if auth_tries is not None: + tries = auth_tries + +- m_pub_fn = os.path.join(self.opts["pki_dir"], self.mpub) +- +- auth["master_uri"] = self.opts["master_uri"] +- + close_channel = False + if not channel: + close_channel = True +@@ -769,59 +777,85 @@ class AsyncAuth: + finally: + if close_channel: + channel.close() ++ ret = self.handle_signin_response(sign_in_payload, payload) ++ raise salt.ext.tornado.gen.Return(ret) + +- if not isinstance(payload, dict): ++ def handle_signin_response(self, sign_in_payload, payload): ++ auth = {} ++ m_pub_fn = os.path.join(self.opts["pki_dir"], self.mpub) ++ auth["master_uri"] = self.opts["master_uri"] ++ if not isinstance(payload, dict) or "load" not in payload: + log.error("Sign-in attempt failed: %s", payload) +- raise salt.ext.tornado.gen.Return(False) +- if "load" in payload: +- if "ret" in payload["load"]: +- if not payload["load"]["ret"]: +- if self.opts["rejected_retry"]: +- log.error( +- "The Salt Master has rejected this minion's public " +- "key.\nTo repair this issue, delete the public key " 
+- "for this minion on the Salt Master.\nThe Salt " +- "Minion will attempt to to re-authenicate." +- ) +- raise salt.ext.tornado.gen.Return("retry") +- else: +- log.critical( +- "The Salt Master has rejected this minion's public " +- "key!\nTo repair this issue, delete the public key " +- "for this minion on the Salt Master and restart this " +- "minion.\nOr restart the Salt Master in open mode to " +- "clean out the keys. The Salt Minion will now exit." +- ) +- # Add a random sleep here for systems that are using a +- # a service manager to immediately restart the service +- # to avoid overloading the system +- time.sleep(random.randint(10, 20)) +- sys.exit(salt.defaults.exitcodes.EX_NOPERM) +- # has the master returned that its maxed out with minions? +- elif payload["load"]["ret"] == "full": +- raise salt.ext.tornado.gen.Return("full") +- else: ++ return False ++ ++ clear_signed_data = payload["load"] ++ clear_signature = payload["sig"] ++ payload = salt.payload.loads(clear_signed_data) ++ ++ if "pub_key" in payload: ++ auth["aes"] = self.verify_master( ++ payload, master_pub="token" in sign_in_payload ++ ) ++ if not auth["aes"]: ++ log.critical( ++ "The Salt Master server's public key did not authenticate!\n" ++ "The master may need to be updated if it is a version of Salt " ++ "lower than %s, or\n" ++ "If you are confident that you are connecting to a valid Salt " ++ "Master, then remove the master public key and restart the " ++ "Salt Minion.\nThe master public key can be found " ++ "at:\n%s", ++ salt.version.__version__, ++ m_pub_fn, ++ ) ++ raise SaltClientError("Invalid master key") ++ ++ master_pubkey_path = os.path.join(self.opts["pki_dir"], self.mpub) ++ if os.path.exists(master_pubkey_path) and not verify_signature( ++ master_pubkey_path, clear_signed_data, clear_signature ++ ): ++ log.critical("The payload signature did not validate.") ++ raise SaltClientError("Invalid signature") ++ ++ if payload["nonce"] != sign_in_payload["nonce"]: ++ 
log.critical("The payload nonce did not validate.") ++ raise SaltClientError("Invalid nonce") ++ ++ if "ret" in payload: ++ if not payload["ret"]: ++ if self.opts["rejected_retry"]: + log.error( +- "The Salt Master has cached the public key for this " +- "node, this salt minion will wait for %s seconds " +- "before attempting to re-authenticate", +- self.opts["acceptance_wait_time"], ++ "The Salt Master has rejected this minion's public " ++ "key.\nTo repair this issue, delete the public key " ++ "for this minion on the Salt Master.\nThe Salt " ++ "Minion will attempt to re-authenicate." + ) +- raise salt.ext.tornado.gen.Return("retry") +- auth["aes"] = self.verify_master(payload, master_pub="token" in sign_in_payload) +- if not auth["aes"]: +- log.critical( +- "The Salt Master server's public key did not authenticate!\n" +- "The master may need to be updated if it is a version of Salt " +- "lower than %s, or\n" +- "If you are confident that you are connecting to a valid Salt " +- "Master, then remove the master public key and restart the " +- "Salt Minion.\nThe master public key can be found " +- "at:\n%s", +- salt.version.__version__, +- m_pub_fn, +- ) +- raise SaltClientError("Invalid master key") ++ return "retry" ++ else: ++ log.critical( ++ "The Salt Master has rejected this minion's public " ++ "key!\nTo repair this issue, delete the public key " ++ "for this minion on the Salt Master and restart this " ++ "minion.\nOr restart the Salt Master in open mode to " ++ "clean out the keys. The Salt Minion will now exit." ++ ) ++ # Add a random sleep here for systems that are using a ++ # a service manager to immediately restart the service ++ # to avoid overloading the system ++ time.sleep(random.randint(10, 20)) ++ sys.exit(salt.defaults.exitcodes.EX_NOPERM) ++ # has the master returned that its maxed out with minions? 
++ elif payload["ret"] == "full": ++ return "full" ++ else: ++ log.error( ++ "The Salt Master has cached the public key for this " ++ "node, this salt minion will wait for %s seconds " ++ "before attempting to re-authenticate", ++ self.opts["acceptance_wait_time"], ++ ) ++ return "retry" ++ + if self.opts.get("syndic_master", False): # Is syndic + syndic_finger = self.opts.get( + "syndic_finger", self.opts.get("master_finger", False) +@@ -843,8 +877,9 @@ class AsyncAuth: + != self.opts["master_finger"] + ): + self._finger_fail(self.opts["master_finger"], m_pub_fn) ++ + auth["publish_port"] = payload["publish_port"] +- raise salt.ext.tornado.gen.Return(auth) ++ return auth + + def get_keys(self): + """ +@@ -892,6 +927,7 @@ class AsyncAuth: + payload = {} + payload["cmd"] = "_auth" + payload["id"] = self.opts["id"] ++ payload["nonce"] = uuid.uuid4().hex + if "autosign_grains" in self.opts: + autosign_grains = {} + for grain in self.opts["autosign_grains"]: +@@ -1254,6 +1290,7 @@ class SAuth(AsyncAuth): + self.token = salt.utils.stringutils.to_bytes(Crypticle.generate_key_string()) + self.pub_path = os.path.join(self.opts["pki_dir"], "minion.pub") + self.rsa_path = os.path.join(self.opts["pki_dir"], "minion.pem") ++ self._creds = None + if "syndic_master" in self.opts: + self.mpub = "syndic_master.pub" + elif "alert_master" in self.opts: +@@ -1323,8 +1360,14 @@ class SAuth(AsyncAuth): + ) + continue + break +- self._creds = creds +- self._crypticle = Crypticle(self.opts, creds["aes"]) ++ if self._creds is None: ++ log.error("%s Got new master aes key.", self) ++ self._creds = creds ++ self._crypticle = Crypticle(self.opts, creds["aes"]) ++ elif self._creds["aes"] != creds["aes"]: ++ log.error("%s The master's aes key has changed.", self) ++ self._creds = creds ++ self._crypticle = Crypticle(self.opts, creds["aes"]) + + def sign_in(self, timeout=60, safe=True, tries=1, channel=None): + """ +@@ -1377,78 +1420,7 @@ class SAuth(AsyncAuth): + if close_channel: + 
channel.close() + +- if "load" in payload: +- if "ret" in payload["load"]: +- if not payload["load"]["ret"]: +- if self.opts["rejected_retry"]: +- log.error( +- "The Salt Master has rejected this minion's public " +- "key.\nTo repair this issue, delete the public key " +- "for this minion on the Salt Master.\nThe Salt " +- "Minion will attempt to to re-authenicate." +- ) +- return "retry" +- else: +- log.critical( +- "The Salt Master has rejected this minion's public " +- "key!\nTo repair this issue, delete the public key " +- "for this minion on the Salt Master and restart this " +- "minion.\nOr restart the Salt Master in open mode to " +- "clean out the keys. The Salt Minion will now exit." +- ) +- sys.exit(salt.defaults.exitcodes.EX_NOPERM) +- # has the master returned that its maxed out with minions? +- elif payload["load"]["ret"] == "full": +- return "full" +- else: +- log.error( +- "The Salt Master has cached the public key for this " +- "node. If this is the first time connecting to this " +- "master then this key may need to be accepted using " +- "'salt-key -a %s' on the salt master. 
This salt " +- "minion will wait for %s seconds before attempting " +- "to re-authenticate.", +- self.opts["id"], +- self.opts["acceptance_wait_time"], +- ) +- return "retry" +- auth["aes"] = self.verify_master(payload, master_pub="token" in sign_in_payload) +- if not auth["aes"]: +- log.critical( +- "The Salt Master server's public key did not authenticate!\n" +- "The master may need to be updated if it is a version of Salt " +- "lower than %s, or\n" +- "If you are confident that you are connecting to a valid Salt " +- "Master, then remove the master public key and restart the " +- "Salt Minion.\nThe master public key can be found " +- "at:\n%s", +- salt.version.__version__, +- m_pub_fn, +- ) +- sys.exit(42) +- if self.opts.get("syndic_master", False): # Is syndic +- syndic_finger = self.opts.get( +- "syndic_finger", self.opts.get("master_finger", False) +- ) +- if syndic_finger: +- if ( +- salt.utils.crypt.pem_finger( +- m_pub_fn, sum_type=self.opts["hash_type"] +- ) +- != syndic_finger +- ): +- self._finger_fail(syndic_finger, m_pub_fn) +- else: +- if self.opts.get("master_finger", False): +- if ( +- salt.utils.crypt.pem_finger( +- m_pub_fn, sum_type=self.opts["hash_type"] +- ) +- != self.opts["master_finger"] +- ): +- self._finger_fail(self.opts["master_finger"], m_pub_fn) +- auth["publish_port"] = payload["publish_port"] +- return auth ++ return self.handle_signin_response(sign_in_payload, payload) + + + class Crypticle: +@@ -1463,10 +1435,11 @@ class Crypticle: + AES_BLOCK_SIZE = 16 + SIG_SIZE = hashlib.sha256().digest_size + +- def __init__(self, opts, key_string, key_size=192): ++ def __init__(self, opts, key_string, key_size=192, serial=0): + self.key_string = key_string + self.keys = self.extract_keys(self.key_string, key_size) + self.key_size = key_size ++ self.serial = serial + + @classmethod + def generate_key_string(cls, key_size=192): +@@ -1536,13 +1509,17 @@ class Crypticle: + data = cypher.decrypt(data) + return data[: -data[-1]] + +- def 
dumps(self, obj): ++ def dumps(self, obj, nonce=None): + """ + Serialize and encrypt a python object + """ +- return self.encrypt(self.PICKLE_PAD + salt.payload.dumps(obj)) ++ if nonce: ++ toencrypt = self.PICKLE_PAD + nonce.encode() + salt.payload.dumps(obj) ++ else: ++ toencrypt = self.PICKLE_PAD + salt.payload.dumps(obj) ++ return self.encrypt(toencrypt) + +- def loads(self, data, raw=False): ++ def loads(self, data, raw=False, nonce=None): + """ + Decrypt and un-serialize a python object + """ +@@ -1550,5 +1527,25 @@ class Crypticle: + # simple integrity check to verify that we got meaningful data + if not data.startswith(self.PICKLE_PAD): + return {} +- load = salt.payload.loads(data[len(self.PICKLE_PAD) :], raw=raw) +- return load ++ data = data[len(self.PICKLE_PAD) :] ++ if nonce: ++ ret_nonce = data[:32].decode() ++ data = data[32:] ++ if ret_nonce != nonce: ++ raise SaltClientError("Nonce verification error") ++ payload = salt.payload.loads(data, raw=raw) ++ if isinstance(payload, dict): ++ if "serial" in payload: ++ serial = payload.pop("serial") ++ if serial <= self.serial: ++ log.critical( ++ "A message with an invalid serial was received.\n" ++ "this serial: %d\n" ++ "last serial: %d\n" ++ "The minion will not honor this request.", ++ serial, ++ self.serial, ++ ) ++ return {} ++ self.serial = serial ++ return payload +diff --git a/salt/master.py b/salt/master.py +index ee33bd8171..65b526c019 100644 +--- a/salt/master.py ++++ b/salt/master.py +@@ -129,6 +129,44 @@ class SMaster: + """ + return salt.daemons.masterapi.access_keys(self.opts) + ++ @classmethod ++ def get_serial(cls, opts=None, event=None): ++ with cls.secrets["aes"]["secret"].get_lock(): ++ if cls.secrets["aes"]["serial"].value == sys.maxsize: ++ cls.rotate_secrets(opts, event, use_lock=False) ++ else: ++ cls.secrets["aes"]["serial"].value += 1 ++ return cls.secrets["aes"]["serial"].value ++ ++ @classmethod ++ def rotate_secrets(cls, opts=None, event=None, use_lock=True): ++ 
log.info("Rotating master AES key") ++ if opts is None: ++ opts = {} ++ ++ for secret_key, secret_map in cls.secrets.items(): ++ # should be unnecessary-- since no one else should be modifying ++ if use_lock: ++ with secret_map["secret"].get_lock(): ++ secret_map["secret"].value = salt.utils.stringutils.to_bytes( ++ secret_map["reload"]() ++ ) ++ if "serial" in secret_map: ++ secret_map["serial"].value = 0 ++ else: ++ secret_map["secret"].value = salt.utils.stringutils.to_bytes( ++ secret_map["reload"]() ++ ) ++ if "serial" in secret_map: ++ secret_map["serial"].value = 0 ++ if event: ++ event.fire_event({"rotate_{}_key".format(secret_key): True}, tag="key") ++ ++ if opts.get("ping_on_rotate"): ++ # Ping all minions to get them to pick up the new key ++ log.debug("Pinging all connected minions due to key rotation") ++ salt.utils.master.ping_all_connected_minions(opts) ++ + + class Maintenance(salt.utils.process.SignalHandlingProcess): + """ +@@ -281,21 +319,8 @@ class Maintenance(salt.utils.process.SignalHandlingProcess): + to_rotate = True + + if to_rotate: +- log.info("Rotating master AES key") +- for secret_key, secret_map in SMaster.secrets.items(): +- # should be unnecessary-- since no one else should be modifying +- with secret_map["secret"].get_lock(): +- secret_map["secret"].value = salt.utils.stringutils.to_bytes( +- secret_map["reload"]() +- ) +- self.event.fire_event( +- {"rotate_{}_key".format(secret_key): True}, tag="key" +- ) ++ SMaster.rotate_secrets(self.opts, self.event) + self.rotate = now +- if self.opts.get("ping_on_rotate"): +- # Ping all minions to get them to pick up the new key +- log.debug("Pinging all connected minions due to key rotation") +- salt.utils.master.ping_all_connected_minions(self.opts) + + def handle_git_pillar(self): + """ +@@ -671,8 +696,12 @@ class Master(SMaster): + salt.crypt.Crypticle.generate_key_string() + ), + ), ++ "serial": multiprocessing.Value( ++ ctypes.c_longlong, lock=False # We'll use the lock from 'secret' ++ 
), + "reload": salt.crypt.Crypticle.generate_key_string, + } ++ + log.info("Creating master process manager") + # Since there are children having their own ProcessManager we should wait for kill more time. + self.process_manager = salt.utils.process.ProcessManager(wait_for_kill=5) +diff --git a/salt/minion.py b/salt/minion.py +index dbce3986ab..de3ad50b5c 100644 +--- a/salt/minion.py ++++ b/salt/minion.py +@@ -1691,6 +1691,7 @@ class Minion(MinionBase): + Override this method if you wish to handle the decoded data + differently. + """ ++ + # Ensure payload is unicode. Disregard failure to decode binary blobs. + if "user" in data: + log.info( +diff --git a/salt/pillar/__init__.py b/salt/pillar/__init__.py +index 22f5c3a0a9..e595b3fb1b 100644 +--- a/salt/pillar/__init__.py ++++ b/salt/pillar/__init__.py +@@ -9,6 +9,7 @@ import logging + import os + import sys + import traceback ++import uuid + + import salt.ext.tornado.gen + import salt.fileclient +@@ -240,6 +241,9 @@ class AsyncRemotePillar(RemotePillarMixin): + load, + dictkey="pillar", + ) ++ except salt.crypt.AuthenticationError as exc: ++ log.error(exc.message) ++ raise SaltClientError("Exception getting pillar.") + except Exception: # pylint: disable=broad-except + log.exception("Exception getting pillar:") + raise SaltClientError("Exception getting pillar.") +diff --git a/salt/transport/mixins/auth.py b/salt/transport/mixins/auth.py +index 90197fb506..1e2e8e6b7b 100644 +--- a/salt/transport/mixins/auth.py ++++ b/salt/transport/mixins/auth.py +@@ -112,7 +112,7 @@ class AESReqServerMixin: + + self.master_key = salt.crypt.MasterKeys(self.opts) + +- def _encrypt_private(self, ret, dictkey, target): ++ def _encrypt_private(self, ret, dictkey, target, nonce=None, sign_messages=True): + """ + The server equivalent of ReqChannel.crypted_transfer_decode_dictentry + """ +@@ -127,7 +127,6 @@ class AESReqServerMixin: + except OSError: + log.error("AES key not found") + return {"error": "AES key not found"} +- + pret = {} 
+ key = salt.utils.stringutils.to_bytes(key) + if HAS_M2: +@@ -135,9 +134,33 @@ class AESReqServerMixin: + else: + cipher = PKCS1_OAEP.new(pub) + pret["key"] = cipher.encrypt(key) +- pret[dictkey] = pcrypt.dumps(ret if ret is not False else {}) ++ if ret is False: ++ ret = {} ++ if sign_messages: ++ if nonce is None: ++ return {"error": "Nonce not included in request"} ++ tosign = salt.payload.dumps( ++ {"key": pret["key"], "pillar": ret, "nonce": nonce} ++ ) ++ master_pem_path = os.path.join(self.opts["pki_dir"], "master.pem") ++ signed_msg = { ++ "data": tosign, ++ "sig": salt.crypt.sign_message(master_pem_path, tosign), ++ } ++ pret[dictkey] = pcrypt.dumps(signed_msg) ++ else: ++ pret[dictkey] = pcrypt.dumps(ret) + return pret + ++ def _clear_signed(self, load): ++ master_pem_path = os.path.join(self.opts["pki_dir"], "master.pem") ++ tosign = salt.payload.dumps(load) ++ return { ++ "enc": "clear", ++ "load": tosign, ++ "sig": salt.crypt.sign_message(master_pem_path, tosign), ++ } ++ + def _update_aes(self): + """ + Check to see if a fresh AES key is available and update the components +@@ -164,7 +187,7 @@ class AESReqServerMixin: + payload["load"] = self.crypticle.loads(payload["load"]) + return payload + +- def _auth(self, load): ++ def _auth(self, load, sign_messages=False): + """ + Authenticate the client, use the sent public key to encrypt the AES key + which was generated at start up. 
+@@ -182,7 +205,10 @@ class AESReqServerMixin: + + if not salt.utils.verify.valid_id(self.opts, load["id"]): + log.info("Authentication request from invalid id %s", load["id"]) +- return {"enc": "clear", "load": {"ret": False}} ++ if sign_messages: ++ return self._clear_signed({"ret": False, "nonce": load["nonce"]}) ++ else: ++ return {"enc": "clear", "load": {"ret": False}} + log.info("Authentication request from %s", load["id"]) + + # 0 is default which should be 'unlimited' +@@ -220,7 +246,12 @@ class AESReqServerMixin: + self.event.fire_event( + eload, salt.utils.event.tagify(prefix="auth") + ) +- return {"enc": "clear", "load": {"ret": "full"}} ++ if sign_messages: ++ return self._clear_signed( ++ {"ret": "full", "nonce": load["nonce"]} ++ ) ++ else: ++ return {"enc": "clear", "load": {"ret": "full"}} + + # Check if key is configured to be auto-rejected/signed + auto_reject = self.auto_key.check_autoreject(load["id"]) +@@ -247,8 +278,10 @@ class AESReqServerMixin: + eload = {"result": False, "id": load["id"], "pub": load["pub"]} + if self.opts.get("auth_events") is True: + self.event.fire_event(eload, salt.utils.event.tagify(prefix="auth")) +- return {"enc": "clear", "load": {"ret": False}} +- ++ if sign_messages: ++ return self._clear_signed({"ret": False, "nonce": load["nonce"]}) ++ else: ++ return {"enc": "clear", "load": {"ret": False}} + elif os.path.isfile(pubfn): + # The key has been accepted, check it + with salt.utils.files.fopen(pubfn, "r") as pubfn_handle: +@@ -272,7 +305,12 @@ class AESReqServerMixin: + self.event.fire_event( + eload, salt.utils.event.tagify(prefix="auth") + ) +- return {"enc": "clear", "load": {"ret": False}} ++ if sign_messages: ++ return self._clear_signed( ++ {"ret": False, "nonce": load["nonce"]} ++ ) ++ else: ++ return {"enc": "clear", "load": {"ret": False}} + + elif not os.path.isfile(pubfn_pend): + # The key has not been accepted, this is a new minion +@@ -282,7 +320,10 @@ class AESReqServerMixin: + eload = {"result": 
False, "id": load["id"], "pub": load["pub"]} + if self.opts.get("auth_events") is True: + self.event.fire_event(eload, salt.utils.event.tagify(prefix="auth")) +- return {"enc": "clear", "load": {"ret": False}} ++ if sign_messages: ++ return self._clear_signed({"ret": False, "nonce": load["nonce"]}) ++ else: ++ return {"enc": "clear", "load": {"ret": False}} + + if auto_reject: + key_path = pubfn_rejected +@@ -305,7 +346,6 @@ class AESReqServerMixin: + # Write the key to the appropriate location + with salt.utils.files.fopen(key_path, "w+") as fp_: + fp_.write(load["pub"]) +- ret = {"enc": "clear", "load": {"ret": key_result}} + eload = { + "result": key_result, + "act": key_act, +@@ -314,7 +354,12 @@ class AESReqServerMixin: + } + if self.opts.get("auth_events") is True: + self.event.fire_event(eload, salt.utils.event.tagify(prefix="auth")) +- return ret ++ if sign_messages: ++ return self._clear_signed( ++ {"ret": key_result, "nonce": load["nonce"]} ++ ) ++ else: ++ return {"enc": "clear", "load": {"ret": key_result}} + + elif os.path.isfile(pubfn_pend): + # This key is in the pending dir and is awaiting acceptance +@@ -330,7 +375,6 @@ class AESReqServerMixin: + "Pending public key for %s rejected via autoreject_file", + load["id"], + ) +- ret = {"enc": "clear", "load": {"ret": False}} + eload = { + "result": False, + "act": "reject", +@@ -339,7 +383,10 @@ class AESReqServerMixin: + } + if self.opts.get("auth_events") is True: + self.event.fire_event(eload, salt.utils.event.tagify(prefix="auth")) +- return ret ++ if sign_messages: ++ return self._clear_signed({"ret": False, "nonce": load["nonce"]}) ++ else: ++ return {"enc": "clear", "load": {"ret": False}} + + elif not auto_sign: + # This key is in the pending dir and is not being auto-signed. 
+@@ -367,7 +414,12 @@ class AESReqServerMixin: + self.event.fire_event( + eload, salt.utils.event.tagify(prefix="auth") + ) +- return {"enc": "clear", "load": {"ret": False}} ++ if sign_messages: ++ return self._clear_signed( ++ {"ret": False, "nonce": load["nonce"]} ++ ) ++ else: ++ return {"enc": "clear", "load": {"ret": False}} + else: + log.info( + "Authentication failed from host %s, the key is in " +@@ -386,7 +438,12 @@ class AESReqServerMixin: + self.event.fire_event( + eload, salt.utils.event.tagify(prefix="auth") + ) +- return {"enc": "clear", "load": {"ret": True}} ++ if sign_messages: ++ return self._clear_signed( ++ {"ret": True, "nonce": load["nonce"]} ++ ) ++ else: ++ return {"enc": "clear", "load": {"ret": True}} + else: + # This key is in pending and has been configured to be + # auto-signed. Check to see if it is the same key, and if +@@ -408,7 +465,12 @@ class AESReqServerMixin: + self.event.fire_event( + eload, salt.utils.event.tagify(prefix="auth") + ) +- return {"enc": "clear", "load": {"ret": False}} ++ if sign_messages: ++ return self._clear_signed( ++ {"ret": False, "nonce": load["nonce"]} ++ ) ++ else: ++ return {"enc": "clear", "load": {"ret": False}} + else: + os.remove(pubfn_pend) + +@@ -418,7 +480,10 @@ class AESReqServerMixin: + eload = {"result": False, "id": load["id"], "pub": load["pub"]} + if self.opts.get("auth_events") is True: + self.event.fire_event(eload, salt.utils.event.tagify(prefix="auth")) +- return {"enc": "clear", "load": {"ret": False}} ++ if sign_messages: ++ return self._clear_signed({"ret": False, "nonce": load["nonce"]}) ++ else: ++ return {"enc": "clear", "load": {"ret": False}} + + log.info("Authentication accepted from %s", load["id"]) + # only write to disk if you are adding the file, and in open mode, +@@ -437,7 +502,10 @@ class AESReqServerMixin: + fp_.write(load["pub"]) + elif not load["pub"]: + log.error("Public key is empty: %s", load["id"]) +- return {"enc": "clear", "load": {"ret": False}} ++ if 
sign_messages: ++ return self._clear_signed({"ret": False, "nonce": load["nonce"]}) ++ else: ++ return {"enc": "clear", "load": {"ret": False}} + + pub = None + +@@ -451,7 +519,10 @@ class AESReqServerMixin: + pub = salt.crypt.get_rsa_pub_key(pubfn) + except salt.crypt.InvalidKeyError as err: + log.error('Corrupt public key "%s": %s', pubfn, err) +- return {"enc": "clear", "load": {"ret": False}} ++ if sign_messages: ++ return self._clear_signed({"ret": False, "nonce": load["nonce"]}) ++ else: ++ return {"enc": "clear", "load": {"ret": False}} + + if not HAS_M2: + cipher = PKCS1_OAEP.new(pub) +@@ -532,10 +603,14 @@ class AESReqServerMixin: + ret["aes"] = pub.public_encrypt(aes, RSA.pkcs1_oaep_padding) + else: + ret["aes"] = cipher.encrypt(aes) ++ + # Be aggressive about the signature + digest = salt.utils.stringutils.to_bytes(hashlib.sha256(aes).hexdigest()) + ret["sig"] = salt.crypt.private_encrypt(self.master_key.key, digest) + eload = {"result": True, "act": "accept", "id": load["id"], "pub": load["pub"]} + if self.opts.get("auth_events") is True: + self.event.fire_event(eload, salt.utils.event.tagify(prefix="auth")) ++ if sign_messages: ++ ret["nonce"] = load["nonce"] ++ return self._clear_signed(ret) + return ret +diff --git a/salt/transport/tcp.py b/salt/transport/tcp.py +index f8f51eab66..f00b3c40eb 100644 +--- a/salt/transport/tcp.py ++++ b/salt/transport/tcp.py +@@ -13,6 +13,7 @@ import threading + import time + import traceback + import urllib.parse ++import uuid + + import salt.crypt + import salt.exceptions +@@ -266,12 +267,15 @@ class AsyncTCPReqChannel(salt.transport.client.ReqChannel): + return { + "enc": self.crypt, + "load": load, ++ "version": 2, + } + + @salt.ext.tornado.gen.coroutine + def crypted_transfer_decode_dictentry( + self, load, dictkey=None, tries=3, timeout=60 + ): ++ nonce = uuid.uuid4().hex ++ load["nonce"] = nonce + if not self.auth.authenticated: + yield self.auth.authenticate() + ret = yield self.message_client.send( +@@ -285,10 
+289,29 @@ class AsyncTCPReqChannel(salt.transport.client.ReqChannel): + else: + cipher = PKCS1_OAEP.new(key) + aes = cipher.decrypt(ret["key"]) ++ ++ # Decrypt using the public key. + pcrypt = salt.crypt.Crypticle(self.opts, aes) +- data = pcrypt.loads(ret[dictkey]) +- data = salt.transport.frame.decode_embedded_strs(data) +- raise salt.ext.tornado.gen.Return(data) ++ signed_msg = pcrypt.loads(ret[dictkey]) ++ ++ # Validate the master's signature. ++ master_pubkey_path = os.path.join(self.opts["pki_dir"], "minion_master.pub") ++ if not salt.crypt.verify_signature( ++ master_pubkey_path, signed_msg["data"], signed_msg["sig"] ++ ): ++ raise salt.crypt.AuthenticationError( ++ "Pillar payload signature failed to validate." ++ ) ++ ++ # Make sure the signed key matches the key we used to decrypt the data. ++ data = salt.payload.loads(signed_msg["data"]) ++ if data["key"] != ret["key"]: ++ raise salt.crypt.AuthenticationError("Key verification failed.") ++ ++ # Validate the nonce. ++ if data["nonce"] != nonce: ++ raise salt.crypt.AuthenticationError("Pillar nonce verification failed.") ++ raise salt.ext.tornado.gen.Return(data["pillar"]) + + @salt.ext.tornado.gen.coroutine + def _crypted_transfer(self, load, tries=3, timeout=60): +@@ -298,6 +321,9 @@ class AsyncTCPReqChannel(salt.transport.client.ReqChannel): + Indeed, we can fail too early in case of a master restart during a + minion state execution call + """ ++ nonce = uuid.uuid4().hex ++ if load and isinstance(load, dict): ++ load["nonce"] = nonce + + @salt.ext.tornado.gen.coroutine + def _do_transfer(): +@@ -311,7 +337,7 @@ class AsyncTCPReqChannel(salt.transport.client.ReqChannel): + # communication, we do not subscribe to return events, we just + # upload the results to the master + if data: +- data = self.auth.crypticle.loads(data) ++ data = self.auth.crypticle.loads(data, nonce=nonce) + data = salt.transport.frame.decode_embedded_strs(data) + raise salt.ext.tornado.gen.Return(data) + +@@ -395,6 +421,7 @@ class 
AsyncTCPPubChannel( + return { + "enc": self.crypt, + "load": load, ++ "version": 2, + } + + @salt.ext.tornado.gen.coroutine +@@ -696,6 +723,14 @@ class TCPReqServerChannel( + ) + raise salt.ext.tornado.gen.Return() + ++ version = 0 ++ if "version" in payload: ++ version = payload["version"] ++ ++ sign_messages = False ++ if version > 1: ++ sign_messages = True ++ + # intercept the "_auth" commands, since the main daemon shouldn't know + # anything about our key auth + if ( +@@ -704,11 +739,15 @@ class TCPReqServerChannel( + ): + yield stream.write( + salt.transport.frame.frame_msg( +- self._auth(payload["load"]), header=header ++ self._auth(payload["load"], sign_messages), header=header + ) + ) + raise salt.ext.tornado.gen.Return() + ++ nonce = None ++ if version > 1: ++ nonce = payload["load"].pop("nonce", None) ++ + # TODO: test + try: + ret, req_opts = yield self.payload_handler(payload) +@@ -727,7 +766,7 @@ class TCPReqServerChannel( + elif req_fun == "send": + stream.write( + salt.transport.frame.frame_msg( +- self.crypticle.dumps(ret), header=header ++ self.crypticle.dumps(ret, nonce), header=header + ) + ) + elif req_fun == "send_private": +@@ -737,6 +776,8 @@ class TCPReqServerChannel( + ret, + req_opts["key"], + req_opts["tgt"], ++ nonce, ++ sign_messages, + ), + header=header, + ) +@@ -1381,7 +1422,7 @@ class PubServer(salt.ext.tornado.tcpserver.TCPServer): + TCP publisher + """ + +- def __init__(self, opts, io_loop=None): ++ def __init__(self, opts, io_loop=None, pack_publish=lambda _: _): + super().__init__(ssl_options=opts.get("ssl")) + self.io_loop = io_loop + self.opts = opts +@@ -1408,6 +1449,10 @@ class PubServer(salt.ext.tornado.tcpserver.TCPServer): + ) + else: + self.event = None ++ self._pack_publish = pack_publish ++ ++ def pack_publish(self, load): ++ return self._pack_publish(load) + + def close(self): + if self._closing: +@@ -1516,6 +1561,7 @@ class PubServer(salt.ext.tornado.tcpserver.TCPServer): + @salt.ext.tornado.gen.coroutine + def 
publish_payload(self, package, _): + log.debug("TCP PubServer sending payload: %s", package) ++ package = self.pack_publish(package) + payload = salt.transport.frame.frame_msg(package["payload"]) + + to_remove = [] +@@ -1591,7 +1637,9 @@ class TCPPubServerChannel(salt.transport.server.PubServerChannel): + self.io_loop = salt.ext.tornado.ioloop.IOLoop.current() + + # Spin up the publisher +- pub_server = PubServer(self.opts, io_loop=self.io_loop) ++ pub_server = PubServer( ++ self.opts, io_loop=self.io_loop, pack_publish=self.pack_publish ++ ) + sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM) + sock.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1) + _set_tcp_keepalive(sock, self.opts) +@@ -1634,12 +1682,9 @@ class TCPPubServerChannel(salt.transport.server.PubServerChannel): + """ + process_manager.add_process(self._publish_daemon, kwargs=kwargs) + +- def publish(self, load): +- """ +- Publish "load" to minions +- """ ++ def pack_publish(self, load): + payload = {"enc": "aes"} +- ++ load["serial"] = salt.master.SMaster.get_serial() + crypticle = salt.crypt.Crypticle( + self.opts, salt.master.SMaster.secrets["aes"]["secret"].value + ) +@@ -1648,20 +1693,6 @@ + master_pem_path = os.path.join(self.opts["pki_dir"], "master.pem") + log.debug("Signing data packet") + payload["sig"] = salt.crypt.sign_message(master_pem_path, payload["load"]) +- # Use the Salt IPC server +- if self.opts.get("ipc_mode", "") == "tcp": +- pull_uri = int(self.opts.get("tcp_master_publish_pull", 4514)) +- else: +- pull_uri = os.path.join(self.opts["sock_dir"], "publish_pull.ipc") +- # TODO: switch to the actual asynchronous interface +- # pub_sock = salt.transport.ipc.IPCMessageClient(self.opts, io_loop=self.io_loop) +- pub_sock = salt.utils.asynchronous.SyncWrapper( +- salt.transport.ipc.IPCMessageClient, +- (pull_uri,), +- loop_kwarg="io_loop", +- ) +- pub_sock.connect() +- + int_payload = {"payload": 
salt.payload.dumps(payload)} + + # add some targeting stuff for lists only (for now) +@@ -1678,5 +1709,21 @@ class TCPPubServerChannel(salt.transport.server.PubServerChannel): + int_payload["topic_lst"] = match_ids + else: + int_payload["topic_lst"] = load["tgt"] ++ return int_payload ++ ++ def publish(self, load): ++ """ ++ Publish "load" to minions ++ """ + # Send it over IPC! +- pub_sock.send(int_payload) ++ if self.opts.get("ipc_mode", "") == "tcp": ++ pull_uri = int(self.opts.get("tcp_master_publish_pull", 4514)) ++ else: ++ pull_uri = os.path.join(self.opts["sock_dir"], "publish_pull.ipc") ++ pub_sock = salt.utils.asynchronous.SyncWrapper( ++ salt.transport.ipc.IPCMessageClient, ++ (pull_uri,), ++ loop_kwarg="io_loop", ++ ) ++ pub_sock.connect() ++ pub_sock.send(load) +diff --git a/salt/transport/zeromq.py b/salt/transport/zeromq.py +index 357fb08553..9e61b23255 100644 +--- a/salt/transport/zeromq.py ++++ b/salt/transport/zeromq.py +@@ -8,6 +8,7 @@ import os + import signal + import sys + import threading ++import uuid + from random import randint + + import salt.auth +@@ -55,6 +56,7 @@ except ImportError: + except ImportError: + from Crypto.Cipher import PKCS1_OAEP # nosec + ++ + log = logging.getLogger(__name__) + + +@@ -66,12 +68,12 @@ def _get_master_uri(master_ip, master_port, source_ip=None, source_port=None): + rc = zmq_connect(socket, "tcp://192.168.1.17:5555;192.168.1.1:5555"); assert (rc == 0); + Source: http://api.zeromq.org/4-1:zmq-tcp + """ ++ + from salt.utils.zeromq import ip_bracket + + master_uri = "tcp://{master_ip}:{master_port}".format( + master_ip=ip_bracket(master_ip), master_port=master_port + ) +- + if source_ip or source_port: + if LIBZMQ_VERSION_INFO >= (4, 1, 6) and ZMQ_VERSION_INFO >= (16, 0, 1): + # The source:port syntax for ZeroMQ has been added in libzmq 4.1.6 +@@ -211,22 +213,27 @@ class AsyncZeroMQReqChannel(salt.transport.client.ReqChannel): + return { + "enc": self.crypt, + "load": load, ++ "version": 2, + } + + 
@salt.ext.tornado.gen.coroutine + def crypted_transfer_decode_dictentry( + self, load, dictkey=None, tries=3, timeout=60 + ): ++ nonce = uuid.uuid4().hex ++ load["nonce"] = nonce + if not self.auth.authenticated: + # Return control back to the caller, continue when authentication succeeds + yield self.auth.authenticate() +- # Return control to the caller. When send() completes, resume by populating ret with the Future.result ++ ++ # Return control to the caller. When send() completes, resume by ++ # populating ret with the Future.result + ret = yield self.message_client.send( + self._package_load(self.auth.crypticle.dumps(load)), + timeout=timeout, + tries=tries, + ) +- key = self.auth.get_keys() ++ + if "key" not in ret: + # Reauth in the case our key is deleted on the master side. + yield self.auth.authenticate() +@@ -235,15 +242,36 @@ class AsyncZeroMQReqChannel(salt.transport.client.ReqChannel): + timeout=timeout, + tries=tries, + ) ++ ++ key = self.auth.get_keys() + if HAS_M2: + aes = key.private_decrypt(ret["key"], RSA.pkcs1_oaep_padding) + else: + cipher = PKCS1_OAEP.new(key) + aes = cipher.decrypt(ret["key"]) ++ ++ # Decrypt using the public key. + pcrypt = salt.crypt.Crypticle(self.opts, aes) +- data = pcrypt.loads(ret[dictkey]) +- data = salt.transport.frame.decode_embedded_strs(data) +- raise salt.ext.tornado.gen.Return(data) ++ signed_msg = pcrypt.loads(ret[dictkey]) ++ ++ # Validate the master's signature. ++ master_pubkey_path = os.path.join(self.opts["pki_dir"], "minion_master.pub") ++ if not salt.crypt.verify_signature( ++ master_pubkey_path, signed_msg["data"], signed_msg["sig"] ++ ): ++ raise salt.crypt.AuthenticationError( ++ "Pillar payload signature failed to validate." ++ ) ++ ++ # Make sure the signed key matches the key we used to decrypt the data. ++ data = salt.payload.loads(signed_msg["data"]) ++ if data["key"] != ret["key"]: ++ raise salt.crypt.AuthenticationError("Key verification failed.") ++ ++ # Validate the nonce. 
++ if data["nonce"] != nonce: ++ raise salt.crypt.AuthenticationError("Pillar nonce verification failed.") ++ raise salt.ext.tornado.gen.Return(data["pillar"]) + + @salt.ext.tornado.gen.coroutine + def _crypted_transfer(self, load, tries=3, timeout=60, raw=False): +@@ -260,6 +288,9 @@ class AsyncZeroMQReqChannel(salt.transport.client.ReqChannel): + :param int tries: The number of times to make before failure + :param int timeout: The number of seconds on a response before failing + """ ++ nonce = uuid.uuid4().hex ++ if load and isinstance(load, dict): ++ load["nonce"] = nonce + + @salt.ext.tornado.gen.coroutine + def _do_transfer(): +@@ -274,7 +305,7 @@ class AsyncZeroMQReqChannel(salt.transport.client.ReqChannel): + # communication, we do not subscribe to return events, we just + # upload the results to the master + if data: +- data = self.auth.crypticle.loads(data, raw) ++ data = self.auth.crypticle.loads(data, raw, nonce) + if not raw: + data = salt.transport.frame.decode_embedded_strs(data) + raise salt.ext.tornado.gen.Return(data) +@@ -735,12 +766,24 @@ class ZeroMQReqServerChannel( + ) + raise salt.ext.tornado.gen.Return() + ++ version = 0 ++ if "version" in payload: ++ version = payload["version"] ++ ++ sign_messages = False ++ if version > 1: ++ sign_messages = True ++ + # intercept the "_auth" commands, since the main daemon shouldn't know + # anything about our key auth + if payload["enc"] == "clear" and payload.get("load", {}).get("cmd") == "_auth": +- stream.send(salt.payload.dumps(self._auth(payload["load"]))) ++ stream.send(salt.payload.dumps(self._auth(payload["load"], sign_messages))) + raise salt.ext.tornado.gen.Return() + ++ nonce = None ++ if version > 1: ++ nonce = payload["load"].pop("nonce", None) ++ + # TODO: test + try: + # Take the payload_handler function that was registered when we created the channel +@@ -756,7 +799,7 @@ class ZeroMQReqServerChannel( + if req_fun == "send_clear": + stream.send(salt.payload.dumps(ret)) + elif req_fun == 
"send": +- stream.send(salt.payload.dumps(self.crypticle.dumps(ret))) ++ stream.send(salt.payload.dumps(self.crypticle.dumps(ret, nonce))) + elif req_fun == "send_private": + stream.send( + salt.payload.dumps( +@@ -764,6 +807,8 @@ class ZeroMQReqServerChannel( + ret, + req_opts["key"], + req_opts["tgt"], ++ nonce, ++ sign_messages, + ) + ) + ) +@@ -894,6 +939,8 @@ class ZeroMQPubServerChannel(salt.transport.server.PubServerChannel): + try: + log.debug("Publish daemon getting data from puller %s", pull_uri) + package = pull_sock.recv() ++ package = salt.payload.loads(package) ++ package = self.pack_publish(package) + log.debug("Publish daemon received payload. size=%d", len(package)) + + unpacked_package = salt.payload.unpackage(package) +@@ -986,8 +1033,8 @@ class ZeroMQPubServerChannel(salt.transport.server.PubServerChannel): + """ + if self.pub_sock: + self.pub_close() +- ctx = zmq.Context.instance() +- self._sock_data.sock = ctx.socket(zmq.PUSH) ++ self._sock_data._ctx = zmq.Context() ++ self._sock_data.sock = self._sock_data._ctx.socket(zmq.PUSH) + self.pub_sock.setsockopt(zmq.LINGER, -1) + if self.opts.get("ipc_mode", "") == "tcp": + pull_uri = "tcp://127.0.0.1:{}".format( +@@ -1009,15 +1056,12 @@ class ZeroMQPubServerChannel(salt.transport.server.PubServerChannel): + if hasattr(self._sock_data, "sock"): + self._sock_data.sock.close() + delattr(self._sock_data, "sock") ++ if hasattr(self._sock_data, "_ctx"): ++ self._sock_data._ctx.destroy() + +- def publish(self, load): +- """ +- Publish "load" to minions. This send the load to the publisher daemon +- process with does the actual sending to minions. 
+- +- :param dict load: A load to be sent across the wire to minions +- """ ++ def pack_publish(self, load): + payload = {"enc": "aes"} ++ load["serial"] = salt.master.SMaster.get_serial() + crypticle = salt.crypt.Crypticle( + self.opts, salt.master.SMaster.secrets["aes"]["secret"].value + ) +@@ -1048,9 +1092,18 @@ class ZeroMQPubServerChannel(salt.transport.server.PubServerChannel): + load.get("jid", None), + len(payload), + ) ++ return payload ++ ++ def publish(self, load): ++ """ ++ Publish "load" to minions. This send the load to the publisher daemon ++ process with does the actual sending to minions. ++ ++ :param dict load: A load to be sent across the wire to minions ++ """ + if not self.pub_sock: + self.pub_connect() +- self.pub_sock.send(payload) ++ self.pub_sock.send(salt.payload.dumps(load)) + log.debug("Sent payload to publish daemon.") + + +diff --git a/salt/utils/minions.py b/salt/utils/minions.py +index a639bbb513..3e2f448db6 100644 +--- a/salt/utils/minions.py ++++ b/salt/utils/minions.py +@@ -736,20 +736,27 @@ class CkMinions: + + def validate_tgt(self, valid, expr, tgt_type, minions=None, expr_form=None): + """ +- Return a Bool. This function returns if the expression sent in is +- within the scope of the valid expression ++ Validate the target minions against the possible valid minions. ++ ++ If ``minions`` is provided, they will be compared against the valid ++ minions. Otherwise, ``expr`` and ``tgt_type`` will be used to expand ++ to a list of target minions. ++ ++ Return True if all of the requested minions are valid minions, ++ otherwise return False. + """ + + v_minions = set(self.check_minions(valid, "compound").get("minions", [])) ++ if not v_minions: ++ # There are no valid minions, so it doesn't matter what we are ++ # targeting - this is a fail. 
++ return False + if minions is None: + _res = self.check_minions(expr, tgt_type) + minions = set(_res["minions"]) + else: + minions = set(minions) +- d_bool = not bool(minions.difference(v_minions)) +- if len(v_minions) == len(minions) and d_bool: +- return True +- return d_bool ++ return minions.issubset(v_minions) + + def match_check(self, regex, fun): + """ +diff --git a/salt/utils/network.py b/salt/utils/network.py +index 349cfb6fce..90be389a59 100644 +--- a/salt/utils/network.py ++++ b/salt/utils/network.py +@@ -1003,10 +1003,10 @@ def _junos_interfaces_ifconfig(out): + + pip = re.compile( + r".*?inet\s*(primary)*\s+mtu" +- r" (\d+)\s+local=[^\d]*(.*?)\s+dest=[^\d]*(.*?)\/([\d]*)\s+bcast=((?:[0-9]{1,3}\.){3}[0-9]{1,3})" ++ r" (\d+)\s+local=[^\d]*(.*?)\s{0,40}dest=[^\d]*(.*?)\/([\d]*)\s{0,40}bcast=((?:[0-9]{1,3}\.){3}[0-9]{1,3})" + ) + pip6 = re.compile( +- r".*?inet6 mtu [^\d]+\s+local=([0-9a-f:]+)%([a-zA-Z0-9]*)/([\d]*)\s" ++ r".*?inet6 mtu [^\d]+\s{0,40}local=([0-9a-f:]+)%([a-zA-Z0-9]*)/([\d]*)\s" + ) + + pupdown = re.compile("UP") +diff --git a/tests/integration/files/ssh/known_hosts b/tests/integration/files/ssh/known_hosts +index b46ae35a6b..aa02480ca8 100644 +--- a/tests/integration/files/ssh/known_hosts ++++ b/tests/integration/files/ssh/known_hosts +@@ -1 +1,3 @@ + |1|muzcBqgq7+ByUY7aLICytOff8UI=|rZ1JBNlIOqRnwwsJl9yP+xMxgf8= ssh-rsa AAAAB3NzaC1yc2EAAAABIwAAAQEAq2A7hRGmdnm9tUDbO9IDSwBK6TbQa+PXYPCPy6rbTrTtw7PHkccKrpp0yVhp5HdEIcKr6pLlVDBfOLX9QUsyCOV0wzfjIJNlGEYsdlLJizHhbn2mUjvSAHQqZETYP81eFzLQNnPHt4EVVUh7VfDESU84KezmD5QlWpXLmvU31/yMf+Se8xhHTvKSCZIFImWwoG6mbUoWf9nzpIoaSjB+weqqUUmpaaasXVal72J+UX2B+2RPW3RcT0eOzQgqlJL3RKrTJvdsjE3JEAvGq3lGHSZXy28G3skua2SmVi/w4yCE6gbODqnTWlg7+wC604ydGXA8VJiS5ap43JXiUFFAaQ== ++github.com ecdsa-sha2-nistp256 AAAAE2VjZHNhLXNoYTItbmlzdHAyNTYAAAAIbmlzdHAyNTYAAABBBEmKSENjQEezOmxkZMy7opKgwFB9nkt5YRrYMjNuG5N87uRgg6CLrbo5wAdT/y6v0mKV0U2w0WZ2YB/++Tpockg= ++github.com ssh-ed25519 
AAAAC3NzaC1lZDI1NTE5AAAAIOMqqnkVzrm0SdG6UOoqKLsabgH5C9okWi0dh2l9GKJl +diff --git a/tests/integration/modules/test_ssh.py b/tests/integration/modules/test_ssh.py +index 4bae9c1019..ffa052402e 100644 +--- a/tests/integration/modules/test_ssh.py ++++ b/tests/integration/modules/test_ssh.py +@@ -132,7 +132,9 @@ class SSHModuleTest(ModuleCase): + """ + Check that known host information is returned from remote host + """ +- ret = self.run_function("ssh.recv_known_host_entries", ["github.com"]) ++ ret = self.run_function( ++ "ssh.recv_known_host_entries", ["github.com"], enc="ssh-rsa" ++ ) + try: + self.assertNotEqual(ret, None) + self.assertEqual(ret[0]["enc"], "ssh-rsa") +@@ -219,7 +221,10 @@ class SSHModuleTest(ModuleCase): + """ + # add item + ret = self.run_function( +- "ssh.set_known_host", ["root", "github.com"], config=self.known_hosts ++ "ssh.set_known_host", ++ ["root", "github.com"], ++ enc="ssh-rsa", ++ config=self.known_hosts, + ) + try: + self.assertEqual(ret["status"], "updated") +diff --git a/tests/integration/states/test_ssh_known_hosts.py b/tests/integration/states/test_ssh_known_hosts.py +index beeb0342bd..cb4b40d3a0 100644 +--- a/tests/integration/states/test_ssh_known_hosts.py ++++ b/tests/integration/states/test_ssh_known_hosts.py +@@ -11,7 +11,7 @@ from tests.support.mixins import SaltReturnAssertsMixin + from tests.support.runtests import RUNTIME_VARS + + GITHUB_FINGERPRINT = "9d:38:5b:83:a9:17:52:92:56:1a:5e:c4:d4:81:8e:0a:ca:51:a2:64:f1:74:20:11:2e:f8:8a:c3:a1:39:49:8f" +-GITHUB_IP = "192.30.253.113" ++GITHUB_IP = "140.82.121.4" + + + @pytest.mark.skip_if_binaries_missing("ssh", "ssh-keygen", check_all=True) +@@ -37,6 +37,7 @@ class SSHKnownHostsStateTest(ModuleCase, SaltReturnAssertsMixin): + kwargs = { + "name": "github.com", + "user": "root", ++ "enc": "ssh-rsa", + "fingerprint": GITHUB_FINGERPRINT, + "config": self.known_hosts, + } +diff --git a/tests/pytests/functional/transport/server/test_req_channel.py 
b/tests/pytests/functional/transport/server/test_req_channel.py +index 7a392cd758..17d8861ccf 100644 +--- a/tests/pytests/functional/transport/server/test_req_channel.py ++++ b/tests/pytests/functional/transport/server/test_req_channel.py +@@ -1,3 +1,4 @@ ++import ctypes + import logging + import multiprocessing + +@@ -6,6 +7,7 @@ import salt.config + import salt.exceptions + import salt.ext.tornado.gen + import salt.log.setup ++import salt.master + import salt.transport.client + import salt.transport.server + import salt.utils.platform +@@ -33,6 +35,18 @@ class ReqServerChannelProcess(salt.utils.process.SignalHandlingProcess): + self.running = multiprocessing.Event() + + def run(self): ++ salt.master.SMaster.secrets["aes"] = { ++ "secret": multiprocessing.Array( ++ ctypes.c_char, ++ salt.utils.stringutils.to_bytes( ++ salt.crypt.Crypticle.generate_key_string() ++ ), ++ ), ++ "serial": multiprocessing.Value( ++ ctypes.c_longlong, lock=False # We'll use the lock from 'secret' ++ ), ++ } ++ + self.io_loop = salt.ext.tornado.ioloop.IOLoop() + self.io_loop.make_current() + self.req_server_channel.post_fork(self._handle_payload, io_loop=self.io_loop) +@@ -121,7 +135,7 @@ def test_basic(req_channel): + {"baz": "qux", "list": [1, 2, 3]}, + ] + for msg in msgs: +- ret = req_channel.send(msg, timeout=5, tries=1) ++ ret = req_channel.send(dict(msg), timeout=5, tries=1) + assert ret["load"] == msg + + +diff --git a/tests/pytests/functional/transport/zeromq/test_pub_server_channel.py b/tests/pytests/functional/transport/zeromq/test_pub_server_channel.py +index 9e183c11e0..e7033f810a 100644 +--- a/tests/pytests/functional/transport/zeromq/test_pub_server_channel.py ++++ b/tests/pytests/functional/transport/zeromq/test_pub_server_channel.py +@@ -10,6 +10,7 @@ import salt.exceptions + import salt.ext.tornado.gen + import salt.ext.tornado.ioloop + import salt.log.setup ++import salt.master + import salt.transport.client + import salt.transport.server + import salt.transport.zeromq 
+@@ -40,6 +41,21 @@ class Collector(salt.utils.process.SignalHandlingProcess): + self.started = multiprocessing.Event() + self.running = multiprocessing.Event() + ++ def _rotate_secrets(self, now=None): ++ salt.master.SMaster.secrets["aes"] = { ++ "secret": multiprocessing.Array( ++ ctypes.c_char, ++ salt.utils.stringutils.to_bytes( ++ salt.crypt.Crypticle.generate_key_string() ++ ), ++ ), ++ "serial": multiprocessing.Value( ++ ctypes.c_longlong, lock=False # We'll use the lock from 'secret' ++ ), ++ "reload": salt.crypt.Crypticle.generate_key_string, ++ "rotate_master_key": self._rotate_secrets, ++ } ++ + def run(self): + """ + Gather results until then number of seconds specified by timeout passes +@@ -67,6 +83,8 @@ class Collector(salt.utils.process.SignalHandlingProcess): + try: + serial_payload = salt.payload.loads(payload) + payload = crypticle.loads(serial_payload["load"]) ++ if not payload: ++ continue + if "start" in payload: + self.running.set() + continue +@@ -108,10 +126,16 @@ class PubServerChannelProcess(salt.utils.process.SignalHandlingProcess): + self.master_config = master_config + self.minion_config = minion_config + self.collector_kwargs = collector_kwargs +- self.aes_key = multiprocessing.Array( +- ctypes.c_char, +- salt.utils.stringutils.to_bytes(salt.crypt.Crypticle.generate_key_string()), +- ) ++ self.aes_key = salt.crypt.Crypticle.generate_key_string() ++ salt.master.SMaster.secrets["aes"] = { ++ "secret": multiprocessing.Array( ++ ctypes.c_char, ++ salt.utils.stringutils.to_bytes(self.aes_key), ++ ), ++ "serial": multiprocessing.Value( ++ ctypes.c_longlong, lock=False # We'll use the lock from 'secret' ++ ), ++ } + self.process_manager = salt.utils.process.ProcessManager( + name="ZMQ-PubServer-ProcessManager" + ) +@@ -126,14 +150,10 @@ class PubServerChannelProcess(salt.utils.process.SignalHandlingProcess): + self.queue = multiprocessing.Queue() + self.stopped = multiprocessing.Event() + self.collector = Collector( +- self.minion_config, +- 
self.pub_uri, +- self.aes_key.value, +- **self.collector_kwargs ++ self.minion_config, self.pub_uri, self.aes_key, **self.collector_kwargs + ) + + def run(self): +- salt.master.SMaster.secrets["aes"] = {"secret": self.aes_key} + try: + while True: + payload = self.queue.get() +@@ -227,12 +247,16 @@ def test_issue_36469_tcp(salt_master, salt_minion): + https://github.com/saltstack/salt/issues/36469 + """ + +- def _send_small(server_channel, sid, num=10): ++ def _send_small(opts, sid, num=10): ++ server_channel = salt.transport.zeromq.ZeroMQPubServerChannel(opts) + for idx in range(num): + load = {"tgt_type": "glob", "tgt": "*", "jid": "{}-s{}".format(sid, idx)} + server_channel.publish(load) ++ time.sleep(0.3) ++ server_channel.close_pub() + +- def _send_large(server_channel, sid, num=10, size=250000 * 3): ++ def _send_large(opts, sid, num=10, size=250000 * 3): ++ server_channel = salt.transport.zeromq.ZeroMQPubServerChannel(opts) + for idx in range(num): + load = { + "tgt_type": "glob", +@@ -241,16 +265,19 @@ def test_issue_36469_tcp(salt_master, salt_minion): + "xdata": "0" * size, + } + server_channel.publish(load) ++ time.sleep(0.3) ++ server_channel.close_pub() + + opts = dict(salt_master.config.copy(), ipc_mode="tcp", pub_hwm=0) + send_num = 10 * 4 + expect = [] + with PubServerChannelProcess(opts, salt_minion.config.copy()) as server_channel: ++ assert "aes" in salt.master.SMaster.secrets + with ThreadPoolExecutor(max_workers=4) as executor: +- executor.submit(_send_small, server_channel, 1) +- executor.submit(_send_large, server_channel, 2) +- executor.submit(_send_small, server_channel, 3) +- executor.submit(_send_large, server_channel, 4) ++ executor.submit(_send_small, opts, 1) ++ executor.submit(_send_large, opts, 2) ++ executor.submit(_send_small, opts, 3) ++ executor.submit(_send_large, opts, 4) + expect.extend(["{}-s{}".format(a, b) for a in range(10) for b in (1, 3)]) + expect.extend(["{}-l{}".format(a, b) for a in range(10) for b in (2, 4)]) + 
results = server_channel.collector.results +diff --git a/tests/pytests/unit/test_crypt.py b/tests/pytests/unit/test_crypt.py +index aa8f439b8c..a40c34b9d5 100644 +--- a/tests/pytests/unit/test_crypt.py ++++ b/tests/pytests/unit/test_crypt.py +@@ -4,10 +4,100 @@ tests.pytests.unit.test_crypt + + Unit tests for salt's crypt module + """ ++ ++import uuid ++ + import pytest + import salt.crypt ++import salt.master + import salt.utils.files + ++PRIV_KEY = """ ++-----BEGIN RSA PRIVATE KEY----- ++MIIEogIBAAKCAQEAoAsMPt+4kuIG6vKyw9r3+OuZrVBee/2vDdVetW+Js5dTlgrJ ++aghWWn3doGmKlEjqh7E4UTa+t2Jd6w8RSLnyHNJ/HpVhMG0M07MF6FMfILtDrrt8 ++ZX7eDVt8sx5gCEpYI+XG8Y07Ga9i3Hiczt+fu6HYwu96HggmG2pqkOrn3iGfqBvV ++YVFJzSZYe7e4c1PeEs0xYcrA4k+apyGsMtpef8vRUrNicRLc7dAcvfhtgt2DXEZ2 ++d72t/CR4ygtUvPXzisaTPW0G7OWAheCloqvTIIPQIjR8htFxGTz02STVXfnhnJ0Z ++k8KhqKF2v1SQvIYxsZU7jaDgl5i3zpeh58cYOwIDAQABAoIBABZUJEO7Y91+UnfC ++H6XKrZEZkcnH7j6/UIaOD9YhdyVKxhsnax1zh1S9vceNIgv5NltzIsfV6vrb6v2K ++Dx/F7Z0O0zR5o+MlO8ZncjoNKskex10gBEWG00Uqz/WPlddiQ/TSMJTv3uCBAzp+ ++S2Zjdb4wYPUlgzSgb2ygxrhsRahMcSMG9PoX6klxMXFKMD1JxiY8QfAHahPzQXy9 ++F7COZ0fCVo6BE+MqNuQ8tZeIxu8mOULQCCkLFwXmkz1FpfK/kNRmhIyhxwvCS+z4 ++JuErW3uXfE64RLERiLp1bSxlDdpvRO2R41HAoNELTsKXJOEt4JANRHm/CeyA5wsh ++NpscufUCgYEAxhgPfcMDy2v3nL6KtkgYjdcOyRvsAF50QRbEa8ldO+87IoMDD/Oe ++osFERJ5hhyyEO78QnaLVegnykiw5DWEF02RKMhD/4XU+1UYVhY0wJjKQIBadsufB ++2dnaKjvwzUhPh5BrBqNHl/FXwNCRDiYqXa79eWCPC9OFbZcUWWq70s8CgYEAztOI ++61zRfmXJ7f70GgYbHg+GA7IrsAcsGRITsFR82Ho0lqdFFCxz7oK8QfL6bwMCGKyk ++nzk+twh6hhj5UNp18KN8wktlo02zTgzgemHwaLa2cd6xKgmAyuPiTgcgnzt5LVNG ++FOjIWkLwSlpkDTl7ZzY2QSy7t+mq5d750fpIrtUCgYBWXZUbcpPL88WgDB7z/Bjg ++dlvW6JqLSqMK4b8/cyp4AARbNp12LfQC55o5BIhm48y/M70tzRmfvIiKnEc/gwaE ++NJx4mZrGFFURrR2i/Xx5mt/lbZbRsmN89JM+iKWjCpzJ8PgIi9Wh9DIbOZOUhKVB ++9RJEAgo70LvCnPTdS0CaVwKBgDJW3BllAvw/rBFIH4OB/vGnF5gosmdqp3oGo1Ik ++jipmPAx6895AH4tquIVYrUl9svHsezjhxvjnkGK5C115foEuWXw0u60uiTiy+6Pt ++2IS0C93VNMulenpnUrppE7CN2iWFAiaura0CY9fE/lsVpYpucHAWgi32Kok+ZxGL 
++WEttAoGAN9Ehsz4LeQxEj3x8wVeEMHF6OsznpwYsI2oVh6VxpS4AjgKYqeLVcnNi ++TlZFsuQcqgod8OgzA91tdB+Rp86NygmWD5WzeKXpCOg9uA+y/YL+0sgZZHsuvbK6 ++PllUgXdYxqClk/hdBFB7v9AQoaj7K9Ga22v32msftYDQRJ94xOI= ++-----END RSA PRIVATE KEY----- ++""" ++ ++ ++PUB_KEY = """ ++-----BEGIN PUBLIC KEY----- ++MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAoAsMPt+4kuIG6vKyw9r3 +++OuZrVBee/2vDdVetW+Js5dTlgrJaghWWn3doGmKlEjqh7E4UTa+t2Jd6w8RSLny ++HNJ/HpVhMG0M07MF6FMfILtDrrt8ZX7eDVt8sx5gCEpYI+XG8Y07Ga9i3Hiczt+f ++u6HYwu96HggmG2pqkOrn3iGfqBvVYVFJzSZYe7e4c1PeEs0xYcrA4k+apyGsMtpe ++f8vRUrNicRLc7dAcvfhtgt2DXEZ2d72t/CR4ygtUvPXzisaTPW0G7OWAheCloqvT ++IIPQIjR8htFxGTz02STVXfnhnJ0Zk8KhqKF2v1SQvIYxsZU7jaDgl5i3zpeh58cY ++OwIDAQAB ++-----END PUBLIC KEY----- ++""" ++ ++PRIV_KEY2 = """ ++-----BEGIN RSA PRIVATE KEY----- ++MIIEogIBAAKCAQEAp+8cTxguO6Vg+YO92VfHgNld3Zy8aM3JbZvpJcjTnis+YFJ7 ++Zlkcc647yPRRwY9nYBNywahnt5kIeuT1rTvTsMBZWvmUoEVUj1Xg8XXQkBvb9Ozy ++Gqy/G/p8KDDpzMP/U+XCnUeHiXTZrgnqgBIc2cKeCVvWFqDi0GRFGzyaXLaX3PPm ++M7DJ0MIPL1qgmcDq6+7Ze0gJ9SrDYFAeLmbuT1OqDfufXWQl/82JXeiwU2cOpqWq ++7n5fvPOWim7l1tzQ+dSiMRRm0xa6uNexCJww3oJSwvMbAmgzvOhqqhlqv+K7u0u7 ++FrFFojESsL36Gq4GBrISnvu2tk7u4GGNTYYQbQIDAQABAoIBAADrqWDQnd5DVZEA ++lR+WINiWuHJAy/KaIC7K4kAMBgbxrz2ZbiY9Ok/zBk5fcnxIZDVtXd1sZicmPlro ++GuWodIxdPZAnWpZ3UtOXUayZK/vCP1YsH1agmEqXuKsCu6Fc+K8VzReOHxLUkmXn ++FYM+tixGahXcjEOi/aNNTWitEB6OemRM1UeLJFzRcfyXiqzHpHCIZwBpTUAsmzcG ++QiVDkMTKubwo/m+PVXburX2CGibUydctgbrYIc7EJvyx/cpRiPZXo1PhHQWdu4Y1 ++SOaC66WLsP/wqvtHo58JQ6EN/gjSsbAgGGVkZ1xMo66nR+pLpR27coS7o03xCks6 ++DY/0mukCgYEAuLIGgBnqoh7YsOBLd/Bc1UTfDMxJhNseo+hZemtkSXz2Jn51322F ++Zw/FVN4ArXgluH+XsOhvG/MFFpojwZSrb0Qq5b1MRdo9qycq8lGqNtlN1WHqosDQ ++zW29kpL0tlRrSDpww3wRESsN9rH5XIrJ1b3ZXuO7asR+KBVQMy/+NcUCgYEA6MSC ++c+fywltKPgmPl5j0DPoDe5SXE/6JQy7w/vVGrGfWGf/zEJmhzS2R+CcfTTEqaT0T ++Yw8+XbFgKAqsxwtE9MUXLTVLI3sSUyE4g7blCYscOqhZ8ItCUKDXWkSpt++rG0Um ++1+cEJP/0oCazG6MWqvBC4NpQ1nzh46QpjWqMwokCgYAKDLXJ1p8rvx3vUeUJW6zR ++dfPlEGCXuAyMwqHLxXgpf4EtSwhC5gSyPOtx2LqUtcrnpRmt6JfTH4ARYMW9TMef 
++QEhNQ+WYj213mKP/l235mg1gJPnNbUxvQR9lkFV8bk+AGJ32JRQQqRUTbU+yN2MQ ++HEptnVqfTp3GtJIultfwOQKBgG+RyYmu8wBP650izg33BXu21raEeYne5oIqXN+I ++R5DZ0JjzwtkBGroTDrVoYyuH1nFNEh7YLqeQHqvyufBKKYo9cid8NQDTu+vWr5UK ++tGvHnwdKrJmM1oN5JOAiq0r7+QMAOWchVy449VNSWWV03aeftB685iR5BXkstbIQ ++EVopAoGAfcGBTAhmceK/4Q83H/FXBWy0PAa1kZGg/q8+Z0KY76AqyxOVl0/CU/rB ++3tO3sKhaMTHPME/MiQjQQGoaK1JgPY6JHYvly2KomrJ8QTugqNGyMzdVJkXAK2AM ++GAwC8ivAkHf8CHrHa1W7l8t2IqBjW1aRt7mOW92nfG88Hck0Mbo= ++-----END RSA PRIVATE KEY----- ++""" ++ ++ ++PUB_KEY2 = """ ++-----BEGIN PUBLIC KEY----- ++MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAp+8cTxguO6Vg+YO92VfH ++gNld3Zy8aM3JbZvpJcjTnis+YFJ7Zlkcc647yPRRwY9nYBNywahnt5kIeuT1rTvT ++sMBZWvmUoEVUj1Xg8XXQkBvb9OzyGqy/G/p8KDDpzMP/U+XCnUeHiXTZrgnqgBIc ++2cKeCVvWFqDi0GRFGzyaXLaX3PPmM7DJ0MIPL1qgmcDq6+7Ze0gJ9SrDYFAeLmbu ++T1OqDfufXWQl/82JXeiwU2cOpqWq7n5fvPOWim7l1tzQ+dSiMRRm0xa6uNexCJww ++3oJSwvMbAmgzvOhqqhlqv+K7u0u7FrFFojESsL36Gq4GBrISnvu2tk7u4GGNTYYQ ++bQIDAQAB ++-----END PUBLIC KEY----- ++""" ++ + + def test_get_rsa_pub_key_bad_key(tmp_path): + """ +@@ -18,3 +108,64 @@ def test_get_rsa_pub_key_bad_key(tmp_path): + fp.write("") + with pytest.raises(salt.crypt.InvalidKeyError): + salt.crypt.get_rsa_pub_key(key_path) ++ ++ ++def test_cryptical_dumps_no_nonce(): ++ master_crypt = salt.crypt.Crypticle({}, salt.crypt.Crypticle.generate_key_string()) ++ data = {"foo": "bar"} ++ ret = master_crypt.dumps(data) ++ ++ # Validate message structure ++ assert isinstance(ret, bytes) ++ une = master_crypt.decrypt(ret) ++ une.startswith(master_crypt.PICKLE_PAD) ++ assert salt.payload.loads(une[len(master_crypt.PICKLE_PAD) :]) == data ++ ++ # Validate load back to orig data ++ assert master_crypt.loads(ret) == data ++ ++ ++def test_cryptical_dumps_valid_nonce(): ++ nonce = uuid.uuid4().hex ++ master_crypt = salt.crypt.Crypticle({}, salt.crypt.Crypticle.generate_key_string()) ++ data = {"foo": "bar"} ++ ret = master_crypt.dumps(data, nonce=nonce) ++ ++ assert isinstance(ret, bytes) ++ une = 
master_crypt.decrypt(ret) ++ une.startswith(master_crypt.PICKLE_PAD) ++ nonce_and_data = une[len(master_crypt.PICKLE_PAD) :] ++ assert nonce_and_data.startswith(nonce.encode()) ++ assert salt.payload.loads(nonce_and_data[len(nonce) :]) == data ++ ++ assert master_crypt.loads(ret, nonce=nonce) == data ++ ++ ++def test_cryptical_dumps_invalid_nonce(): ++ nonce = uuid.uuid4().hex ++ master_crypt = salt.crypt.Crypticle({}, salt.crypt.Crypticle.generate_key_string()) ++ data = {"foo": "bar"} ++ ret = master_crypt.dumps(data, nonce=nonce) ++ assert isinstance(ret, bytes) ++ with pytest.raises(salt.crypt.SaltClientError, match="Nonce verification error"): ++ assert master_crypt.loads(ret, nonce="abcde") ++ ++ ++def test_verify_signature(tmpdir): ++ tmpdir.join("foo.pem").write(PRIV_KEY.strip()) ++ tmpdir.join("foo.pub").write(PUB_KEY.strip()) ++ tmpdir.join("bar.pem").write(PRIV_KEY2.strip()) ++ tmpdir.join("bar.pub").write(PUB_KEY2.strip()) ++ msg = b"foo bar" ++ sig = salt.crypt.sign_message(str(tmpdir.join("foo.pem")), msg) ++ assert salt.crypt.verify_signature(str(tmpdir.join("foo.pub")), msg, sig) ++ ++ ++def test_verify_signature_bad_sig(tmpdir): ++ tmpdir.join("foo.pem").write(PRIV_KEY.strip()) ++ tmpdir.join("foo.pub").write(PUB_KEY.strip()) ++ tmpdir.join("bar.pem").write(PRIV_KEY2.strip()) ++ tmpdir.join("bar.pub").write(PUB_KEY2.strip()) ++ msg = b"foo bar" ++ sig = salt.crypt.sign_message(str(tmpdir.join("foo.pem")), msg) ++ assert not salt.crypt.verify_signature(str(tmpdir.join("bar.pub")), msg, sig) +diff --git a/tests/pytests/unit/test_minion.py b/tests/pytests/unit/test_minion.py +index 7de60c49e3..985ec99276 100644 +--- a/tests/pytests/unit/test_minion.py ++++ b/tests/pytests/unit/test_minion.py +@@ -10,6 +10,7 @@ import salt.minion + import salt.syspaths + import salt.utils.crypt + import salt.utils.event as event ++import salt.utils.jid + import salt.utils.platform + import salt.utils.process + from salt._compat import ipaddress +diff --git 
a/tests/pytests/unit/transport/test_tcp.py b/tests/pytests/unit/transport/test_tcp.py +index d003797d29..3b6e175472 100644 +--- a/tests/pytests/unit/transport/test_tcp.py ++++ b/tests/pytests/unit/transport/test_tcp.py +@@ -210,15 +210,17 @@ def test_tcp_pub_server_channel_publish_filtering(temp_salt_master): + SyncWrapper.return_value = wrap + + # try simple publish with glob tgt_type +- channel.publish({"test": "value", "tgt_type": "glob", "tgt": "*"}) +- payload = wrap.send.call_args[0][0] ++ payload = channel.pack_publish( ++ {"test": "value", "tgt_type": "glob", "tgt": "*"} ++ ) + + # verify we send it without any specific topic + assert "topic_lst" not in payload + + # try simple publish with list tgt_type +- channel.publish({"test": "value", "tgt_type": "list", "tgt": ["minion01"]}) +- payload = wrap.send.call_args[0][0] ++ payload = channel.pack_publish( ++ {"test": "value", "tgt_type": "list", "tgt": ["minion01"]} ++ ) + + # verify we send it with correct topic + assert "topic_lst" in payload +@@ -226,8 +228,9 @@ def test_tcp_pub_server_channel_publish_filtering(temp_salt_master): + + # try with syndic settings + opts["order_masters"] = True +- channel.publish({"test": "value", "tgt_type": "list", "tgt": ["minion01"]}) +- payload = wrap.send.call_args[0][0] ++ payload = channel.pack_publish( ++ {"test": "value", "tgt_type": "list", "tgt": ["minion01"]} ++ ) + + # verify we send it without topic for syndics + assert "topic_lst" not in payload +@@ -257,8 +260,9 @@ def test_tcp_pub_server_channel_publish_filtering_str_list(temp_salt_master): + check_minions.return_value = {"minions": ["minion02"]} + + # try simple publish with list tgt_type +- channel.publish({"test": "value", "tgt_type": "list", "tgt": "minion02"}) +- payload = wrap.send.call_args[0][0] ++ payload = channel.pack_publish( ++ {"test": "value", "tgt_type": "list", "tgt": "minion02"} ++ ) + + # verify we send it with correct topic + assert "topic_lst" in payload +diff --git 
a/tests/pytests/unit/transport/test_zeromq.py b/tests/pytests/unit/transport/test_zeromq.py +index 44f38ee998..1f0515c91a 100644 +--- a/tests/pytests/unit/transport/test_zeromq.py ++++ b/tests/pytests/unit/transport/test_zeromq.py +@@ -2,9 +2,16 @@ + :codeauthor: Thomas Jackson + """ + ++import ctypes + import hashlib ++import logging ++import multiprocessing ++import os ++import uuid + ++import pytest + import salt.config ++import salt.crypt + import salt.exceptions + import salt.ext.tornado.gen + import salt.ext.tornado.ioloop +@@ -14,9 +21,236 @@ import salt.transport.server + import salt.utils.platform + import salt.utils.process + import salt.utils.stringutils ++from salt.master import SMaster + from salt.transport.zeromq import AsyncReqMessageClientPool + from tests.support.mock import MagicMock, patch + ++try: ++ from M2Crypto import RSA ++ ++ HAS_M2 = True ++except ImportError: ++ HAS_M2 = False ++ try: ++ from Cryptodome.Cipher import PKCS1_OAEP ++ except ImportError: ++ from Crypto.Cipher import PKCS1_OAEP # nosec ++ ++log = logging.getLogger(__name__) ++ ++MASTER_PRIV_KEY = """ ++-----BEGIN RSA PRIVATE KEY----- ++MIIEogIBAAKCAQEAoAsMPt+4kuIG6vKyw9r3+OuZrVBee/2vDdVetW+Js5dTlgrJ ++aghWWn3doGmKlEjqh7E4UTa+t2Jd6w8RSLnyHNJ/HpVhMG0M07MF6FMfILtDrrt8 ++ZX7eDVt8sx5gCEpYI+XG8Y07Ga9i3Hiczt+fu6HYwu96HggmG2pqkOrn3iGfqBvV ++YVFJzSZYe7e4c1PeEs0xYcrA4k+apyGsMtpef8vRUrNicRLc7dAcvfhtgt2DXEZ2 ++d72t/CR4ygtUvPXzisaTPW0G7OWAheCloqvTIIPQIjR8htFxGTz02STVXfnhnJ0Z ++k8KhqKF2v1SQvIYxsZU7jaDgl5i3zpeh58cYOwIDAQABAoIBABZUJEO7Y91+UnfC ++H6XKrZEZkcnH7j6/UIaOD9YhdyVKxhsnax1zh1S9vceNIgv5NltzIsfV6vrb6v2K ++Dx/F7Z0O0zR5o+MlO8ZncjoNKskex10gBEWG00Uqz/WPlddiQ/TSMJTv3uCBAzp+ ++S2Zjdb4wYPUlgzSgb2ygxrhsRahMcSMG9PoX6klxMXFKMD1JxiY8QfAHahPzQXy9 ++F7COZ0fCVo6BE+MqNuQ8tZeIxu8mOULQCCkLFwXmkz1FpfK/kNRmhIyhxwvCS+z4 ++JuErW3uXfE64RLERiLp1bSxlDdpvRO2R41HAoNELTsKXJOEt4JANRHm/CeyA5wsh ++NpscufUCgYEAxhgPfcMDy2v3nL6KtkgYjdcOyRvsAF50QRbEa8ldO+87IoMDD/Oe 
++osFERJ5hhyyEO78QnaLVegnykiw5DWEF02RKMhD/4XU+1UYVhY0wJjKQIBadsufB ++2dnaKjvwzUhPh5BrBqNHl/FXwNCRDiYqXa79eWCPC9OFbZcUWWq70s8CgYEAztOI ++61zRfmXJ7f70GgYbHg+GA7IrsAcsGRITsFR82Ho0lqdFFCxz7oK8QfL6bwMCGKyk ++nzk+twh6hhj5UNp18KN8wktlo02zTgzgemHwaLa2cd6xKgmAyuPiTgcgnzt5LVNG ++FOjIWkLwSlpkDTl7ZzY2QSy7t+mq5d750fpIrtUCgYBWXZUbcpPL88WgDB7z/Bjg ++dlvW6JqLSqMK4b8/cyp4AARbNp12LfQC55o5BIhm48y/M70tzRmfvIiKnEc/gwaE ++NJx4mZrGFFURrR2i/Xx5mt/lbZbRsmN89JM+iKWjCpzJ8PgIi9Wh9DIbOZOUhKVB ++9RJEAgo70LvCnPTdS0CaVwKBgDJW3BllAvw/rBFIH4OB/vGnF5gosmdqp3oGo1Ik ++jipmPAx6895AH4tquIVYrUl9svHsezjhxvjnkGK5C115foEuWXw0u60uiTiy+6Pt ++2IS0C93VNMulenpnUrppE7CN2iWFAiaura0CY9fE/lsVpYpucHAWgi32Kok+ZxGL ++WEttAoGAN9Ehsz4LeQxEj3x8wVeEMHF6OsznpwYsI2oVh6VxpS4AjgKYqeLVcnNi ++TlZFsuQcqgod8OgzA91tdB+Rp86NygmWD5WzeKXpCOg9uA+y/YL+0sgZZHsuvbK6 ++PllUgXdYxqClk/hdBFB7v9AQoaj7K9Ga22v32msftYDQRJ94xOI= ++-----END RSA PRIVATE KEY----- ++""" ++ ++ ++MASTER_PUB_KEY = """ ++-----BEGIN PUBLIC KEY----- ++MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAoAsMPt+4kuIG6vKyw9r3 +++OuZrVBee/2vDdVetW+Js5dTlgrJaghWWn3doGmKlEjqh7E4UTa+t2Jd6w8RSLny ++HNJ/HpVhMG0M07MF6FMfILtDrrt8ZX7eDVt8sx5gCEpYI+XG8Y07Ga9i3Hiczt+f ++u6HYwu96HggmG2pqkOrn3iGfqBvVYVFJzSZYe7e4c1PeEs0xYcrA4k+apyGsMtpe ++f8vRUrNicRLc7dAcvfhtgt2DXEZ2d72t/CR4ygtUvPXzisaTPW0G7OWAheCloqvT ++IIPQIjR8htFxGTz02STVXfnhnJ0Zk8KhqKF2v1SQvIYxsZU7jaDgl5i3zpeh58cY ++OwIDAQAB ++-----END PUBLIC KEY----- ++""" ++ ++MASTER2_PRIV_KEY = """ ++-----BEGIN RSA PRIVATE KEY----- ++MIIEogIBAAKCAQEAp+8cTxguO6Vg+YO92VfHgNld3Zy8aM3JbZvpJcjTnis+YFJ7 ++Zlkcc647yPRRwY9nYBNywahnt5kIeuT1rTvTsMBZWvmUoEVUj1Xg8XXQkBvb9Ozy ++Gqy/G/p8KDDpzMP/U+XCnUeHiXTZrgnqgBIc2cKeCVvWFqDi0GRFGzyaXLaX3PPm ++M7DJ0MIPL1qgmcDq6+7Ze0gJ9SrDYFAeLmbuT1OqDfufXWQl/82JXeiwU2cOpqWq ++7n5fvPOWim7l1tzQ+dSiMRRm0xa6uNexCJww3oJSwvMbAmgzvOhqqhlqv+K7u0u7 ++FrFFojESsL36Gq4GBrISnvu2tk7u4GGNTYYQbQIDAQABAoIBAADrqWDQnd5DVZEA ++lR+WINiWuHJAy/KaIC7K4kAMBgbxrz2ZbiY9Ok/zBk5fcnxIZDVtXd1sZicmPlro ++GuWodIxdPZAnWpZ3UtOXUayZK/vCP1YsH1agmEqXuKsCu6Fc+K8VzReOHxLUkmXn 
++FYM+tixGahXcjEOi/aNNTWitEB6OemRM1UeLJFzRcfyXiqzHpHCIZwBpTUAsmzcG ++QiVDkMTKubwo/m+PVXburX2CGibUydctgbrYIc7EJvyx/cpRiPZXo1PhHQWdu4Y1 ++SOaC66WLsP/wqvtHo58JQ6EN/gjSsbAgGGVkZ1xMo66nR+pLpR27coS7o03xCks6 ++DY/0mukCgYEAuLIGgBnqoh7YsOBLd/Bc1UTfDMxJhNseo+hZemtkSXz2Jn51322F ++Zw/FVN4ArXgluH+XsOhvG/MFFpojwZSrb0Qq5b1MRdo9qycq8lGqNtlN1WHqosDQ ++zW29kpL0tlRrSDpww3wRESsN9rH5XIrJ1b3ZXuO7asR+KBVQMy/+NcUCgYEA6MSC ++c+fywltKPgmPl5j0DPoDe5SXE/6JQy7w/vVGrGfWGf/zEJmhzS2R+CcfTTEqaT0T ++Yw8+XbFgKAqsxwtE9MUXLTVLI3sSUyE4g7blCYscOqhZ8ItCUKDXWkSpt++rG0Um ++1+cEJP/0oCazG6MWqvBC4NpQ1nzh46QpjWqMwokCgYAKDLXJ1p8rvx3vUeUJW6zR ++dfPlEGCXuAyMwqHLxXgpf4EtSwhC5gSyPOtx2LqUtcrnpRmt6JfTH4ARYMW9TMef ++QEhNQ+WYj213mKP/l235mg1gJPnNbUxvQR9lkFV8bk+AGJ32JRQQqRUTbU+yN2MQ ++HEptnVqfTp3GtJIultfwOQKBgG+RyYmu8wBP650izg33BXu21raEeYne5oIqXN+I ++R5DZ0JjzwtkBGroTDrVoYyuH1nFNEh7YLqeQHqvyufBKKYo9cid8NQDTu+vWr5UK ++tGvHnwdKrJmM1oN5JOAiq0r7+QMAOWchVy449VNSWWV03aeftB685iR5BXkstbIQ ++EVopAoGAfcGBTAhmceK/4Q83H/FXBWy0PAa1kZGg/q8+Z0KY76AqyxOVl0/CU/rB ++3tO3sKhaMTHPME/MiQjQQGoaK1JgPY6JHYvly2KomrJ8QTugqNGyMzdVJkXAK2AM ++GAwC8ivAkHf8CHrHa1W7l8t2IqBjW1aRt7mOW92nfG88Hck0Mbo= ++-----END RSA PRIVATE KEY----- ++""" ++ ++ ++MASTER2_PUB_KEY = """ ++-----BEGIN PUBLIC KEY----- ++MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAp+8cTxguO6Vg+YO92VfH ++gNld3Zy8aM3JbZvpJcjTnis+YFJ7Zlkcc647yPRRwY9nYBNywahnt5kIeuT1rTvT ++sMBZWvmUoEVUj1Xg8XXQkBvb9OzyGqy/G/p8KDDpzMP/U+XCnUeHiXTZrgnqgBIc ++2cKeCVvWFqDi0GRFGzyaXLaX3PPmM7DJ0MIPL1qgmcDq6+7Ze0gJ9SrDYFAeLmbu ++T1OqDfufXWQl/82JXeiwU2cOpqWq7n5fvPOWim7l1tzQ+dSiMRRm0xa6uNexCJww ++3oJSwvMbAmgzvOhqqhlqv+K7u0u7FrFFojESsL36Gq4GBrISnvu2tk7u4GGNTYYQ ++bQIDAQAB ++-----END PUBLIC KEY----- ++""" ++ ++ ++MASTER_SIGNING_PRIV = """ ++-----BEGIN RSA PRIVATE KEY----- ++MIIEpAIBAAKCAQEAtieqrBMTM0MSIbhPKkDcozHqyXKyL/+bXYYw+iVPsns7c7bJ ++zBqenLQlWoRVyrVyBFrrwQSrKu/0Mqn3l639iOGPlUoR3I7aZKIpyEdDkqd3xGIC ++e+BtNNDqhUai67L63hEdG+iYAchi8UZw3LZGtcGpJ3FkBH4cYFX9EOam2QjbD7WY 
++EO7m1+j6XEYIOTCmAP9dGAvBbU0Jblc+wYxG3qNr+2dBWsK76QXWEqib2VSOGP+z ++gjJa8tqY7PXXdOJpalQXNphmD/4o4pHKR4Euy0yL/1oMkpacmrV61LWB8Trnx9nS ++9gdVrUteQF/cL1KAGwOsdVmiLpHfvqLLRqSAAQIDAQABAoIBABjB+HEN4Kixf4fk ++wKHKEhL+SF6b/7sFX00NXZ/KLXRhSnnWSMQ8g/1hgMg2P2DfW4FbCDsCUu9xkLvI ++HTZY+CJAIh9U42uaYPWXkt09TmJi76TZ+2Nx4/XvRUjbCm7Fs1I2ekHeUbbAUS5g +++BsPjTnL+h05zLHNoDa5yT0gVGIgFsQcX/w38arZCe8Rjp9le7PXUB5IIqASsDiw ++t8zJvdyWToeXd0WswCHTQu5coHvKo5MCjIZZ1Ink1yJcCCc3rKDc+q3jB2z9T9oW ++cUsKzJ4VuleiYj1eRxFITBmXbjKrb/GPRRUkeqCQbs68Hyj2d3UtOFDPeF4vng/3 ++jGsHPq8CgYEA0AHAbwykVC6NMa37BTvEqcKoxbjTtErxR+yczlmVDfma9vkwtZvx ++FJdbS/+WGA/ucDby5x5b2T5k1J9ueMR86xukb+HnyS0WKsZ94Ie8WnJAcbp+38M6 ++7LD0u74Cgk93oagDAzUHqdLq9cXxv/ppBpxVB1Uvu8DfVMHj+wt6ie8CgYEA4C7u ++u+6b8EmbGqEdtlPpScKG0WFstJEDGXRARDCRiVP2w6wm25v8UssCPvWcwf8U1Hoq ++lhMY+H6a5dnRRiNYql1MGQAsqMi7VeJNYb0B1uxi7X8MPM+SvXoAglX7wm1z0cVy ++O4CE5sEKbBg6aQabx1x9tzdrm80SKuSsLc5HRQ8CgYEAp/mCKSuQWNru8ruJBwTp ++IB4upN1JOUN77ZVKW+lD0XFMjz1U9JPl77b65ziTQQM8jioRpkqB6cHVM088qxIh ++vssn06Iex/s893YrmPKETJYPLMhqRNEn+JQ+To53ADykY0uGg0SD18SYMbmULHBP +++CKvF6jXT0vGDnA1ZzoxzskCgYEA2nQhYrRS9EVlhP93KpJ+A8gxA5tCCHo+YPFt ++JoWFbCKLlYUNoHZR3IPCPoOsK0Zbj+kz0mXtsUf9vPkR+py669haLQqEejyQgFIz ++QYiiYEKc6/0feapzvXtDP751w7JQaBtVAzJrT0jQ1SCO2oT8C7rPLlgs3fdpOq72 ++MPSPcnUCgYBWHm6bn4HvaoUSr0v2hyD9fHZS/wDTnlXVe5c1XXgyKlJemo5dvycf ++HUCmN/xIuO6AsiMdqIzv+arNJdboz+O+bNtS43LkTJfEH3xj2/DdUogdvOgG/iPM ++u9KBT1h+euws7PqC5qt4vqLwCTTCZXmUS8Riv+62RCC3kZ5AbpT3ZA== ++-----END RSA PRIVATE KEY----- ++""" ++ ++MASTER_SIGNING_PUB = """ ++-----BEGIN PUBLIC KEY----- ++MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAtieqrBMTM0MSIbhPKkDc ++ozHqyXKyL/+bXYYw+iVPsns7c7bJzBqenLQlWoRVyrVyBFrrwQSrKu/0Mqn3l639 ++iOGPlUoR3I7aZKIpyEdDkqd3xGICe+BtNNDqhUai67L63hEdG+iYAchi8UZw3LZG ++tcGpJ3FkBH4cYFX9EOam2QjbD7WYEO7m1+j6XEYIOTCmAP9dGAvBbU0Jblc+wYxG ++3qNr+2dBWsK76QXWEqib2VSOGP+zgjJa8tqY7PXXdOJpalQXNphmD/4o4pHKR4Eu ++y0yL/1oMkpacmrV61LWB8Trnx9nS9gdVrUteQF/cL1KAGwOsdVmiLpHfvqLLRqSA ++AQIDAQAB ++-----END PUBLIC 
KEY----- ++""" ++ ++MINION_PRIV_KEY = """ ++-----BEGIN RSA PRIVATE KEY----- ++MIIEowIBAAKCAQEAsT6TwnlI0L7urjXu6D5E11tFJ/NglQ45jW/WN9tAUNvphq6Q ++cjJCd/aWmdqlqe7ix8y9M/8rgwghRQsnPXblVBvPwFcUEXhMRnOGzqbq/0zyQX01 ++KecT0plBhlDt2lTyCLU6E4XCqyLbPfOxgXzsVqM0/TnzRtpVvGNy+5N4eFGylrjb ++cJhPxKt2G9TDOCM/hYacDs5RVIYQQmcYb8LJq7G3++FfWpYRDaxdKoHNFDspEynd ++jzr67hgThnwzc388OKNJx/7B2atwPTunPb3YBjgwDyRO/01OKK4gUHdw5KoctFgp ++kDCDjwjemlyXV+MYODRTIdtOlAP83ZkntEuLoQIDAQABAoIBAAJOKNtvFGfF2l9H ++S4CXZSUGU0a+JaCkR+wmnjsPwPn/dXDpAe8nGpidpNicPWqRm6WABjeQHaxda+fB ++lpSrRtEdo3zoi2957xQJ5wddDtI1pmXJQrdbm0H/K39oIg/Xtv/IZT769TM6OtVg ++paUxG/aftmeGXDtGfIL8w1jkuPABRBLOakWQA9uVdeG19KTU0Ag8ilpJdEX64uFJ ++W75bpVjT+KO/6aV1inuCntQSP097aYvUWajRwuiYVJOxoBZHme3IObcE6mdnYXeQ ++wblyWBpJUHrOS4MP4HCODV2pHKZ2rr7Nwhh8lMNw/eY9OP0ifz2AcAqe3sUMQOKP ++T0qRC6ECgYEAyeU5JvUPOpxXvvChYh6gJ8pYTIh1ueDP0O5e4t3vhz6lfy9DKtRN ++ROJLUorHvw/yVXMR72nT07a0z2VswcrUSw8ov3sI53F0NkLGEafQ35lVhTGs4vTl ++CFoQCuAKPsxeUl4AIbfbpkDsLGQqzW1diFArK7YeQkpGuGaGodXl480CgYEA4L40 ++x5cUXnAhTPsybo7sbcpiwFHoGblmdkvpYvHA2QxtNSi2iHHdqGo8qP1YsZjKQn58 ++371NhtqidrJ6i/8EBFP1dy+y/jr9qYlZNNGcQeBi+lshrEOIf1ct56KePG79s8lm ++DmD1OY8tO2R37+Py46Nq1n6viT/ST4NjLQI3GyUCgYEAiOswSDA3ZLs0cqRD/gPg ++/zsliLmehTFmHj4aEWcLkz+0Ar3tojUaNdX12QOPFQ7efH6uMhwl8NVeZ6xUBlTk ++hgbAzqLE1hjGBCpiowSZDZqyOcMHiV8ll/VkHcv0hsQYT2m6UyOaDXTH9g70TB6Y ++KOKddGZsvO4cad/1+/jQkB0CgYAzDEEkzLY9tS57M9uCrUgasAu6L2CO50PUvu1m ++Ig9xvZbYqkS7vVFhva/FmrYYsOHQNLbcgz0m0mZwm52mSuh4qzFoPxdjE7cmWSJA ++ExRxCiyxPR3q6PQKKJ0urgtPIs7RlX9u6KsKxfC6OtnbTWWQO0A7NE9e13ZHxUoz ++oPsvWQKBgCa0+Fb2lzUeiQz9bV1CBkWneDZUXuZHmabAZomokX+h/bq+GcJFzZjW ++3kAHwYkIy9IAy3SyO/6CP0V3vAye1p+XbotiwsQ/XZnr0pflSQL3J1l1CyN3aopg ++Niv7k/zBn15B72aK73R/CpUSk9W/eJGqk1NcNwf8hJHsboRYx6BR ++-----END RSA PRIVATE KEY----- ++""" ++ ++ ++MINION_PUB_KEY = """ ++-----BEGIN PUBLIC KEY----- ++MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAsT6TwnlI0L7urjXu6D5E ++11tFJ/NglQ45jW/WN9tAUNvphq6QcjJCd/aWmdqlqe7ix8y9M/8rgwghRQsnPXbl 
++VBvPwFcUEXhMRnOGzqbq/0zyQX01KecT0plBhlDt2lTyCLU6E4XCqyLbPfOxgXzs ++VqM0/TnzRtpVvGNy+5N4eFGylrjbcJhPxKt2G9TDOCM/hYacDs5RVIYQQmcYb8LJ ++q7G3++FfWpYRDaxdKoHNFDspEyndjzr67hgThnwzc388OKNJx/7B2atwPTunPb3Y ++BjgwDyRO/01OKK4gUHdw5KoctFgpkDCDjwjemlyXV+MYODRTIdtOlAP83ZkntEuL ++oQIDAQAB ++-----END PUBLIC KEY----- ++""" ++ ++AES_KEY = "8wxWlOaMMQ4d3yT74LL4+hGrGTf65w8VgrcNjLJeLRQ2Q6zMa8ItY2EQUgMKKDb7JY+RnPUxbB0=" ++ ++ ++@pytest.fixture ++def pki_dir(tmpdir): ++ madir = tmpdir.mkdir("master") ++ ++ mapriv = madir.join("master.pem") ++ mapriv.write(MASTER_PRIV_KEY.strip()) ++ mapub = madir.join("master.pub") ++ mapub.write(MASTER_PUB_KEY.strip()) ++ ++ maspriv = madir.join("master_sign.pem") ++ maspriv.write(MASTER_SIGNING_PRIV.strip()) ++ maspub = madir.join("master_sign.pub") ++ maspub.write(MASTER_SIGNING_PUB.strip()) ++ ++ mipub = madir.mkdir("minions").join("minion") ++ mipub.write(MINION_PUB_KEY.strip()) ++ for sdir in [ ++ "minions_autosign", ++ "minions_denied", ++ "minions_pre", ++ "minions_rejected", ++ ]: ++ madir.mkdir(sdir) ++ ++ midir = tmpdir.mkdir("minion") ++ mipub = midir.join("minion.pub") ++ mipub.write(MINION_PUB_KEY.strip()) ++ mipriv = midir.join("minion.pem") ++ mipriv.write(MINION_PRIV_KEY.strip()) ++ mimapriv = midir.join("minion_master.pub") ++ mimapriv.write(MASTER_PUB_KEY.strip()) ++ mimaspriv = midir.join("master_sign.pub") ++ mimaspriv.write(MASTER_SIGNING_PUB.strip()) ++ try: ++ yield tmpdir ++ finally: ++ tmpdir.remove() ++ + + def test_master_uri(): + """ +@@ -236,3 +470,806 @@ def test_zeromq_async_pub_channel_filtering_decode_message( + res = channel._decode_messages(message) + + assert res.result()["enc"] == "aes" ++ ++ ++def test_req_server_chan_encrypt_v2(pki_dir): ++ loop = salt.ext.tornado.ioloop.IOLoop.current() ++ opts = { ++ "worker_threads": 1, ++ "master_uri": "tcp://127.0.0.1:4506", ++ "interface": "127.0.0.1", ++ "ret_port": 4506, ++ "ipv6": False, ++ "zmq_monitor": False, ++ "mworker_queue_niceness": False, ++ "sock_dir": ".", 
++ "pki_dir": str(pki_dir.join("master")), ++ "id": "minion", ++ "__role": "minion", ++ "keysize": 4096, ++ } ++ server = salt.transport.zeromq.ZeroMQReqServerChannel(opts) ++ dictkey = "pillar" ++ nonce = "abcdefg" ++ pillar_data = {"pillar1": "meh"} ++ ret = server._encrypt_private(pillar_data, dictkey, "minion", nonce) ++ assert "key" in ret ++ assert dictkey in ret ++ ++ key = salt.crypt.get_rsa_key(str(pki_dir.join("minion", "minion.pem")), None) ++ if HAS_M2: ++ aes = key.private_decrypt(ret["key"], RSA.pkcs1_oaep_padding) ++ else: ++ cipher = PKCS1_OAEP.new(key) ++ aes = cipher.decrypt(ret["key"]) ++ pcrypt = salt.crypt.Crypticle(opts, aes) ++ signed_msg = pcrypt.loads(ret[dictkey]) ++ ++ assert "sig" in signed_msg ++ assert "data" in signed_msg ++ data = salt.payload.loads(signed_msg["data"]) ++ assert "key" in data ++ assert data["key"] == ret["key"] ++ assert "key" in data ++ assert data["nonce"] == nonce ++ assert "pillar" in data ++ assert data["pillar"] == pillar_data ++ ++ ++def test_req_server_chan_encrypt_v1(pki_dir): ++ loop = salt.ext.tornado.ioloop.IOLoop.current() ++ opts = { ++ "worker_threads": 1, ++ "master_uri": "tcp://127.0.0.1:4506", ++ "interface": "127.0.0.1", ++ "ret_port": 4506, ++ "ipv6": False, ++ "zmq_monitor": False, ++ "mworker_queue_niceness": False, ++ "sock_dir": ".", ++ "pki_dir": str(pki_dir.join("master")), ++ "id": "minion", ++ "__role": "minion", ++ "keysize": 4096, ++ } ++ server = salt.transport.zeromq.ZeroMQReqServerChannel(opts) ++ dictkey = "pillar" ++ nonce = "abcdefg" ++ pillar_data = {"pillar1": "meh"} ++ ret = server._encrypt_private(pillar_data, dictkey, "minion", sign_messages=False) ++ ++ assert "key" in ret ++ assert dictkey in ret ++ ++ key = salt.crypt.get_rsa_key(str(pki_dir.join("minion", "minion.pem")), None) ++ if HAS_M2: ++ aes = key.private_decrypt(ret["key"], RSA.pkcs1_oaep_padding) ++ else: ++ cipher = PKCS1_OAEP.new(key) ++ aes = cipher.decrypt(ret["key"]) ++ pcrypt = salt.crypt.Crypticle(opts, aes) 
++ data = pcrypt.loads(ret[dictkey]) ++ assert data == pillar_data ++ ++ ++def test_req_chan_decode_data_dict_entry_v1(pki_dir): ++ mockloop = MagicMock() ++ opts = { ++ "master_uri": "tcp://127.0.0.1:4506", ++ "interface": "127.0.0.1", ++ "ret_port": 4506, ++ "ipv6": False, ++ "sock_dir": ".", ++ "pki_dir": str(pki_dir.join("minion")), ++ "id": "minion", ++ "__role": "minion", ++ "keysize": 4096, ++ } ++ master_opts = dict(opts, pki_dir=str(pki_dir.join("master"))) ++ server = salt.transport.zeromq.ZeroMQReqServerChannel(master_opts) ++ client = salt.transport.zeromq.AsyncZeroMQReqChannel(opts, io_loop=mockloop) ++ dictkey = "pillar" ++ target = "minion" ++ pillar_data = {"pillar1": "meh"} ++ ret = server._encrypt_private(pillar_data, dictkey, target, sign_messages=False) ++ key = client.auth.get_keys() ++ if HAS_M2: ++ aes = key.private_decrypt(ret["key"], RSA.pkcs1_oaep_padding) ++ else: ++ cipher = PKCS1_OAEP.new(key) ++ aes = cipher.decrypt(ret["key"]) ++ pcrypt = salt.crypt.Crypticle(client.opts, aes) ++ ret_pillar_data = pcrypt.loads(ret[dictkey]) ++ assert ret_pillar_data == pillar_data ++ ++ ++async def test_req_chan_decode_data_dict_entry_v2(pki_dir): ++ mockloop = MagicMock() ++ opts = { ++ "master_uri": "tcp://127.0.0.1:4506", ++ "interface": "127.0.0.1", ++ "ret_port": 4506, ++ "ipv6": False, ++ "sock_dir": ".", ++ "pki_dir": str(pki_dir.join("minion")), ++ "id": "minion", ++ "__role": "minion", ++ "keysize": 4096, ++ } ++ master_opts = dict(opts, pki_dir=str(pki_dir.join("master"))) ++ server = salt.transport.zeromq.ZeroMQReqServerChannel(master_opts) ++ client = salt.transport.zeromq.AsyncZeroMQReqChannel(opts, io_loop=mockloop) ++ ++ dictkey = "pillar" ++ target = "minion" ++ pillar_data = {"pillar1": "meh"} ++ ++ # Mock auth and message client. 
++ auth = client.auth ++ auth._crypticle = salt.crypt.Crypticle(opts, AES_KEY) ++ client.auth = MagicMock() ++ client.auth.authenticated = True ++ client.auth.get_keys = auth.get_keys ++ client.auth.crypticle.dumps = auth.crypticle.dumps ++ client.auth.crypticle.loads = auth.crypticle.loads ++ client.message_client = MagicMock() ++ ++ @salt.ext.tornado.gen.coroutine ++ def mocksend(msg, timeout=60, tries=3): ++ client.message_client.msg = msg ++ load = client.auth.crypticle.loads(msg["load"]) ++ ret = server._encrypt_private( ++ pillar_data, dictkey, target, nonce=load["nonce"], sign_messages=True ++ ) ++ raise salt.ext.tornado.gen.Return(ret) ++ ++ client.message_client.send = mocksend ++ ++ # Note the 'ver' value in 'load' does not represent the the 'version' sent ++ # in the top level of the transport's message. ++ load = { ++ "id": target, ++ "grains": {}, ++ "saltenv": "base", ++ "pillarenv": "base", ++ "pillar_override": True, ++ "extra_minion_data": {}, ++ "ver": "2", ++ "cmd": "_pillar", ++ } ++ ret = await client.crypted_transfer_decode_dictentry( ++ load, ++ dictkey="pillar", ++ ) ++ assert "version" in client.message_client.msg ++ assert client.message_client.msg["version"] == 2 ++ assert ret == {"pillar1": "meh"} ++ ++ ++async def test_req_chan_decode_data_dict_entry_v2_bad_nonce(pki_dir): ++ mockloop = MagicMock() ++ opts = { ++ "master_uri": "tcp://127.0.0.1:4506", ++ "interface": "127.0.0.1", ++ "ret_port": 4506, ++ "ipv6": False, ++ "sock_dir": ".", ++ "pki_dir": str(pki_dir.join("minion")), ++ "id": "minion", ++ "__role": "minion", ++ "keysize": 4096, ++ } ++ master_opts = dict(opts, pki_dir=str(pki_dir.join("master"))) ++ server = salt.transport.zeromq.ZeroMQReqServerChannel(master_opts) ++ client = salt.transport.zeromq.AsyncZeroMQReqChannel(opts, io_loop=mockloop) ++ ++ dictkey = "pillar" ++ badnonce = "abcdefg" ++ target = "minion" ++ pillar_data = {"pillar1": "meh"} ++ ++ # Mock auth and message client. 
++ auth = client.auth ++ auth._crypticle = salt.crypt.Crypticle(opts, AES_KEY) ++ client.auth = MagicMock() ++ client.auth.authenticated = True ++ client.auth.get_keys = auth.get_keys ++ client.auth.crypticle.dumps = auth.crypticle.dumps ++ client.auth.crypticle.loads = auth.crypticle.loads ++ client.message_client = MagicMock() ++ ret = server._encrypt_private( ++ pillar_data, dictkey, target, nonce=badnonce, sign_messages=True ++ ) ++ ++ @salt.ext.tornado.gen.coroutine ++ def mocksend(msg, timeout=60, tries=3): ++ client.message_client.msg = msg ++ raise salt.ext.tornado.gen.Return(ret) ++ ++ client.message_client.send = mocksend ++ ++ # Note the 'ver' value in 'load' does not represent the the 'version' sent ++ # in the top level of the transport's message. ++ load = { ++ "id": target, ++ "grains": {}, ++ "saltenv": "base", ++ "pillarenv": "base", ++ "pillar_override": True, ++ "extra_minion_data": {}, ++ "ver": "2", ++ "cmd": "_pillar", ++ } ++ ++ with pytest.raises(salt.crypt.AuthenticationError) as excinfo: ++ ret = await client.crypted_transfer_decode_dictentry( ++ load, ++ dictkey="pillar", ++ ) ++ assert "Pillar nonce verification failed." == excinfo.value.message ++ ++ ++async def test_req_chan_decode_data_dict_entry_v2_bad_signature(pki_dir): ++ mockloop = MagicMock() ++ opts = { ++ "master_uri": "tcp://127.0.0.1:4506", ++ "interface": "127.0.0.1", ++ "ret_port": 4506, ++ "ipv6": False, ++ "sock_dir": ".", ++ "pki_dir": str(pki_dir.join("minion")), ++ "id": "minion", ++ "__role": "minion", ++ "keysize": 4096, ++ } ++ master_opts = dict(opts, pki_dir=str(pki_dir.join("master"))) ++ server = salt.transport.zeromq.ZeroMQReqServerChannel(master_opts) ++ client = salt.transport.zeromq.AsyncZeroMQReqChannel(opts, io_loop=mockloop) ++ ++ dictkey = "pillar" ++ badnonce = "abcdefg" ++ target = "minion" ++ pillar_data = {"pillar1": "meh"} ++ ++ # Mock auth and message client. 
++ auth = client.auth ++ auth._crypticle = salt.crypt.Crypticle(opts, AES_KEY) ++ client.auth = MagicMock() ++ client.auth.authenticated = True ++ client.auth.get_keys = auth.get_keys ++ client.auth.crypticle.dumps = auth.crypticle.dumps ++ client.auth.crypticle.loads = auth.crypticle.loads ++ client.message_client = MagicMock() ++ ++ @salt.ext.tornado.gen.coroutine ++ def mocksend(msg, timeout=60, tries=3): ++ client.message_client.msg = msg ++ load = client.auth.crypticle.loads(msg["load"]) ++ ret = server._encrypt_private( ++ pillar_data, dictkey, target, nonce=load["nonce"], sign_messages=True ++ ) ++ ++ key = client.auth.get_keys() ++ if HAS_M2: ++ aes = key.private_decrypt(ret["key"], RSA.pkcs1_oaep_padding) ++ else: ++ cipher = PKCS1_OAEP.new(key) ++ aes = cipher.decrypt(ret["key"]) ++ pcrypt = salt.crypt.Crypticle(client.opts, aes) ++ signed_msg = pcrypt.loads(ret[dictkey]) ++ # Changing the pillar data will cause the signature verification to ++ # fail. ++ data = salt.payload.loads(signed_msg["data"]) ++ data["pillar"] = {"pillar1": "bar"} ++ signed_msg["data"] = salt.payload.dumps(data) ++ ret[dictkey] = pcrypt.dumps(signed_msg) ++ raise salt.ext.tornado.gen.Return(ret) ++ ++ client.message_client.send = mocksend ++ ++ # Note the 'ver' value in 'load' does not represent the the 'version' sent ++ # in the top level of the transport's message. ++ load = { ++ "id": target, ++ "grains": {}, ++ "saltenv": "base", ++ "pillarenv": "base", ++ "pillar_override": True, ++ "extra_minion_data": {}, ++ "ver": "2", ++ "cmd": "_pillar", ++ } ++ ++ with pytest.raises(salt.crypt.AuthenticationError) as excinfo: ++ ret = await client.crypted_transfer_decode_dictentry( ++ load, ++ dictkey="pillar", ++ ) ++ assert "Pillar payload signature failed to validate." 
== excinfo.value.message ++ ++ ++async def test_req_chan_decode_data_dict_entry_v2_bad_key(pki_dir): ++ mockloop = MagicMock() ++ opts = { ++ "master_uri": "tcp://127.0.0.1:4506", ++ "interface": "127.0.0.1", ++ "ret_port": 4506, ++ "ipv6": False, ++ "sock_dir": ".", ++ "pki_dir": str(pki_dir.join("minion")), ++ "id": "minion", ++ "__role": "minion", ++ "keysize": 4096, ++ } ++ master_opts = dict(opts, pki_dir=str(pki_dir.join("master"))) ++ server = salt.transport.zeromq.ZeroMQReqServerChannel(master_opts) ++ client = salt.transport.zeromq.AsyncZeroMQReqChannel(opts, io_loop=mockloop) ++ ++ dictkey = "pillar" ++ badnonce = "abcdefg" ++ target = "minion" ++ pillar_data = {"pillar1": "meh"} ++ ++ # Mock auth and message client. ++ auth = client.auth ++ auth._crypticle = salt.crypt.Crypticle(opts, AES_KEY) ++ client.auth = MagicMock() ++ client.auth.authenticated = True ++ client.auth.get_keys = auth.get_keys ++ client.auth.crypticle.dumps = auth.crypticle.dumps ++ client.auth.crypticle.loads = auth.crypticle.loads ++ client.message_client = MagicMock() ++ ++ @salt.ext.tornado.gen.coroutine ++ def mocksend(msg, timeout=60, tries=3): ++ client.message_client.msg = msg ++ load = client.auth.crypticle.loads(msg["load"]) ++ ret = server._encrypt_private( ++ pillar_data, dictkey, target, nonce=load["nonce"], sign_messages=True ++ ) ++ ++ key = client.auth.get_keys() ++ if HAS_M2: ++ aes = key.private_decrypt(ret["key"], RSA.pkcs1_oaep_padding) ++ else: ++ cipher = PKCS1_OAEP.new(key) ++ aes = cipher.decrypt(ret["key"]) ++ pcrypt = salt.crypt.Crypticle(client.opts, aes) ++ signed_msg = pcrypt.loads(ret[dictkey]) ++ ++ # Now encrypt with a different key ++ key = salt.crypt.Crypticle.generate_key_string() ++ pcrypt = salt.crypt.Crypticle(opts, key) ++ pubfn = os.path.join(master_opts["pki_dir"], "minions", "minion") ++ pub = salt.crypt.get_rsa_pub_key(pubfn) ++ ret[dictkey] = pcrypt.dumps(signed_msg) ++ key = salt.utils.stringutils.to_bytes(key) ++ if HAS_M2: ++ ret["key"] = 
pub.public_encrypt(key, RSA.pkcs1_oaep_padding) ++ else: ++ cipher = PKCS1_OAEP.new(pub) ++ ret["key"] = cipher.encrypt(key) ++ raise salt.ext.tornado.gen.Return(ret) ++ ++ client.message_client.send = mocksend ++ ++ # Note the 'ver' value in 'load' does not represent the the 'version' sent ++ # in the top level of the transport's message. ++ load = { ++ "id": target, ++ "grains": {}, ++ "saltenv": "base", ++ "pillarenv": "base", ++ "pillar_override": True, ++ "extra_minion_data": {}, ++ "ver": "2", ++ "cmd": "_pillar", ++ } ++ ++ with pytest.raises(salt.crypt.AuthenticationError) as excinfo: ++ ret = await client.crypted_transfer_decode_dictentry( ++ load, ++ dictkey="pillar", ++ ) ++ assert "Key verification failed." == excinfo.value.message ++ ++ ++async def test_req_serv_auth_v1(pki_dir): ++ mockloop = MagicMock() ++ opts = { ++ "master_uri": "tcp://127.0.0.1:4506", ++ "interface": "127.0.0.1", ++ "ret_port": 4506, ++ "ipv6": False, ++ "sock_dir": ".", ++ "pki_dir": str(pki_dir.join("minion")), ++ "id": "minion", ++ "__role": "minion", ++ "keysize": 4096, ++ "max_minions": 0, ++ "auto_accept": False, ++ "open_mode": False, ++ "key_pass": None, ++ "master_sign_pubkey": False, ++ "publish_port": 4505, ++ "auth_mode": 1, ++ } ++ SMaster.secrets["aes"] = { ++ "secret": multiprocessing.Array( ++ ctypes.c_char, ++ salt.utils.stringutils.to_bytes(salt.crypt.Crypticle.generate_key_string()), ++ ), ++ "reload": salt.crypt.Crypticle.generate_key_string, ++ } ++ master_opts = dict(opts, pki_dir=str(pki_dir.join("master"))) ++ server = salt.transport.zeromq.ZeroMQReqServerChannel(master_opts) ++ server.auto_key = salt.daemons.masterapi.AutoKey(server.opts) ++ server.cache_cli = False ++ server.master_key = salt.crypt.MasterKeys(server.opts) ++ ++ pub = salt.crypt.get_rsa_pub_key(str(pki_dir.join("minion", "minion.pub"))) ++ token = salt.utils.stringutils.to_bytes(salt.crypt.Crypticle.generate_key_string()) ++ nonce = uuid.uuid4().hex ++ ++ # We need to read the public key 
with fopen otherwise the newlines might ++ # not match on windows. ++ with salt.utils.files.fopen(str(pki_dir.join("minion", "minion.pub")), "r") as fp: ++ pub_key = fp.read() ++ ++ load = { ++ "cmd": "_auth", ++ "id": "minion", ++ "token": token, ++ "pub": pub_key, ++ } ++ ret = server._auth(load, sign_messages=False) ++ assert "load" not in ret ++ ++ ++async def test_req_serv_auth_v2(pki_dir): ++ mockloop = MagicMock() ++ opts = { ++ "master_uri": "tcp://127.0.0.1:4506", ++ "interface": "127.0.0.1", ++ "ret_port": 4506, ++ "ipv6": False, ++ "sock_dir": ".", ++ "pki_dir": str(pki_dir.join("minion")), ++ "id": "minion", ++ "__role": "minion", ++ "keysize": 4096, ++ "max_minions": 0, ++ "auto_accept": False, ++ "open_mode": False, ++ "key_pass": None, ++ "master_sign_pubkey": False, ++ "publish_port": 4505, ++ "auth_mode": 1, ++ } ++ SMaster.secrets["aes"] = { ++ "secret": multiprocessing.Array( ++ ctypes.c_char, ++ salt.utils.stringutils.to_bytes(salt.crypt.Crypticle.generate_key_string()), ++ ), ++ "reload": salt.crypt.Crypticle.generate_key_string, ++ } ++ master_opts = dict(opts, pki_dir=str(pki_dir.join("master"))) ++ server = salt.transport.zeromq.ZeroMQReqServerChannel(master_opts) ++ server.auto_key = salt.daemons.masterapi.AutoKey(server.opts) ++ server.cache_cli = False ++ server.master_key = salt.crypt.MasterKeys(server.opts) ++ ++ pub = salt.crypt.get_rsa_pub_key(str(pki_dir.join("minion", "minion.pub"))) ++ token = salt.utils.stringutils.to_bytes(salt.crypt.Crypticle.generate_key_string()) ++ nonce = uuid.uuid4().hex ++ ++ # We need to read the public key with fopen otherwise the newlines might ++ # not match on windows. 
++ with salt.utils.files.fopen(str(pki_dir.join("minion", "minion.pub")), "r") as fp: ++ pub_key = fp.read() ++ ++ load = { ++ "cmd": "_auth", ++ "id": "minion", ++ "nonce": nonce, ++ "token": token, ++ "pub": pub_key, ++ } ++ ret = server._auth(load, sign_messages=True) ++ assert "sig" in ret ++ assert "load" in ret ++ ++ ++async def test_req_chan_auth_v2(pki_dir, io_loop): ++ mockloop = MagicMock() ++ opts = { ++ "master_uri": "tcp://127.0.0.1:4506", ++ "interface": "127.0.0.1", ++ "ret_port": 4506, ++ "ipv6": False, ++ "sock_dir": ".", ++ "pki_dir": str(pki_dir.join("minion")), ++ "id": "minion", ++ "__role": "minion", ++ "keysize": 4096, ++ "max_minions": 0, ++ "auto_accept": False, ++ "open_mode": False, ++ "key_pass": None, ++ "publish_port": 4505, ++ "auth_mode": 1, ++ } ++ SMaster.secrets["aes"] = { ++ "secret": multiprocessing.Array( ++ ctypes.c_char, ++ salt.utils.stringutils.to_bytes(salt.crypt.Crypticle.generate_key_string()), ++ ), ++ "reload": salt.crypt.Crypticle.generate_key_string, ++ } ++ master_opts = dict(opts, pki_dir=str(pki_dir.join("master"))) ++ master_opts["master_sign_pubkey"] = False ++ server = salt.transport.zeromq.ZeroMQReqServerChannel(master_opts) ++ server.auto_key = salt.daemons.masterapi.AutoKey(server.opts) ++ server.cache_cli = False ++ server.master_key = salt.crypt.MasterKeys(server.opts) ++ opts["verify_master_pubkey_sign"] = False ++ opts["always_verify_signature"] = False ++ client = salt.transport.zeromq.AsyncZeroMQReqChannel(opts, io_loop=io_loop) ++ signin_payload = client.auth.minion_sign_in_payload() ++ pload = client._package_load(signin_payload) ++ assert "version" in pload ++ assert pload["version"] == 2 ++ ++ ret = server._auth(pload["load"], sign_messages=True) ++ assert "sig" in ret ++ ret = client.auth.handle_signin_response(signin_payload, ret) ++ assert "aes" in ret ++ assert "master_uri" in ret ++ assert "publish_port" in ret ++ ++ ++async def test_req_chan_auth_v2_with_master_signing(pki_dir, io_loop): ++ 
mockloop = MagicMock() ++ opts = { ++ "master_uri": "tcp://127.0.0.1:4506", ++ "interface": "127.0.0.1", ++ "ret_port": 4506, ++ "ipv6": False, ++ "sock_dir": ".", ++ "pki_dir": str(pki_dir.join("minion")), ++ "id": "minion", ++ "__role": "minion", ++ "keysize": 4096, ++ "max_minions": 0, ++ "auto_accept": False, ++ "open_mode": False, ++ "key_pass": None, ++ "publish_port": 4505, ++ "auth_mode": 1, ++ } ++ SMaster.secrets["aes"] = { ++ "secret": multiprocessing.Array( ++ ctypes.c_char, ++ salt.utils.stringutils.to_bytes(salt.crypt.Crypticle.generate_key_string()), ++ ), ++ "reload": salt.crypt.Crypticle.generate_key_string, ++ } ++ master_opts = dict(opts, pki_dir=str(pki_dir.join("master"))) ++ master_opts["master_sign_pubkey"] = True ++ master_opts["master_use_pubkey_signature"] = False ++ master_opts["signing_key_pass"] = True ++ master_opts["master_sign_key_name"] = "master_sign" ++ server = salt.transport.zeromq.ZeroMQReqServerChannel(master_opts) ++ server.auto_key = salt.daemons.masterapi.AutoKey(server.opts) ++ server.cache_cli = False ++ server.master_key = salt.crypt.MasterKeys(server.opts) ++ opts["verify_master_pubkey_sign"] = True ++ opts["always_verify_signature"] = True ++ opts["master_sign_key_name"] = "master_sign" ++ opts["master"] = "master" ++ ++ assert ( ++ pki_dir.join("minion", "minion_master.pub").read() ++ == pki_dir.join("master", "master.pub").read() ++ ) ++ ++ client = salt.transport.zeromq.AsyncZeroMQReqChannel(opts, io_loop=io_loop) ++ signin_payload = client.auth.minion_sign_in_payload() ++ pload = client._package_load(signin_payload) ++ assert "version" in pload ++ assert pload["version"] == 2 ++ ++ server_reply = server._auth(pload["load"], sign_messages=True) ++ # With version 2 we always get a clear signed response ++ assert "enc" in server_reply ++ assert server_reply["enc"] == "clear" ++ assert "sig" in server_reply ++ assert "load" in server_reply ++ ret = client.auth.handle_signin_response(signin_payload, server_reply) ++ 
assert "aes" in ret ++ assert "master_uri" in ret ++ assert "publish_port" in ret ++ ++ # Now create a new master key pair and try auth with it. ++ mapriv = pki_dir.join("master", "master.pem") ++ mapriv.remove() ++ mapriv.write(MASTER2_PRIV_KEY.strip()) ++ mapub = pki_dir.join("master", "master.pub") ++ mapub.remove() ++ mapub.write(MASTER2_PUB_KEY.strip()) ++ ++ server = salt.transport.zeromq.ZeroMQReqServerChannel(master_opts) ++ server.auto_key = salt.daemons.masterapi.AutoKey(server.opts) ++ server.cache_cli = False ++ server.master_key = salt.crypt.MasterKeys(server.opts) ++ ++ signin_payload = client.auth.minion_sign_in_payload() ++ pload = client._package_load(signin_payload) ++ server_reply = server._auth(pload["load"], sign_messages=True) ++ ret = client.auth.handle_signin_response(signin_payload, server_reply) ++ ++ assert "aes" in ret ++ assert "master_uri" in ret ++ assert "publish_port" in ret ++ ++ assert ( ++ pki_dir.join("minion", "minion_master.pub").read() ++ == pki_dir.join("master", "master.pub").read() ++ ) ++ ++ ++async def test_req_chan_auth_v2_new_minion_with_master_pub(pki_dir, io_loop): ++ ++ pki_dir.join("master", "minions", "minion").remove() ++ mockloop = MagicMock() ++ opts = { ++ "master_uri": "tcp://127.0.0.1:4506", ++ "interface": "127.0.0.1", ++ "ret_port": 4506, ++ "ipv6": False, ++ "sock_dir": ".", ++ "pki_dir": str(pki_dir.join("minion")), ++ "id": "minion", ++ "__role": "minion", ++ "keysize": 4096, ++ "max_minions": 0, ++ "auto_accept": False, ++ "open_mode": False, ++ "key_pass": None, ++ "publish_port": 4505, ++ "auth_mode": 1, ++ "acceptance_wait_time": 3, ++ } ++ SMaster.secrets["aes"] = { ++ "secret": multiprocessing.Array( ++ ctypes.c_char, ++ salt.utils.stringutils.to_bytes(salt.crypt.Crypticle.generate_key_string()), ++ ), ++ "reload": salt.crypt.Crypticle.generate_key_string, ++ } ++ master_opts = dict(opts, pki_dir=str(pki_dir.join("master"))) ++ master_opts["master_sign_pubkey"] = False ++ server = 
salt.transport.zeromq.ZeroMQReqServerChannel(master_opts) ++ server.auto_key = salt.daemons.masterapi.AutoKey(server.opts) ++ server.cache_cli = False ++ server.master_key = salt.crypt.MasterKeys(server.opts) ++ opts["verify_master_pubkey_sign"] = False ++ opts["always_verify_signature"] = False ++ client = salt.transport.zeromq.AsyncZeroMQReqChannel(opts, io_loop=io_loop) ++ signin_payload = client.auth.minion_sign_in_payload() ++ pload = client._package_load(signin_payload) ++ assert "version" in pload ++ assert pload["version"] == 2 ++ ++ ret = server._auth(pload["load"], sign_messages=True) ++ assert "sig" in ret ++ ret = client.auth.handle_signin_response(signin_payload, ret) ++ assert ret == "retry" ++ ++ ++async def test_req_chan_auth_v2_new_minion_with_master_pub_bad_sig(pki_dir, io_loop): ++ ++ pki_dir.join("master", "minions", "minion").remove() ++ ++ # Give the master a different key than the minion has. ++ mapriv = pki_dir.join("master", "master.pem") ++ mapriv.remove() ++ mapriv.write(MASTER2_PRIV_KEY.strip()) ++ mapub = pki_dir.join("master", "master.pub") ++ mapub.remove() ++ mapub.write(MASTER2_PUB_KEY.strip()) ++ ++ mockloop = MagicMock() ++ opts = { ++ "master_uri": "tcp://127.0.0.1:4506", ++ "interface": "127.0.0.1", ++ "ret_port": 4506, ++ "ipv6": False, ++ "sock_dir": ".", ++ "pki_dir": str(pki_dir.join("minion")), ++ "id": "minion", ++ "__role": "minion", ++ "keysize": 4096, ++ "max_minions": 0, ++ "auto_accept": False, ++ "open_mode": False, ++ "key_pass": None, ++ "publish_port": 4505, ++ "auth_mode": 1, ++ "acceptance_wait_time": 3, ++ } ++ SMaster.secrets["aes"] = { ++ "secret": multiprocessing.Array( ++ ctypes.c_char, ++ salt.utils.stringutils.to_bytes(salt.crypt.Crypticle.generate_key_string()), ++ ), ++ "reload": salt.crypt.Crypticle.generate_key_string, ++ } ++ master_opts = dict(opts, pki_dir=str(pki_dir.join("master"))) ++ master_opts["master_sign_pubkey"] = False ++ server = salt.transport.zeromq.ZeroMQReqServerChannel(master_opts) 
++ server.auto_key = salt.daemons.masterapi.AutoKey(server.opts) ++ server.cache_cli = False ++ server.master_key = salt.crypt.MasterKeys(server.opts) ++ opts["verify_master_pubkey_sign"] = False ++ opts["always_verify_signature"] = False ++ client = salt.transport.zeromq.AsyncZeroMQReqChannel(opts, io_loop=io_loop) ++ signin_payload = client.auth.minion_sign_in_payload() ++ pload = client._package_load(signin_payload) ++ assert "version" in pload ++ assert pload["version"] == 2 ++ ++ ret = server._auth(pload["load"], sign_messages=True) ++ assert "sig" in ret ++ with pytest.raises(salt.crypt.SaltClientError, match="Invalid signature"): ++ ret = client.auth.handle_signin_response(signin_payload, ret) ++ ++ ++async def test_req_chan_auth_v2_new_minion_without_master_pub(pki_dir, io_loop): ++ ++ pki_dir.join("master", "minions", "minion").remove() ++ pki_dir.join("minion", "minion_master.pub").remove() ++ mockloop = MagicMock() ++ opts = { ++ "master_uri": "tcp://127.0.0.1:4506", ++ "interface": "127.0.0.1", ++ "ret_port": 4506, ++ "ipv6": False, ++ "sock_dir": ".", ++ "pki_dir": str(pki_dir.join("minion")), ++ "id": "minion", ++ "__role": "minion", ++ "keysize": 4096, ++ "max_minions": 0, ++ "auto_accept": False, ++ "open_mode": False, ++ "key_pass": None, ++ "publish_port": 4505, ++ "auth_mode": 1, ++ "acceptance_wait_time": 3, ++ } ++ SMaster.secrets["aes"] = { ++ "secret": multiprocessing.Array( ++ ctypes.c_char, ++ salt.utils.stringutils.to_bytes(salt.crypt.Crypticle.generate_key_string()), ++ ), ++ "reload": salt.crypt.Crypticle.generate_key_string, ++ } ++ master_opts = dict(opts, pki_dir=str(pki_dir.join("master"))) ++ master_opts["master_sign_pubkey"] = False ++ server = salt.transport.zeromq.ZeroMQReqServerChannel(master_opts) ++ server.auto_key = salt.daemons.masterapi.AutoKey(server.opts) ++ server.cache_cli = False ++ server.master_key = salt.crypt.MasterKeys(server.opts) ++ opts["verify_master_pubkey_sign"] = False ++ opts["always_verify_signature"] = 
False ++ client = salt.transport.zeromq.AsyncZeroMQReqChannel(opts, io_loop=io_loop) ++ signin_payload = client.auth.minion_sign_in_payload() ++ pload = client._package_load(signin_payload) ++ assert "version" in pload ++ assert pload["version"] == 2 ++ ++ ret = server._auth(pload["load"], sign_messages=True) ++ assert "sig" in ret ++ ret = client.auth.handle_signin_response(signin_payload, ret) ++ assert ret == "retry" +diff --git a/tests/pytests/unit/utils/test_minions.py b/tests/pytests/unit/utils/test_minions.py +index 6bc6c80bbd..2e0fa5a653 100644 +--- a/tests/pytests/unit/utils/test_minions.py ++++ b/tests/pytests/unit/utils/test_minions.py +@@ -1,3 +1,4 @@ ++import pytest + import salt.utils.minions + import salt.utils.network + from tests.support.mock import patch +@@ -53,3 +54,61 @@ def test_connected_ids_remote_minions(): + with patch_net, patch_list, patch_fetch, patch_remote_net: + ret = ckminions.connected_ids() + assert ret == {minion2, minion} ++ ++ ++# These validate_tgt tests make the assumption that CkMinions.check_minions is ++# correct. In other words, these tests are only worthwhile if check_minions is ++# also correct. 
++def test_validate_tgt_should_return_false_when_no_valid_minions_have_been_found(): ++ ckminions = salt.utils.minions.CkMinions(opts={}) ++ with patch( ++ "salt.utils.minions.CkMinions.check_minions", autospec=True, return_value={} ++ ): ++ result = ckminions.validate_tgt("fnord", "fnord", "fnord", minions=[]) ++ assert result is False ++ ++ ++@pytest.mark.parametrize( ++ "valid_minions, target_minions", ++ [ ++ (["one", "two", "three"], ["one", "two", "five"]), ++ (["one"], ["one", "two"]), ++ (["one", "two", "three", "four"], ["five"]), ++ ], ++) ++def test_validate_tgt_should_return_false_when_minions_have_minions_not_in_valid_minions( ++ valid_minions, target_minions ++): ++ ckminions = salt.utils.minions.CkMinions(opts={}) ++ with patch( ++ "salt.utils.minions.CkMinions.check_minions", ++ autospec=True, ++ return_value={"minions": valid_minions}, ++ ): ++ result = ckminions.validate_tgt( ++ "fnord", "fnord", "fnord", minions=target_minions ++ ) ++ assert result is False ++ ++ ++@pytest.mark.parametrize( ++ "valid_minions, target_minions", ++ [ ++ (["one", "two", "three", "five"], ["one", "two", "five"]), ++ (["one"], ["one"]), ++ (["one", "two", "three", "four", "five"], ["five"]), ++ ], ++) ++def test_validate_tgt_should_return_true_when_all_minions_are_found_in_valid_minions( ++ valid_minions, target_minions ++): ++ ckminions = salt.utils.minions.CkMinions(opts={}) ++ with patch( ++ "salt.utils.minions.CkMinions.check_minions", ++ autospec=True, ++ return_value={"minions": valid_minions}, ++ ): ++ result = ckminions.validate_tgt( ++ "fnord", "fnord", "fnord", minions=target_minions ++ ) ++ assert result is True +diff --git a/tests/pytests/unit/utils/test_network.py b/tests/pytests/unit/utils/test_network.py +new file mode 100644 +index 0000000000..c5f976f674 +--- /dev/null ++++ b/tests/pytests/unit/utils/test_network.py +@@ -0,0 +1,8 @@ ++import salt.utils.network ++ ++ ++def test_junos_ifconfig_output_parsing(): ++ ret = 
salt.utils.network._junos_interfaces_ifconfig( ++ "inet mtu 0 local=" + " " * 3456 ++ ) ++ assert ret == {"inet": {"up": False}} +diff --git a/tests/unit/transport/test_ipc.py b/tests/unit/transport/test_ipc.py +index 9d84f59320..7177b7f6c4 100644 +--- a/tests/unit/transport/test_ipc.py ++++ b/tests/unit/transport/test_ipc.py +@@ -40,6 +40,8 @@ class IPCMessagePubSubCase(salt.ext.tornado.testing.AsyncTestCase): + def setUp(self): + super().setUp() + self.opts = {"ipc_write_buffer": 0} ++ if not os.path.exists(RUNTIME_VARS.TMP): ++ os.mkdir(RUNTIME_VARS.TMP) + self.socket_path = os.path.join(RUNTIME_VARS.TMP, "ipc_test.ipc") + self.pub_channel = self._get_pub_channel() + self.sub_channel = self._get_sub_channel() +-- +2.35.1 + + diff --git a/fix-ownership-of-salt-thin-directory-when-using-the-.patch b/fix-ownership-of-salt-thin-directory-when-using-the-.patch index ec3293c..ab9a8bb 100644 --- a/fix-ownership-of-salt-thin-directory-when-using-the-.patch +++ b/fix-ownership-of-salt-thin-directory-when-using-the-.patch @@ -1,4 +1,4 @@ -From 7ac8c79f38960c787f6b5324e347707325c65e79 Mon Sep 17 00:00:00 2001 +From 34a81d88db3862bcc03cdda4974e576723af7643 Mon Sep 17 00:00:00 2001 From: Victor Zhestkov Date: Mon, 27 Jun 2022 18:03:49 +0300 Subject: [PATCH] Fix ownership of salt thin directory when using the @@ -45,6 +45,6 @@ index 293ea1b7fa..95171f7aea 100644 if venv_salt_call is None: # Use Salt thin only if Salt Bundle (venv-salt-minion) is not available -- -2.37.3 +2.36.1 diff --git a/fix-regression-with-depending-client.ssh-on-psutil-b.patch b/fix-regression-with-depending-client.ssh-on-psutil-b.patch index 8909b36..58a1878 100644 --- a/fix-regression-with-depending-client.ssh-on-psutil-b.patch +++ b/fix-regression-with-depending-client.ssh-on-psutil-b.patch @@ -1,4 +1,4 @@ -From 6e0e6c26f4ec0955b55937ae3b58ca485b99facd Mon Sep 17 00:00:00 2001 +From 0c4a71224d49e778b4a2c683c63de52a0876de69 Mon Sep 17 00:00:00 2001 From: Victor Zhestkov Date: Tue, 12 Apr 2022 10:08:17 
+0300 Subject: [PATCH] Fix regression with depending client.ssh on psutil @@ -9,7 +9,7 @@ Subject: [PATCH] Fix regression with depending client.ssh on psutil 1 file changed, 12 insertions(+), 2 deletions(-) diff --git a/salt/client/ssh/__init__.py b/salt/client/ssh/__init__.py -index fe1213b723..1b76a38e0b 100644 +index 6d24d8d716..396f9457f2 100644 --- a/salt/client/ssh/__init__.py +++ b/salt/client/ssh/__init__.py @@ -12,7 +12,6 @@ import hashlib @@ -20,7 +20,7 @@ index fe1213b723..1b76a38e0b 100644 import queue import re import shlex -@@ -420,6 +419,16 @@ class SSH(MultiprocessingStateMixin): +@@ -407,6 +406,16 @@ class SSH: self.__parsed_rosters[self.ROSTER_UPDATE_FLAG] = False return @@ -37,7 +37,7 @@ index fe1213b723..1b76a38e0b 100644 def _update_roster(self, hostname=None, user=None): """ Update default flat roster with the passed in information. -@@ -639,7 +648,8 @@ class SSH(MultiprocessingStateMixin): +@@ -626,7 +635,8 @@ class SSH: pid_running = ( False if cached_session["pid"] == 0 @@ -48,6 +48,6 @@ index fe1213b723..1b76a38e0b 100644 if ( pid_running and prev_session_running < self.max_pid_wait -- -2.37.3 +2.35.1 diff --git a/fix-salt-call-event.send-call-with-grains-and-pillar.patch b/fix-salt-call-event.send-call-with-grains-and-pillar.patch new file mode 100644 index 0000000..3dcec46 --- /dev/null +++ b/fix-salt-call-event.send-call-with-grains-and-pillar.patch @@ -0,0 +1,70 @@ +From 245bd5f2aab798f7f647ad2d2307c0dd1381c1c8 Mon Sep 17 00:00:00 2001 +From: =?UTF-8?q?C=C3=A9dric=20Bosdonnat?= +Date: Thu, 18 Nov 2021 14:46:25 +0100 +Subject: [PATCH] Fix salt-call event.send call with grains and pillar + +--- + changelog/61252.fixed | 1 + + salt/modules/event.py | 4 ++-- + tests/pytests/integration/modules/test_event.py | 12 +++++++++++- + 3 files changed, 14 insertions(+), 3 deletions(-) + create mode 100644 changelog/61252.fixed + +diff --git a/changelog/61252.fixed b/changelog/61252.fixed +new file mode 100644 +index 0000000000..2692f9b7b7 +--- 
/dev/null ++++ b/changelog/61252.fixed +@@ -0,0 +1 @@ ++Fix salt-call event.event with pillar or grains +diff --git a/salt/modules/event.py b/salt/modules/event.py +index 03dad5e614..7fe701708b 100644 +--- a/salt/modules/event.py ++++ b/salt/modules/event.py +@@ -216,13 +216,13 @@ def send( + if isinstance(with_grains, list): + data_dict["grains"] = _dict_subset(with_grains, __grains__) + else: +- data_dict["grains"] = __grains__ ++ data_dict["grains"] = __grains__.value() + + if with_pillar: + if isinstance(with_pillar, list): + data_dict["pillar"] = _dict_subset(with_pillar, __pillar__) + else: +- data_dict["pillar"] = __pillar__ ++ data_dict["pillar"] = __pillar__.value() + + if with_env_opts: + data_dict["saltenv"] = __opts__.get("saltenv", "base") +diff --git a/tests/pytests/integration/modules/test_event.py b/tests/pytests/integration/modules/test_event.py +index 54087b1b65..8912c1e807 100644 +--- a/tests/pytests/integration/modules/test_event.py ++++ b/tests/pytests/integration/modules/test_event.py +@@ -68,7 +68,14 @@ def test_send(event_listener, salt_master, salt_minion, salt_call_cli): + event_tag = random_string("salt/test/event/") + data = {"event.fire": "just test it!!!!"} + start_time = time.time() +- ret = salt_call_cli.run("event.send", event_tag, data=data) ++ ret = salt_call_cli.run( ++ "event.send", ++ event_tag, ++ data=data, ++ with_grains=True, ++ with_pillar=True, ++ preload={"foo": "bar"}, ++ ) + assert ret.exitcode == 0 + assert ret.json + assert ret.json is True +@@ -82,3 +89,6 @@ def test_send(event_listener, salt_master, salt_minion, salt_call_cli): + assert event.data["id"] == salt_minion.id + assert event.data["cmd"] == "_minion_event" + assert "event.fire" in event.data["data"] ++ assert event.data["foo"] == "bar" ++ assert event.data["data"]["grains"]["test_grain"] == "cheese" ++ assert event.data["data"]["pillar"]["ext_spam"] == "eggs" +-- +2.34.1 + + diff --git a/fix-salt-ssh-opts-poisoning-bsc-1197637-3004-501.patch 
b/fix-salt-ssh-opts-poisoning-bsc-1197637-3004-501.patch index 9403f28..2990779 100644 --- a/fix-salt-ssh-opts-poisoning-bsc-1197637-3004-501.patch +++ b/fix-salt-ssh-opts-poisoning-bsc-1197637-3004-501.patch @@ -1,4 +1,4 @@ -From 59200b4a4578c96dcffc0584725d8cba40c20ff7 Mon Sep 17 00:00:00 2001 +From 7096332546a65c0c507fbd4bccbf7062e7c3c9c7 Mon Sep 17 00:00:00 2001 From: Victor Zhestkov Date: Thu, 31 Mar 2022 13:39:57 +0300 Subject: [PATCH] Fix salt-ssh opts poisoning (bsc#1197637) - 3004 (#501) @@ -14,19 +14,19 @@ Subject: [PATCH] Fix salt-ssh opts poisoning (bsc#1197637) - 3004 (#501) 2 files changed, 16 insertions(+), 8 deletions(-) diff --git a/salt/client/ssh/__init__.py b/salt/client/ssh/__init__.py -index 6db2dfcbb0..8ae417f575 100644 +index 3e032c7197..bc77eb700e 100644 --- a/salt/client/ssh/__init__.py +++ b/salt/client/ssh/__init__.py -@@ -338,7 +338,7 @@ class SSH(MultiprocessingStateMixin): +@@ -340,7 +340,7 @@ class SSH: self.session_flock_file = os.path.join( self.opts["cachedir"], "salt-ssh.session.lock" ) - self.ssh_session_grace_time = int(self.opts.get("ssh_session_grace_time", 3)) + self.ssh_session_grace_time = int(self.opts.get("ssh_session_grace_time", 1)) - # __setstate__ and __getstate__ are only used on spawning platforms. 
- def __setstate__(self, state): -@@ -571,7 +571,6 @@ class SSH(MultiprocessingStateMixin): + @property + def parse_tgt(self): +@@ -558,7 +558,6 @@ class SSH: """ LOG_LOCK.release() salt.loader.LOAD_LOCK.release() @@ -34,7 +34,7 @@ index 6db2dfcbb0..8ae417f575 100644 single = Single( opts, opts["argv"], -@@ -608,6 +607,7 @@ class SSH(MultiprocessingStateMixin): +@@ -595,6 +594,7 @@ class SSH: Spin up the needed threads or processes and execute the subsequent routines """ @@ -42,7 +42,7 @@ index 6db2dfcbb0..8ae417f575 100644 que = multiprocessing.Queue() running = {} targets_queue = deque(self.targets.keys()) -@@ -618,7 +618,7 @@ class SSH(MultiprocessingStateMixin): +@@ -605,7 +605,7 @@ class SSH: if not self.targets: log.error("No matching targets found in roster.") break @@ -51,7 +51,7 @@ index 6db2dfcbb0..8ae417f575 100644 if targets_queue: host = targets_queue.popleft() else: -@@ -636,7 +636,7 @@ class SSH(MultiprocessingStateMixin): +@@ -623,7 +623,7 @@ class SSH: pid_running = ( False if cached_session["pid"] == 0 @@ -60,7 +60,7 @@ index 6db2dfcbb0..8ae417f575 100644 ) if ( pid_running and prev_session_running < self.max_pid_wait -@@ -651,9 +651,10 @@ class SSH(MultiprocessingStateMixin): +@@ -638,9 +638,10 @@ class SSH: "salt-ssh/session", host, { @@ -72,7 +72,7 @@ index 6db2dfcbb0..8ae417f575 100644 }, ) for default in self.defaults: -@@ -681,7 +682,7 @@ class SSH(MultiprocessingStateMixin): +@@ -668,7 +669,7 @@ class SSH: continue args = ( que, @@ -81,7 +81,7 @@ index 6db2dfcbb0..8ae417f575 100644 host, self.targets[host], mine, -@@ -717,6 +718,7 @@ class SSH(MultiprocessingStateMixin): +@@ -704,6 +705,7 @@ class SSH: "pid": routine.pid, "master_id": self.master_id, "ts": time.time(), @@ -89,7 +89,7 @@ index 6db2dfcbb0..8ae417f575 100644 }, ) continue -@@ -768,12 +770,13 @@ class SSH(MultiprocessingStateMixin): +@@ -755,12 +757,13 @@ class SSH: "pid": 0, "master_id": self.master_id, "ts": time.time(), @@ -105,10 +105,10 @@ index 6db2dfcbb0..8ae417f575 
100644 ) >= len(running): time.sleep(0.1) diff --git a/salt/loader/__init__.py b/salt/loader/__init__.py -index 32f8a7702c..bbe4269839 100644 +index a0f2220476..bc3634bb7f 100644 --- a/salt/loader/__init__.py +++ b/salt/loader/__init__.py -@@ -757,7 +757,12 @@ def roster(opts, runner=None, utils=None, whitelist=None, loaded_base_name=None, +@@ -622,7 +622,12 @@ def roster(opts, runner=None, utils=None, whitelist=None, context=None): opts, tag="roster", whitelist=whitelist, @@ -120,9 +120,9 @@ index 32f8a7702c..bbe4269839 100644 + "__opts__": opts, + }, extra_module_dirs=utils.module_dirs if utils else None, - loaded_base_name=loaded_base_name, ) + -- -2.37.3 +2.35.1 diff --git a/fix-salt.states.file.managed-for-follow_symlinks-tru.patch b/fix-salt.states.file.managed-for-follow_symlinks-tru.patch index 661c70c..843e075 100644 --- a/fix-salt.states.file.managed-for-follow_symlinks-tru.patch +++ b/fix-salt.states.file.managed-for-follow_symlinks-tru.patch @@ -1,8 +1,8 @@ -From e328d2029c93153c519e10e9596c635f6f3febcf Mon Sep 17 00:00:00 2001 +From 10705d922a11e5f2654d26e83e9f302862fafb18 Mon Sep 17 00:00:00 2001 From: Petr Pavlu <31453820+petrpavlu@users.noreply.github.com> Date: Fri, 8 Jul 2022 10:11:52 +0200 -Subject: [PATCH] Fix salt.states.file.managed() for follow_symlinks=True - and test=True (bsc#1199372) (#535) +Subject: [PATCH] Fix salt.states.file.managed() for + follow_symlinks=True and test=True (bsc#1199372) (#535) When managing file /etc/test as follows: > file /etc/test: @@ -33,9 +33,13 @@ Fixes #62066. [Cherry-picked from upstream commit 95bfbe31a2dc54723af3f1783d40de152760fe1a.] 
--- - changelog/62066.fixed | 1 + - 1 file changed, 1 insertion(+) + changelog/62066.fixed | 1 + + salt/modules/file.py | 27 +++- + salt/states/file.py | 1 + + .../unit/modules/file/test_file_check.py | 144 ++++++++++++++++++ + 4 files changed, 172 insertions(+), 1 deletion(-) create mode 100644 changelog/62066.fixed + create mode 100644 tests/pytests/unit/modules/file/test_file_check.py diff --git a/changelog/62066.fixed b/changelog/62066.fixed new file mode 100644 @@ -44,7 +48,261 @@ index 0000000000..68216a03c1 +++ b/changelog/62066.fixed @@ -0,0 +1 @@ +Fixed salt.states.file.managed() for follow_symlinks=True and test=True +diff --git a/salt/modules/file.py b/salt/modules/file.py +index 73619064ef..40c07455e3 100644 +--- a/salt/modules/file.py ++++ b/salt/modules/file.py +@@ -5281,11 +5281,18 @@ def check_managed( + serole=None, + setype=None, + serange=None, ++ follow_symlinks=False, + **kwargs + ): + """ + Check to see what changes need to be made for a file + ++ follow_symlinks ++ If the desired path is a symlink, follow it and check the permissions ++ of the file to which the symlink points. ++ ++ .. versionadded:: 3005 ++ + CLI Example: + + .. code-block:: bash +@@ -5336,6 +5343,7 @@ def check_managed( + serole=serole, + setype=setype, + serange=serange, ++ follow_symlinks=follow_symlinks, + ) + # Ignore permission for files written temporary directories + # Files in any path will still be set correctly using get_managed() +@@ -5372,6 +5380,7 @@ def check_managed_changes( + setype=None, + serange=None, + verify_ssl=True, ++ follow_symlinks=False, + **kwargs + ): + """ +@@ -5387,6 +5396,12 @@ def check_managed_changes( + + .. versionadded:: 3002 + ++ follow_symlinks ++ If the desired path is a symlink, follow it and check the permissions ++ of the file to which the symlink points. ++ ++ .. versionadded:: 3005 ++ + CLI Example: + + .. 
code-block:: bash +@@ -5456,6 +5471,7 @@ def check_managed_changes( + serole=serole, + setype=setype, + serange=serange, ++ follow_symlinks=follow_symlinks, + ) + __clean_tmp(sfn) + return changes +@@ -5477,6 +5493,7 @@ def check_file_meta( + setype=None, + serange=None, + verify_ssl=True, ++ follow_symlinks=False, + ): + """ + Check for the changes in the file metadata. +@@ -5553,6 +5570,12 @@ def check_file_meta( + will not attempt to validate the servers certificate. Default is True. + + .. versionadded:: 3002 ++ ++ follow_symlinks ++ If the desired path is a symlink, follow it and check the permissions ++ of the file to which the symlink points. ++ ++ .. versionadded:: 3005 + """ + changes = {} + if not source_sum: +@@ -5560,7 +5583,9 @@ def check_file_meta( + + try: + lstats = stats( +- name, hash_type=source_sum.get("hash_type", None), follow_symlinks=False ++ name, ++ hash_type=source_sum.get("hash_type", None), ++ follow_symlinks=follow_symlinks, + ) + except CommandExecutionError: + lstats = {} +diff --git a/salt/states/file.py b/salt/states/file.py +index 54e7decf86..a6288025e5 100644 +--- a/salt/states/file.py ++++ b/salt/states/file.py +@@ -3038,6 +3038,7 @@ def managed( + setype=setype, + serange=serange, + verify_ssl=verify_ssl, ++ follow_symlinks=follow_symlinks, + **kwargs + ) + +diff --git a/tests/pytests/unit/modules/file/test_file_check.py b/tests/pytests/unit/modules/file/test_file_check.py +new file mode 100644 +index 0000000000..bd0379ddae +--- /dev/null ++++ b/tests/pytests/unit/modules/file/test_file_check.py +@@ -0,0 +1,144 @@ ++import getpass ++import logging ++import os ++ ++import pytest ++import salt.modules.file as filemod ++import salt.utils.files ++import salt.utils.platform ++ ++log = logging.getLogger(__name__) ++ ++ ++@pytest.fixture ++def configure_loader_modules(): ++ return {filemod: {"__context__": {}}} ++ ++ ++@pytest.fixture ++def tfile(tmp_path): ++ filename = str(tmp_path / "file-check-test-file") ++ ++ with 
salt.utils.files.fopen(filename, "w") as fp: ++ fp.write("Hi hello! I am a file.") ++ os.chmod(filename, 0o644) ++ ++ yield filename ++ ++ os.remove(filename) ++ ++ ++@pytest.fixture ++def a_link(tmp_path, tfile): ++ linkname = str(tmp_path / "a_link") ++ os.symlink(tfile, linkname) ++ ++ yield linkname ++ ++ os.remove(linkname) ++ ++ ++def get_link_perms(): ++ if salt.utils.platform.is_linux(): ++ return "0777" ++ return "0755" ++ ++ ++@pytest.mark.skip_on_windows(reason="os.symlink is not available on Windows") ++def test_check_file_meta_follow_symlinks(a_link, tfile): ++ user = getpass.getuser() ++ lperms = get_link_perms() ++ ++ # follow_symlinks=False (default) ++ ret = filemod.check_file_meta( ++ a_link, tfile, None, None, user, None, lperms, None, None ++ ) ++ assert ret == {} ++ ++ ret = filemod.check_file_meta( ++ a_link, tfile, None, None, user, None, "0644", None, None ++ ) ++ assert ret == {"mode": "0644"} ++ ++ # follow_symlinks=True ++ ret = filemod.check_file_meta( ++ a_link, tfile, None, None, user, None, "0644", None, None, follow_symlinks=True ++ ) ++ assert ret == {} ++ ++ ++@pytest.mark.skip_on_windows(reason="os.symlink is not available on Windows") ++def test_check_managed_follow_symlinks(a_link, tfile): ++ user = getpass.getuser() ++ lperms = get_link_perms() ++ ++ # Function check_managed() ignores mode changes for files in the temp directory. ++ # Trick it to not recognize a_link as such. 
++ a_link = "/" + a_link ++ ++ # follow_symlinks=False (default) ++ ret, comments = filemod.check_managed( ++ a_link, tfile, None, None, user, None, lperms, None, None, None, None, None ++ ) ++ assert ret is True ++ assert comments == "The file {} is in the correct state".format(a_link) ++ ++ ret, comments = filemod.check_managed( ++ a_link, tfile, None, None, user, None, "0644", None, None, None, None, None ++ ) ++ assert ret is None ++ assert comments == "The following values are set to be changed:\nmode: 0644\n" ++ ++ # follow_symlinks=True ++ ret, comments = filemod.check_managed( ++ a_link, ++ tfile, ++ None, ++ None, ++ user, ++ None, ++ "0644", ++ None, ++ None, ++ None, ++ None, ++ None, ++ follow_symlinks=True, ++ ) ++ assert ret is True ++ assert comments == "The file {} is in the correct state".format(a_link) ++ ++ ++@pytest.mark.skip_on_windows(reason="os.symlink is not available on Windows") ++def test_check_managed_changes_follow_symlinks(a_link, tfile): ++ user = getpass.getuser() ++ lperms = get_link_perms() ++ ++ # follow_symlinks=False (default) ++ ret = filemod.check_managed_changes( ++ a_link, tfile, None, None, user, None, lperms, None, None, None, None, None ++ ) ++ assert ret == {} ++ ++ ret = filemod.check_managed_changes( ++ a_link, tfile, None, None, user, None, "0644", None, None, None, None, None ++ ) ++ assert ret == {"mode": "0644"} ++ ++ # follow_symlinks=True ++ ret = filemod.check_managed_changes( ++ a_link, ++ tfile, ++ None, ++ None, ++ user, ++ None, ++ "0644", ++ None, ++ None, ++ None, ++ None, ++ None, ++ follow_symlinks=True, ++ ) ++ assert ret == {} -- -2.37.3 +2.36.1 diff --git a/fix-salt.utils.stringutils.to_str-calls-to-make-it-w.patch b/fix-salt.utils.stringutils.to_str-calls-to-make-it-w.patch index b40e9bb..8bed8a5 100644 --- a/fix-salt.utils.stringutils.to_str-calls-to-make-it-w.patch +++ b/fix-salt.utils.stringutils.to_str-calls-to-make-it-w.patch @@ -1,4 +1,4 @@ -From f348f291093ea3f7c841b03a975ae81b40963842 Mon Sep 
17 00:00:00 2001 +From 435d9fbee299b06e1c58cdc0574b6a1975841879 Mon Sep 17 00:00:00 2001 From: Victor Zhestkov Date: Wed, 25 Nov 2020 15:09:41 +0300 Subject: [PATCH] Fix salt.utils.stringutils.to_str calls to make it @@ -10,10 +10,10 @@ Subject: [PATCH] Fix salt.utils.stringutils.to_str calls to make it 2 files changed, 19 insertions(+), 8 deletions(-) diff --git a/salt/modules/file.py b/salt/modules/file.py -index d475e3c2e3..d3de4da467 100644 +index b830b390d3..b9744393d7 100644 --- a/salt/modules/file.py +++ b/salt/modules/file.py -@@ -5036,6 +5036,12 @@ def check_perms( +@@ -4970,6 +4970,12 @@ def check_perms( is_dir = os.path.isdir(name) is_link = os.path.islink(name) @@ -26,7 +26,7 @@ index d475e3c2e3..d3de4da467 100644 # user/group changes if needed, then check if it worked if user: if isinstance(user, int): -@@ -5045,7 +5051,7 @@ def check_perms( +@@ -4979,7 +4985,7 @@ def check_perms( and user_to_uid(user) != user_to_uid(perms["luser"]) ) or ( not salt.utils.platform.is_windows() @@ -35,7 +35,7 @@ index d475e3c2e3..d3de4da467 100644 ): perms["cuser"] = user -@@ -5057,7 +5063,7 @@ def check_perms( +@@ -4991,7 +4997,7 @@ def check_perms( and group_to_gid(group) != group_to_gid(perms["lgroup"]) ) or ( not salt.utils.platform.is_windows() @@ -44,7 +44,7 @@ index d475e3c2e3..d3de4da467 100644 ): perms["cgroup"] = group -@@ -5089,8 +5095,7 @@ def check_perms( +@@ -5023,8 +5029,7 @@ def check_perms( and user != "" ) or ( not salt.utils.platform.is_windows() @@ -54,7 +54,7 @@ index d475e3c2e3..d3de4da467 100644 and user != "" ): if __opts__["test"] is True: -@@ -5111,8 +5116,7 @@ def check_perms( +@@ -5045,8 +5050,7 @@ def check_perms( and group != "" ) or ( not salt.utils.platform.is_windows() @@ -65,10 +65,10 @@ index d475e3c2e3..d3de4da467 100644 ): if __opts__["test"] is True: diff --git a/salt/states/file.py b/salt/states/file.py -index 50ceef1158..1083bb46d6 100644 +index 89c70eb454..fd8ffde757 100644 --- a/salt/states/file.py +++ b/salt/states/file.py -@@ 
-863,15 +863,22 @@ def _check_dir_meta(name, user, group, mode, follow_symlinks=False): +@@ -989,15 +989,22 @@ def _check_dir_meta(name, user, group, mode, follow_symlinks=False): if not stats: changes["directory"] = "new" return changes @@ -94,6 +94,6 @@ index 50ceef1158..1083bb46d6 100644 ): changes["group"] = group -- -2.37.3 +2.29.2 diff --git a/fix-state.apply-in-test-mode-with-file-state-module-.patch b/fix-state.apply-in-test-mode-with-file-state-module-.patch index 85a7e84..a8f3318 100644 --- a/fix-state.apply-in-test-mode-with-file-state-module-.patch +++ b/fix-state.apply-in-test-mode-with-file-state-module-.patch @@ -1,8 +1,8 @@ -From 58317cda7a347581b495ab7fd71ce75f0740d8d6 Mon Sep 17 00:00:00 2001 +From ed567e5f339f7bf95d4361ac47e67427db71714c Mon Sep 17 00:00:00 2001 From: Victor Zhestkov Date: Thu, 1 Sep 2022 14:44:26 +0300 -Subject: [PATCH] Fix state.apply in test mode with file state module on - user/group checking (bsc#1202167) +Subject: [PATCH] Fix state.apply in test mode with file state module + on user/group checking (bsc#1202167) * Do not fail on checking user/group in test mode @@ -15,11 +15,12 @@ Co-authored-by: nicholasmhughes Co-authored-by: nicholasmhughes --- changelog/61846.fixed | 1 + - salt/states/file.py | 5 +++ - tests/pytests/unit/states/file/test_copy.py | 35 ++++++++++++++++ - .../unit/states/file/test_filestate.py | 42 +++++++++++++++++++ - .../pytests/unit/states/file/test_managed.py | 31 ++++++++++++++ - 5 files changed, 114 insertions(+) + salt/states/file.py | 5 ++ + tests/pytests/unit/states/file/test_copy.py | 35 ++++++++++++ + .../unit/states/file/test_directory.py | 55 +++++++++++++++++++ + .../unit/states/file/test_filestate.py | 42 ++++++++++++++ + .../pytests/unit/states/file/test_managed.py | 31 +++++++++++ + 6 files changed, 169 insertions(+) create mode 100644 changelog/61846.fixed diff --git a/changelog/61846.fixed b/changelog/61846.fixed @@ -30,7 +31,7 @@ index 0000000000..c4024efe9f @@ -0,0 +1 @@ +Fix the 
reporting of errors for file.directory in test mode diff --git a/salt/states/file.py b/salt/states/file.py -index 1083bb46d6..5cb58f5454 100644 +index a6288025e5..39cf83b78e 100644 --- a/salt/states/file.py +++ b/salt/states/file.py @@ -379,6 +379,11 @@ def _check_user(user, group): @@ -88,6 +89,69 @@ index ce7161f02d..a11adf5ae0 100644 + ) + assert ret["result"] is not False + assert "is not available" not in ret["comment"] +diff --git a/tests/pytests/unit/states/file/test_directory.py b/tests/pytests/unit/states/file/test_directory.py +index 0e15e1d3ca..1287609c6a 100644 +--- a/tests/pytests/unit/states/file/test_directory.py ++++ b/tests/pytests/unit/states/file/test_directory.py +@@ -291,3 +291,58 @@ def test_directory(): + assert ( + filestate.directory(name, user=user, group=group) == ret + ) ++ ++ ++def test_directory_test_mode_user_group_not_present(): ++ name = "/etc/testdir" ++ user = "salt" ++ group = "saltstack" ++ if salt.utils.platform.is_windows(): ++ name = name.replace("/", "\\") ++ ++ ret = { ++ "name": name, ++ "result": None, ++ "comment": "", ++ "changes": {name: {"directory": "new"}}, ++ } ++ ++ if salt.utils.platform.is_windows(): ++ comt = 'The directory "{}" will be changed' "".format(name) ++ else: ++ comt = "The following files will be changed:\n{}:" " directory - new\n".format( ++ name ++ ) ++ ret["comment"] = comt ++ ++ mock_f = MagicMock(return_value=False) ++ mock_uid = MagicMock( ++ side_effect=[ ++ "", ++ "U12", ++ "", ++ ] ++ ) ++ mock_gid = MagicMock( ++ side_effect=[ ++ "G12", ++ "", ++ "", ++ ] ++ ) ++ mock_error = CommandExecutionError ++ with patch.dict( ++ filestate.__salt__, ++ { ++ "file.user_to_uid": mock_uid, ++ "file.group_to_gid": mock_gid, ++ "file.stats": mock_f, ++ }, ++ ), patch("salt.utils.win_dacl.get_sid", mock_error), patch.object( ++ os.path, "isdir", mock_f ++ ), patch.dict( ++ filestate.__opts__, {"test": True} ++ ): ++ assert filestate.directory(name, user=user, group=group) == ret ++ assert 
filestate.directory(name, user=user, group=group) == ret ++ assert filestate.directory(name, user=user, group=group) == ret diff --git a/tests/pytests/unit/states/file/test_filestate.py b/tests/pytests/unit/states/file/test_filestate.py index 2f9f369fb2..c373cb3449 100644 --- a/tests/pytests/unit/states/file/test_filestate.py @@ -178,6 +242,6 @@ index 9d9fb17717..0b341e09a9 100644 + assert ret["result"] is not False + assert "is not available" not in ret["comment"] -- -2.37.3 +2.37.2 diff --git a/fix-test_ipc-unit-tests.patch b/fix-test_ipc-unit-tests.patch index 5a24a11..bfa49f5 100644 --- a/fix-test_ipc-unit-tests.patch +++ b/fix-test_ipc-unit-tests.patch @@ -1,4 +1,4 @@ -From 4cc528dadfbffdeb90df41bbd848d0c2c7efec78 Mon Sep 17 00:00:00 2001 +From 61d9b5e4ceaa0f5feb7fc364c9089cb624006812 Mon Sep 17 00:00:00 2001 From: Alexander Graul Date: Tue, 12 Jul 2022 14:02:58 +0200 Subject: [PATCH] Fix test_ipc unit tests @@ -8,10 +8,10 @@ Subject: [PATCH] Fix test_ipc unit tests 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/tests/unit/transport/test_ipc.py b/tests/unit/transport/test_ipc.py -index 4a0a7c29e2..af001d9650 100644 +index 79b49f9406..7177b7f6c4 100644 --- a/tests/unit/transport/test_ipc.py +++ b/tests/unit/transport/test_ipc.py -@@ -105,8 +105,8 @@ class IPCMessagePubSubCase(salt.ext.tornado.testing.AsyncTestCase): +@@ -107,8 +107,8 @@ class IPCMessagePubSubCase(salt.ext.tornado.testing.AsyncTestCase): self.stop() # Now let both waiting data at once @@ -22,7 +22,7 @@ index 4a0a7c29e2..af001d9650 100644 self.pub_channel.publish("TEST") self.wait() self.assertEqual(len(call_cnt), 2) -@@ -148,7 +148,7 @@ class IPCMessagePubSubCase(salt.ext.tornado.testing.AsyncTestCase): +@@ -150,7 +150,7 @@ class IPCMessagePubSubCase(salt.ext.tornado.testing.AsyncTestCase): pass try: @@ -32,6 +32,6 @@ index 4a0a7c29e2..af001d9650 100644 except StreamClosedError as ex: assert False, "StreamClosedError was raised inside the Future" -- -2.37.3 +2.36.1 diff --git 
a/fix-the-regression-for-yumnotify-plugin-456.patch b/fix-the-regression-for-yumnotify-plugin-456.patch index e8532c8..3e071f6 100644 --- a/fix-the-regression-for-yumnotify-plugin-456.patch +++ b/fix-the-regression-for-yumnotify-plugin-456.patch @@ -1,4 +1,4 @@ -From 48a924bfad537f236593395a9d4cf108d6e9a03f Mon Sep 17 00:00:00 2001 +From a33a7b2e8e477912548cfd24c0dff2c38c44eae8 Mon Sep 17 00:00:00 2001 From: Victor Zhestkov <35733135+vzhestkov@users.noreply.github.com> Date: Tue, 9 Nov 2021 16:19:56 +0300 Subject: [PATCH] Fix the regression for yumnotify plugin (#456) @@ -18,6 +18,6 @@ index 0d117e8946..cec5256d20 100644 - print("Unable to save the cookie file: %s" % (e), file=sys.stderr) + sys.stderr.write("Unable to save the cookie file: %s\n" % (e)) -- -2.37.3 +2.33.1 diff --git a/fix-the-regression-in-schedule-module-releasded-in-3.patch b/fix-the-regression-in-schedule-module-releasded-in-3.patch new file mode 100644 index 0000000..f95b939 --- /dev/null +++ b/fix-the-regression-in-schedule-module-releasded-in-3.patch @@ -0,0 +1,820 @@ +From 7803275a8aaeedf2124706f51b6a54cfcfb2d032 Mon Sep 17 00:00:00 2001 +From: Victor Zhestkov +Date: Thu, 1 Sep 2022 14:45:13 +0300 +Subject: [PATCH] Fix the regression in schedule module releasded in + 3004 (bsc#1202631) + +Co-authored-by: Gareth J. Greenaway +--- + changelog/61324.changed | 1 + + salt/modules/schedule.py | 449 ++++++++++++++------ + tests/pytests/unit/modules/test_schedule.py | 138 +++++- + 3 files changed, 442 insertions(+), 146 deletions(-) + create mode 100644 changelog/61324.changed + +diff --git a/changelog/61324.changed b/changelog/61324.changed +new file mode 100644 +index 0000000000..d67051a8da +--- /dev/null ++++ b/changelog/61324.changed +@@ -0,0 +1 @@ ++Adding the ability to add, delete, purge, and modify Salt scheduler jobs when the Salt minion is not running. 
+diff --git a/salt/modules/schedule.py b/salt/modules/schedule.py +index bcd64f2851..913a101ea6 100644 +--- a/salt/modules/schedule.py ++++ b/salt/modules/schedule.py +@@ -15,6 +15,7 @@ import salt.utils.event + import salt.utils.files + import salt.utils.odict + import salt.utils.yaml ++import yaml + + try: + import dateutil.parser as dateutil_parser +@@ -64,7 +65,35 @@ SCHEDULE_CONF = [ + ] + + +-def list_(show_all=False, show_disabled=True, where=None, return_yaml=True): ++def _get_schedule_config_file(): ++ """ ++ Return the minion schedule configuration file ++ """ ++ config_dir = __opts__.get("conf_dir", None) ++ if config_dir is None and "conf_file" in __opts__: ++ config_dir = os.path.dirname(__opts__["conf_file"]) ++ if config_dir is None: ++ config_dir = salt.syspaths.CONFIG_DIR ++ ++ minion_d_dir = os.path.join( ++ config_dir, ++ os.path.dirname( ++ __opts__.get( ++ "default_include", ++ salt.config.DEFAULT_MINION_OPTS["default_include"], ++ ) ++ ), ++ ) ++ ++ if not os.path.isdir(minion_d_dir): ++ os.makedirs(minion_d_dir) ++ ++ return os.path.join(minion_d_dir, "_schedule.conf") ++ ++ ++def list_( ++ show_all=False, show_disabled=True, where=None, return_yaml=True, offline=False ++): + """ + List the jobs currently scheduled on the minion + +@@ -83,24 +112,33 @@ def list_(show_all=False, show_disabled=True, where=None, return_yaml=True): + """ + + schedule = {} +- try: +- with salt.utils.event.get_event("minion", opts=__opts__) as event_bus: +- res = __salt__["event.fire"]( +- {"func": "list", "where": where}, "manage_schedule" +- ) +- if res: +- event_ret = event_bus.get_event( +- tag="/salt/minion/minion_schedule_list_complete", wait=30 ++ if offline: ++ schedule_config = _get_schedule_config_file() ++ if os.path.exists(schedule_config): ++ with salt.utils.files.fopen(schedule_config) as fp_: ++ schedule_yaml = fp_.read() ++ if schedule_yaml: ++ schedule_contents = yaml.safe_load(schedule_yaml) ++ schedule = schedule_contents.get("schedule", {}) ++ 
else: ++ try: ++ with salt.utils.event.get_event("minion", opts=__opts__) as event_bus: ++ res = __salt__["event.fire"]( ++ {"func": "list", "where": where}, "manage_schedule" + ) +- if event_ret and event_ret["complete"]: +- schedule = event_ret["schedule"] +- except KeyError: +- # Effectively a no-op, since we can't really return without an event system +- ret = {} +- ret["comment"] = "Event module not available. Schedule list failed." +- ret["result"] = True +- log.debug("Event module not available. Schedule list failed.") +- return ret ++ if res: ++ event_ret = event_bus.get_event( ++ tag="/salt/minion/minion_schedule_list_complete", wait=30 ++ ) ++ if event_ret and event_ret["complete"]: ++ schedule = event_ret["schedule"] ++ except KeyError: ++ # Effectively a no-op, since we can't really return without an event system ++ ret = {} ++ ret["comment"] = "Event module not available. Schedule list failed." ++ ret["result"] = True ++ log.debug("Event module not available. Schedule list failed.") ++ return ret + + _hidden = ["enabled", "skip_function", "skip_during_range"] + for job in list(schedule.keys()): # iterate over a copy since we will mutate it +@@ -139,14 +177,11 @@ def list_(show_all=False, show_disabled=True, where=None, return_yaml=True): + # remove _seconds from the listing + del schedule[job]["_seconds"] + +- if schedule: +- if return_yaml: +- tmp = {"schedule": schedule} +- return salt.utils.yaml.safe_dump(tmp, default_flow_style=False) +- else: +- return schedule ++ if return_yaml: ++ tmp = {"schedule": schedule} ++ return salt.utils.yaml.safe_dump(tmp, default_flow_style=False) + else: +- return {"schedule": {}} ++ return schedule + + + def is_enabled(name=None): +@@ -186,11 +221,18 @@ def purge(**kwargs): + .. 
code-block:: bash + + salt '*' schedule.purge ++ ++ # Purge jobs on Salt minion ++ salt '*' schedule.purge ++ + """ + +- ret = {"comment": [], "result": True} ++ ret = {"comment": [], "changes": {}, "result": True} + +- for name in list_(show_all=True, return_yaml=False): ++ current_schedule = list_( ++ show_all=True, return_yaml=False, offline=kwargs.get("offline") ++ ) ++ for name in pycopy.deepcopy(current_schedule): + if name == "enabled": + continue + if name.startswith("__"): +@@ -202,37 +244,65 @@ def purge(**kwargs): + "Job: {} would be deleted from schedule.".format(name) + ) + else: +- persist = kwargs.get("persist", True) ++ if kwargs.get("offline"): ++ del current_schedule[name] + +- try: +- with salt.utils.event.get_event("minion", opts=__opts__) as event_bus: +- res = __salt__["event.fire"]( +- {"name": name, "func": "delete", "persist": persist}, +- "manage_schedule", +- ) +- if res: +- event_ret = event_bus.get_event( +- tag="/salt/minion/minion_schedule_delete_complete", wait=30 ++ ret["comment"].append("Deleted job: {} from schedule.".format(name)) ++ ret["changes"][name] = "removed" ++ ++ else: ++ persist = kwargs.get("persist", True) ++ try: ++ with salt.utils.event.get_event( ++ "minion", opts=__opts__ ++ ) as event_bus: ++ res = __salt__["event.fire"]( ++ {"name": name, "func": "delete", "persist": persist}, ++ "manage_schedule", + ) +- if event_ret and event_ret["complete"]: +- _schedule_ret = event_ret["schedule"] +- if name not in _schedule_ret: +- ret["result"] = True +- ret["comment"].append( +- "Deleted job: {} from schedule.".format(name) +- ) +- else: +- ret["comment"].append( +- "Failed to delete job {} from schedule.".format( +- name ++ if res: ++ event_ret = event_bus.get_event( ++ tag="/salt/minion/minion_schedule_delete_complete", ++ wait=30, ++ ) ++ if event_ret and event_ret["complete"]: ++ _schedule_ret = event_ret["schedule"] ++ if name not in _schedule_ret: ++ ret["result"] = True ++ ret["changes"][name] = "removed" ++ 
ret["comment"].append( ++ "Deleted job: {} from schedule.".format(name) + ) +- ) +- ret["result"] = True ++ else: ++ ret["comment"].append( ++ "Failed to delete job {} from schedule.".format( ++ name ++ ) ++ ) ++ ret["result"] = True ++ ++ except KeyError: ++ # Effectively a no-op, since we can't really return without an event system ++ ret["comment"] = "Event module not available. Schedule add failed." ++ ret["result"] = True ++ ++ # wait until the end to write file in offline mode ++ if kwargs.get("offline"): ++ schedule_conf = _get_schedule_config_file() ++ ++ try: ++ with salt.utils.files.fopen(schedule_conf, "wb+") as fp_: ++ fp_.write( ++ salt.utils.stringutils.to_bytes( ++ salt.utils.yaml.safe_dump({"schedule": current_schedule}) ++ ) ++ ) ++ except OSError: ++ log.error( ++ "Failed to persist the updated schedule", ++ exc_info_on_loglevel=logging.DEBUG, ++ ) + +- except KeyError: +- # Effectively a no-op, since we can't really return without an event system +- ret["comment"] = "Event module not available. Schedule add failed." +- ret["result"] = True + return ret + + +@@ -245,6 +315,10 @@ def delete(name, **kwargs): + .. 
code-block:: bash + + salt '*' schedule.delete job1 ++ ++ # Delete job on Salt minion when the Salt minion is not running ++ salt '*' schedule.delete job1 ++ + """ + + ret = { +@@ -260,45 +334,86 @@ def delete(name, **kwargs): + ret["comment"] = "Job: {} would be deleted from schedule.".format(name) + ret["result"] = True + else: +- persist = kwargs.get("persist", True) ++ if kwargs.get("offline"): ++ current_schedule = list_( ++ show_all=True, ++ where="opts", ++ return_yaml=False, ++ offline=kwargs.get("offline"), ++ ) + +- if name in list_(show_all=True, where="opts", return_yaml=False): +- event_data = {"name": name, "func": "delete", "persist": persist} +- elif name in list_(show_all=True, where="pillar", return_yaml=False): +- event_data = { +- "name": name, +- "where": "pillar", +- "func": "delete", +- "persist": False, +- } +- else: +- ret["comment"] = "Job {} does not exist.".format(name) +- return ret ++ del current_schedule[name] + +- try: +- with salt.utils.event.get_event("minion", opts=__opts__) as event_bus: +- res = __salt__["event.fire"](event_data, "manage_schedule") +- if res: +- event_ret = event_bus.get_event( +- tag="/salt/minion/minion_schedule_delete_complete", +- wait=30, ++ schedule_conf = _get_schedule_config_file() ++ ++ try: ++ with salt.utils.files.fopen(schedule_conf, "wb+") as fp_: ++ fp_.write( ++ salt.utils.stringutils.to_bytes( ++ salt.utils.yaml.safe_dump({"schedule": current_schedule}) ++ ) + ) +- if event_ret and event_ret["complete"]: +- schedule = event_ret["schedule"] +- if name not in schedule: +- ret["result"] = True +- ret["comment"] = "Deleted Job {} from schedule.".format( +- name +- ) +- ret["changes"][name] = "removed" +- else: +- ret[ +- "comment" +- ] = "Failed to delete job {} from schedule.".format(name) +- return ret +- except KeyError: +- # Effectively a no-op, since we can't really return without an event system +- ret["comment"] = "Event module not available. Schedule add failed." 
++ except OSError: ++ log.error( ++ "Failed to persist the updated schedule", ++ exc_info_on_loglevel=logging.DEBUG, ++ ) ++ ++ ret["result"] = True ++ ret["comment"] = "Deleted Job {} from schedule.".format(name) ++ ret["changes"][name] = "removed" ++ else: ++ persist = kwargs.get("persist", True) ++ ++ if name in list_( ++ show_all=True, ++ where="opts", ++ return_yaml=False, ++ offline=kwargs.get("offline"), ++ ): ++ event_data = {"name": name, "func": "delete", "persist": persist} ++ elif name in list_( ++ show_all=True, ++ where="pillar", ++ return_yaml=False, ++ offline=kwargs.get("offline"), ++ ): ++ event_data = { ++ "name": name, ++ "where": "pillar", ++ "func": "delete", ++ "persist": False, ++ } ++ else: ++ ret["comment"] = "Job {} does not exist.".format(name) ++ return ret ++ ++ try: ++ with salt.utils.event.get_event("minion", opts=__opts__) as event_bus: ++ res = __salt__["event.fire"](event_data, "manage_schedule") ++ if res: ++ event_ret = event_bus.get_event( ++ tag="/salt/minion/minion_schedule_delete_complete", ++ wait=30, ++ ) ++ if event_ret and event_ret["complete"]: ++ schedule = event_ret["schedule"] ++ if name not in schedule: ++ ret["result"] = True ++ ret["comment"] = "Deleted Job {} from schedule.".format( ++ name ++ ) ++ ret["changes"][name] = "removed" ++ else: ++ ret[ ++ "comment" ++ ] = "Failed to delete job {} from schedule.".format( ++ name ++ ) ++ return ret ++ except KeyError: ++ # Effectively a no-op, since we can't really return without an event system ++ ret["comment"] = "Event module not available. Schedule add failed." 
+ return ret + + +@@ -438,6 +553,10 @@ def add(name, **kwargs): + salt '*' schedule.add job1 function='test.ping' seconds=3600 + # If function have some arguments, use job_args + salt '*' schedule.add job2 function='cmd.run' job_args="['date >> /tmp/date.log']" seconds=60 ++ ++ # Add job to Salt minion when the Salt minion is not running ++ salt '*' schedule.add job1 function='test.ping' seconds=3600 offline=True ++ + """ + + ret = { +@@ -445,8 +564,11 @@ def add(name, **kwargs): + "result": False, + "changes": {}, + } ++ current_schedule = list_( ++ show_all=True, return_yaml=False, offline=kwargs.get("offline") ++ ) + +- if name in list_(show_all=True, return_yaml=False): ++ if name in current_schedule: + ret["comment"] = "Job {} already exists in schedule.".format(name) + ret["result"] = False + return ret +@@ -486,32 +608,56 @@ def add(name, **kwargs): + ret["comment"] = "Job: {} would be added to schedule.".format(name) + ret["result"] = True + else: +- try: +- with salt.utils.event.get_event("minion", opts=__opts__) as event_bus: +- res = __salt__["event.fire"]( +- { +- "name": name, +- "schedule": schedule_data, +- "func": "add", +- "persist": persist, +- }, +- "manage_schedule", ++ if kwargs.get("offline"): ++ current_schedule.update(schedule_data) ++ ++ schedule_conf = _get_schedule_config_file() ++ ++ try: ++ with salt.utils.files.fopen(schedule_conf, "wb+") as fp_: ++ fp_.write( ++ salt.utils.stringutils.to_bytes( ++ salt.utils.yaml.safe_dump({"schedule": current_schedule}) ++ ) ++ ) ++ except OSError: ++ log.error( ++ "Failed to persist the updated schedule", ++ exc_info_on_loglevel=logging.DEBUG, + ) +- if res: +- event_ret = event_bus.get_event( +- tag="/salt/minion/minion_schedule_add_complete", +- wait=30, ++ ++ ret["result"] = True ++ ret["comment"] = "Added job: {} to schedule.".format(name) ++ ret["changes"][name] = "added" ++ else: ++ try: ++ with salt.utils.event.get_event("minion", opts=__opts__) as event_bus: ++ res = __salt__["event.fire"]( 
++ { ++ "name": name, ++ "schedule": schedule_data, ++ "func": "add", ++ "persist": persist, ++ }, ++ "manage_schedule", + ) +- if event_ret and event_ret["complete"]: +- schedule = event_ret["schedule"] +- if name in schedule: +- ret["result"] = True +- ret["comment"] = "Added job: {} to schedule.".format(name) +- ret["changes"][name] = "added" +- return ret +- except KeyError: +- # Effectively a no-op, since we can't really return without an event system +- ret["comment"] = "Event module not available. Schedule add failed." ++ if res: ++ event_ret = event_bus.get_event( ++ tag="/salt/minion/minion_schedule_add_complete", ++ wait=30, ++ ) ++ if event_ret and event_ret["complete"]: ++ schedule = event_ret["schedule"] ++ if name in schedule: ++ ret["result"] = True ++ ret["comment"] = "Added job: {} to schedule.".format( ++ name ++ ) ++ ret["changes"][name] = "added" ++ return ret ++ except KeyError: ++ # Effectively a no-op, since we can't really return without an event system ++ ret["comment"] = "Event module not available. Schedule add failed." + return ret + + +@@ -524,6 +670,10 @@ def modify(name, **kwargs): + .. code-block:: bash + + salt '*' schedule.modify job1 function='test.ping' seconds=3600 ++ ++ # Modify job on Salt minion when the Salt minion is not running ++ salt '*' schedule.modify job1 function='test.ping' seconds=3600 offline=True ++ + """ + + ret = {"comment": "", "changes": {}, "result": True} +@@ -549,7 +699,9 @@ def modify(name, **kwargs): + ret["comment"] = 'Unable to use "when" and "cron" options together. Ignoring.' 
+ return ret + +- current_schedule = list_(show_all=True, return_yaml=False) ++ current_schedule = list_( ++ show_all=True, return_yaml=False, offline=kwargs.get("offline") ++ ) + + if name not in current_schedule: + ret["comment"] = "Job {} does not exist in schedule.".format(name) +@@ -566,8 +718,7 @@ def modify(name, **kwargs): + _current["seconds"] = _current.pop("_seconds") + + # Copy _current _new, then update values from kwargs +- _new = pycopy.deepcopy(_current) +- _new.update(kwargs) ++ _new = build_schedule_item(name, **kwargs) + + # Remove test from kwargs, it's not a valid schedule option + _new.pop("test", None) +@@ -587,29 +738,51 @@ def modify(name, **kwargs): + if "test" in kwargs and kwargs["test"]: + ret["comment"] = "Job: {} would be modified in schedule.".format(name) + else: +- persist = kwargs.get("persist", True) +- if name in list_(show_all=True, where="opts", return_yaml=False): +- event_data = { +- "name": name, +- "schedule": _new, +- "func": "modify", +- "persist": persist, +- } +- elif name in list_(show_all=True, where="pillar", return_yaml=False): +- event_data = { +- "name": name, +- "schedule": _new, +- "where": "pillar", +- "func": "modify", +- "persist": False, +- } ++ if kwargs.get("offline"): ++ current_schedule[name].update(_new) + +- out = __salt__["event.fire"](event_data, "manage_schedule") +- if out: ++ schedule_conf = _get_schedule_config_file() ++ ++ try: ++ with salt.utils.files.fopen(schedule_conf, "wb+") as fp_: ++ fp_.write( ++ salt.utils.stringutils.to_bytes( ++ salt.utils.yaml.safe_dump({"schedule": current_schedule}) ++ ) ++ ) ++ except OSError: ++ log.error( ++ "Failed to persist the updated schedule", ++ exc_info_on_loglevel=logging.DEBUG, ++ ) ++ ++ ret["result"] = True + ret["comment"] = "Modified job: {} in schedule.".format(name) ++ + else: +- ret["comment"] = "Failed to modify job {} in schedule.".format(name) +- ret["result"] = False ++ persist = kwargs.get("persist", True) ++ if name in 
list_(show_all=True, where="opts", return_yaml=False): ++ event_data = { ++ "name": name, ++ "schedule": _new, ++ "func": "modify", ++ "persist": persist, ++ } ++ elif name in list_(show_all=True, where="pillar", return_yaml=False): ++ event_data = { ++ "name": name, ++ "schedule": _new, ++ "where": "pillar", ++ "func": "modify", ++ "persist": False, ++ } ++ ++ out = __salt__["event.fire"](event_data, "manage_schedule") ++ if out: ++ ret["comment"] = "Modified job: {} in schedule.".format(name) ++ else: ++ ret["comment"] = "Failed to modify job {} in schedule.".format(name) ++ ret["result"] = False + return ret + + +diff --git a/tests/pytests/unit/modules/test_schedule.py b/tests/pytests/unit/modules/test_schedule.py +index e6cb134982..02914be82f 100644 +--- a/tests/pytests/unit/modules/test_schedule.py ++++ b/tests/pytests/unit/modules/test_schedule.py +@@ -8,7 +8,8 @@ import pytest + import salt.modules.schedule as schedule + import salt.utils.odict + from salt.utils.event import SaltEvent +-from tests.support.mock import MagicMock, patch ++from salt.utils.odict import OrderedDict ++from tests.support.mock import MagicMock, call, mock_open, patch + + log = logging.getLogger(__name__) + +@@ -29,6 +30,11 @@ def sock_dir(tmp_path): + return str(tmp_path / "test-socks") + + ++@pytest.fixture ++def schedule_config_file(tmp_path): ++ return "/etc/salt/minion.d/_schedule.conf" ++ ++ + @pytest.fixture + def configure_loader_modules(): + return {schedule: {}} +@@ -36,24 +42,56 @@ def configure_loader_modules(): + + # 'purge' function tests: 1 + @pytest.mark.slow_test +-def test_purge(sock_dir): ++def test_purge(sock_dir, job1, schedule_config_file): + """ + Test if it purge all the jobs currently scheduled on the minion. 
+ """ ++ _schedule_data = {"job1": job1} + with patch.dict(schedule.__opts__, {"schedule": {}, "sock_dir": sock_dir}): + mock = MagicMock(return_value=True) + with patch.dict(schedule.__salt__, {"event.fire": mock}): + _ret_value = {"complete": True, "schedule": {}} + with patch.object(SaltEvent, "get_event", return_value=_ret_value): +- assert schedule.purge() == { +- "comment": ["Deleted job: schedule from schedule."], ++ with patch.object( ++ schedule, "list_", MagicMock(return_value=_schedule_data) ++ ): ++ assert schedule.purge() == { ++ "comment": ["Deleted job: job1 from schedule."], ++ "changes": {"job1": "removed"}, ++ "result": True, ++ } ++ ++ _schedule_data = {"job1": job1, "job2": job1, "job3": job1} ++ comm = [ ++ "Deleted job: job1 from schedule.", ++ "Deleted job: job2 from schedule.", ++ "Deleted job: job3 from schedule.", ++ ] ++ ++ changes = {"job1": "removed", "job2": "removed", "job3": "removed"} ++ ++ with patch.dict( ++ schedule.__opts__, {"schedule": {"job1": "salt"}, "sock_dir": sock_dir} ++ ): ++ with patch("salt.utils.files.fopen", mock_open(read_data="")) as fopen_mock: ++ with patch.object( ++ schedule, "list_", MagicMock(return_value=_schedule_data) ++ ): ++ assert schedule.purge(offline=True) == { ++ "comment": comm, ++ "changes": changes, + "result": True, + } ++ _call = call(b"schedule: {}\n") ++ write_calls = fopen_mock.filehandles[schedule_config_file][ ++ 0 ++ ].write._mock_mock_calls ++ assert _call in write_calls + + + # 'delete' function tests: 1 + @pytest.mark.slow_test +-def test_delete(sock_dir): ++def test_delete(sock_dir, job1, schedule_config_file): + """ + Test if it delete a job from the minion's schedule. + """ +@@ -68,6 +106,28 @@ def test_delete(sock_dir): + "result": False, + } + ++ _schedule_data = {"job1": job1} ++ comm = "Deleted Job job1 from schedule." 
++ changes = {"job1": "removed"} ++ with patch.dict( ++ schedule.__opts__, {"schedule": {"job1": "salt"}, "sock_dir": sock_dir} ++ ): ++ with patch("salt.utils.files.fopen", mock_open(read_data="")) as fopen_mock: ++ with patch.object( ++ schedule, "list_", MagicMock(return_value=_schedule_data) ++ ): ++ assert schedule.delete("job1", offline="True") == { ++ "comment": comm, ++ "changes": changes, ++ "result": True, ++ } ++ ++ _call = call(b"schedule: {}\n") ++ write_calls = fopen_mock.filehandles[schedule_config_file][ ++ 0 ++ ].write._mock_mock_calls ++ assert _call in write_calls ++ + + # 'build_schedule_item' function tests: 1 + def test_build_schedule_item(sock_dir): +@@ -120,7 +180,7 @@ def test_build_schedule_item_invalid_when(sock_dir): + + + @pytest.mark.slow_test +-def test_add(sock_dir): ++def test_add(sock_dir, schedule_config_file): + """ + Test if it add a job to the schedule. + """ +@@ -163,6 +223,24 @@ def test_add(sock_dir): + "result": True, + } + ++ comm1 = "Added job: job3 to schedule." ++ changes1 = {"job3": "added"} ++ with patch.dict( ++ schedule.__opts__, {"schedule": {"job1": "salt"}, "sock_dir": sock_dir} ++ ): ++ with patch("salt.utils.files.fopen", mock_open(read_data="")) as fopen_mock: ++ assert schedule.add( ++ "job3", function="test.ping", seconds=3600, offline="True" ++ ) == {"comment": comm1, "changes": changes1, "result": True} ++ ++ _call = call( ++ b"schedule:\n job3: {function: test.ping, seconds: 3600, maxrunning: 1, name: job3, enabled: true,\n jid_include: true}\n" ++ ) ++ write_calls = fopen_mock.filehandles[schedule_config_file][ ++ 1 ++ ].write._mock_mock_calls ++ assert _call in write_calls ++ + + # 'run_job' function tests: 1 + +@@ -444,7 +522,7 @@ def test_copy(sock_dir, job1): + + + @pytest.mark.slow_test +-def test_modify(sock_dir): ++def test_modify(sock_dir, job1, schedule_config_file): + """ + Test if modifying job to the schedule. 
+ """ +@@ -564,7 +642,6 @@ def test_modify(sock_dir): + for key in [ + "maxrunning", + "function", +- "seconds", + "jid_include", + "name", + "enabled", +@@ -586,6 +663,51 @@ def test_modify(sock_dir): + ret = schedule.modify("job2", function="test.version", test=True) + assert ret == expected5 + ++ _schedule_data = {"job1": job1} ++ comm = "Modified job: job1 in schedule." ++ changes = {"job1": "removed"} ++ ++ changes = { ++ "job1": { ++ "new": OrderedDict( ++ [ ++ ("function", "test.version"), ++ ("maxrunning", 1), ++ ("name", "job1"), ++ ("enabled", True), ++ ("jid_include", True), ++ ] ++ ), ++ "old": OrderedDict( ++ [ ++ ("function", "test.ping"), ++ ("maxrunning", 1), ++ ("name", "job1"), ++ ("jid_include", True), ++ ("enabled", True), ++ ] ++ ), ++ } ++ } ++ with patch.dict( ++ schedule.__opts__, {"schedule": {"job1": "salt"}, "sock_dir": sock_dir} ++ ): ++ with patch("salt.utils.files.fopen", mock_open(read_data="")) as fopen_mock: ++ with patch.object( ++ schedule, "list_", MagicMock(return_value=_schedule_data) ++ ): ++ assert schedule.modify( ++ "job1", function="test.version", offline="True" ++ ) == {"comment": comm, "changes": changes, "result": True} ++ ++ _call = call( ++ b"schedule:\n job1: {enabled: true, function: test.version, jid_include: true, maxrunning: 1,\n name: job1}\n" ++ ) ++ write_calls = fopen_mock.filehandles[schedule_config_file][ ++ 0 ++ ].write._mock_mock_calls ++ assert _call in write_calls ++ + + # 'is_enabled' function tests: 1 + +-- +2.37.2 + + diff --git a/fix-traceback.print_exc-calls-for-test_pip_state-432.patch b/fix-traceback.print_exc-calls-for-test_pip_state-432.patch index 822521f..9bcf3f5 100644 --- a/fix-traceback.print_exc-calls-for-test_pip_state-432.patch +++ b/fix-traceback.print_exc-calls-for-test_pip_state-432.patch @@ -1,4 +1,4 @@ -From 8b07b529458e1fc007c8ee848d66035b3403a22d Mon Sep 17 00:00:00 2001 +From 929942b15f377df21ae076ef9e25cf83639b1850 Mon Sep 17 00:00:00 2001 From: Victor Zhestkov 
<35733135+vzhestkov@users.noreply.github.com> Date: Mon, 8 Nov 2021 17:43:02 +0300 Subject: [PATCH] Fix traceback.print_exc calls for test_pip_state (#432) @@ -8,10 +8,10 @@ Subject: [PATCH] Fix traceback.print_exc calls for test_pip_state (#432) 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tests/unit/states/test_pip_state.py b/tests/unit/states/test_pip_state.py -index 93b8b3bd71..9531b6d2be 100644 +index 9f5be295be..1074a1989f 100644 --- a/tests/unit/states/test_pip_state.py +++ b/tests/unit/states/test_pip_state.py -@@ -441,7 +441,7 @@ class PipStateInstallationErrorTest(TestCase): +@@ -445,7 +445,7 @@ class PipStateInstallationErrorTest(TestCase): sys.stdout.flush() sys.exit(2) except Exception as exc: @@ -21,6 +21,6 @@ index 93b8b3bd71..9531b6d2be 100644 sys.exit(3) sys.exit(0) -- -2.37.3 +2.34.1 diff --git a/fix-wrong-test_mod_del_repo_multiline_values-test-af.patch b/fix-wrong-test_mod_del_repo_multiline_values-test-af.patch new file mode 100644 index 0000000..9e4c705 --- /dev/null +++ b/fix-wrong-test_mod_del_repo_multiline_values-test-af.patch @@ -0,0 +1,82 @@ +From e3ef9165b66c3d74a3c3dbfe82ba58f7fa1613e2 Mon Sep 17 00:00:00 2001 +From: =?UTF-8?q?Pablo=20Su=C3=A1rez=20Hern=C3=A1ndez?= + +Date: Thu, 12 Mar 2020 13:26:51 +0000 +Subject: [PATCH] Fix wrong test_mod_del_repo_multiline_values test after + rebase + +--- + tests/integration/modules/test_pkg.py | 34 +++++++++++++++++++++------ + 1 file changed, 27 insertions(+), 7 deletions(-) + +diff --git a/tests/integration/modules/test_pkg.py b/tests/integration/modules/test_pkg.py +index ccf69998fc..6a84ea0bc3 100644 +--- a/tests/integration/modules/test_pkg.py ++++ b/tests/integration/modules/test_pkg.py +@@ -138,6 +138,10 @@ class PkgModuleTest(ModuleCase, SaltReturnAssertsMixin): + self.run_function("pkg.del_repo", [repo]) + + @pytest.mark.slow_test ++ @pytest.mark.destructive_test ++ @pytest.mark.requires_salt_modules("pkg.mod_repo", "pkg.del_repo", "pkg.get_repo") ++ 
@pytest.mark.requires_network() ++ @requires_system_grains + def test_mod_del_repo_multiline_values(self): + """ + test modifying and deleting a software repository defined with multiline values +@@ -145,10 +149,13 @@ class PkgModuleTest(ModuleCase, SaltReturnAssertsMixin): + os_grain = self.run_function("grains.item", ["os"])["os"] + repo = None + try: +- if os_grain in ["CentOS", "RedHat", "VMware Photon OS"]: ++ if os_grain in ["CentOS", "RedHat", "VMware Photon OS", "SUSE"]: + my_baseurl = ( + "http://my.fake.repo/foo/bar/\n http://my.fake.repo.alt/foo/bar/" + ) ++ expected_get_repo_baseurl_zypp = ( ++ "http://my.fake.repo/foo/bar/%0A%20http://my.fake.repo.alt/foo/bar/" ++ ) + expected_get_repo_baseurl = ( + "http://my.fake.repo/foo/bar/\nhttp://my.fake.repo.alt/foo/bar/" + ) +@@ -174,17 +181,30 @@ class PkgModuleTest(ModuleCase, SaltReturnAssertsMixin): + enabled=enabled, + failovermethod=failovermethod, + ) +- # return data from pkg.mod_repo contains the file modified at +- # the top level, so use next(iter(ret)) to get that key + self.assertNotEqual(ret, {}) +- repo_info = ret[next(iter(ret))] ++ repo_info = {repo: ret} + self.assertIn(repo, repo_info) +- self.assertEqual(repo_info[repo]["baseurl"], my_baseurl) ++ if os_grain == "SUSE": ++ self.assertEqual( ++ repo_info[repo]["baseurl"], expected_get_repo_baseurl_zypp ++ ) ++ else: ++ self.assertEqual(repo_info[repo]["baseurl"], my_baseurl) + ret = self.run_function("pkg.get_repo", [repo]) +- self.assertEqual(ret["baseurl"], expected_get_repo_baseurl) ++ if os_grain == "SUSE": ++ self.assertEqual( ++ repo_info[repo]["baseurl"], expected_get_repo_baseurl_zypp ++ ) ++ else: ++ self.assertEqual(ret["baseurl"], expected_get_repo_baseurl) + self.run_function("pkg.mod_repo", [repo]) + ret = self.run_function("pkg.get_repo", [repo]) +- self.assertEqual(ret["baseurl"], expected_get_repo_baseurl) ++ if os_grain == "SUSE": ++ self.assertEqual( ++ repo_info[repo]["baseurl"], expected_get_repo_baseurl_zypp ++ ) ++ else: 
++ self.assertEqual(ret["baseurl"], expected_get_repo_baseurl) + finally: + if repo is not None: + self.run_function("pkg.del_repo", [repo]) +-- +2.33.0 + + diff --git a/fixes-56144-to-enable-hotadd-profile-support.patch b/fixes-56144-to-enable-hotadd-profile-support.patch new file mode 100644 index 0000000..f2d6edf --- /dev/null +++ b/fixes-56144-to-enable-hotadd-profile-support.patch @@ -0,0 +1,63 @@ +From 0def15837c3470f20ce85ec81e2c1d42cd933c23 Mon Sep 17 00:00:00 2001 +From: nicholasmhughes +Date: Fri, 14 Feb 2020 22:03:42 -0500 +Subject: [PATCH] fixes #56144 to enable hotadd profile support + +--- + doc/topics/cloud/vmware.rst | 8 ++++++++ + salt/cloud/clouds/vmware.py | 12 ++++++++++++ + 2 files changed, 20 insertions(+) + +diff --git a/doc/topics/cloud/vmware.rst b/doc/topics/cloud/vmware.rst +index bbc5cdff11..1a18ebf226 100644 +--- a/doc/topics/cloud/vmware.rst ++++ b/doc/topics/cloud/vmware.rst +@@ -457,6 +457,14 @@ Set up an initial profile at ``/etc/salt/cloud.profiles`` or + Specifies whether the new virtual machine should be powered on or not. If + ``template: True`` is set, this field is ignored. Default is ``power_on: True``. + ++``cpu_hot_add`` ++ Boolean value that enables hot-add support for modifying CPU resources while ++ the guest is powered on. ++ ++``mem_hot_add`` ++ Boolean value that enables hot-add support for modifying memory resources while ++ the guest is powered on. ++ + ``extra_config`` + Specifies the additional configuration information for the virtual machine. This + describes a set of modifications to the additional options. 
If the key is already +diff --git a/salt/cloud/clouds/vmware.py b/salt/cloud/clouds/vmware.py +index 1e9943ad78..4999ca089f 100644 +--- a/salt/cloud/clouds/vmware.py ++++ b/salt/cloud/clouds/vmware.py +@@ -2821,6 +2821,12 @@ def create(vm_): + win_run_once = config.get_cloud_config_value( + "win_run_once", vm_, __opts__, search_global=False, default=None + ) ++ cpu_hot_add = config.get_cloud_config_value( ++ 'cpu_hot_add', vm_, __opts__, search_global=False, default=None ++ ) ++ mem_hot_add = config.get_cloud_config_value( ++ 'mem_hot_add', vm_, __opts__, search_global=False, default=None ++ ) + + # Get service instance object + si = _get_si() +@@ -3039,6 +3045,12 @@ def create(vm_): + ) + config_spec.deviceChange = specs["device_specs"] + ++ if cpu_hot_add and hasattr(config_spec, 'cpuHotAddEnabled'): ++ config_spec.cpuHotAddEnabled = bool(cpu_hot_add) ++ ++ if mem_hot_add and hasattr(config_spec, 'memoryHotAddEnabled'): ++ config_spec.memoryHotAddEnabled = bool(mem_hot_add) ++ + if extra_config: + for key, value in extra_config.items(): + option = vim.option.OptionValue(key=key, value=value) +-- +2.33.0 + + diff --git a/fixes-for-python-3.10-502.patch b/fixes-for-python-3.10-502.patch index 053ed45..39ab5a6 100644 --- a/fixes-for-python-3.10-502.patch +++ b/fixes-for-python-3.10-502.patch @@ -1,15 +1,65 @@ -From d5fae6bd4a4f243115ee220393e05440f0d48b5a Mon Sep 17 00:00:00 2001 +From efcf52ad6b7edf64e6a2eaead99c8df5894ab613 Mon Sep 17 00:00:00 2001 From: Victor Zhestkov Date: Tue, 5 Apr 2022 12:04:46 +0300 Subject: [PATCH] Fixes for Python 3.10 (#502) -* Use collections.abc.Mapping instead collections.Mapping in state ---- - salt/state.py | 5 +++-- - 1 file changed, 3 insertions(+), 2 deletions(-) +* Fix _compat.py importlib logic for Python 3.10 +Use the same logic in _compat.py and entrypoints.py to load +the same importlib.metadata. Python's built in implementation for +Python >= 3.10 and the Salt one for others. 
+ +* Use collections.abc.Mapping instead collections.Mapping in state + +Co-authored-by: piterpunk +--- + salt/_compat.py | 30 +++++++++++++++++------------- + salt/state.py | 5 +++-- + 2 files changed, 20 insertions(+), 15 deletions(-) + +diff --git a/salt/_compat.py b/salt/_compat.py +index 8149657bea..a402f17a3c 100644 +--- a/salt/_compat.py ++++ b/salt/_compat.py +@@ -11,19 +11,23 @@ if sys.version_info >= (3, 9, 5): + else: + import salt.ext.ipaddress as ipaddress + ++if sys.version_info >= (3, 10): ++ # Python 3.10 will include a fix in importlib.metadata which allows us to ++ # get the distribution of a loaded entry-point ++ import importlib.metadata # pylint: disable=no-member,no-name-in-module ++else: ++ # importlib_metadata before version 3.3.0 does not include the functionality we need. ++ try: ++ import importlib_metadata + +-# importlib_metadata before version 3.3.0 does not include the functionality we need. +-try: +- import importlib_metadata +- +- importlib_metadata_version = [ +- int(part) +- for part in importlib_metadata.version("importlib_metadata").split(".") +- if part.isdigit() +- ] +- if tuple(importlib_metadata_version) < (3, 3, 0): ++ importlib_metadata_version = [ ++ int(part) ++ for part in importlib_metadata.version("importlib_metadata").split(".") ++ if part.isdigit() ++ ] ++ if tuple(importlib_metadata_version) < (3, 3, 0): ++ # Use the vendored importlib_metadata ++ import salt.ext.importlib_metadata as importlib_metadata ++ except ImportError: + # Use the vendored importlib_metadata + import salt.ext.importlib_metadata as importlib_metadata +-except ImportError: +- # Use the vendored importlib_metadata +- import salt.ext.importlib_metadata as importlib_metadata diff --git a/salt/state.py b/salt/state.py -index f5579fbb69..61519d5042 100644 +index 3361a537cd..b759c8e0ee 100644 --- a/salt/state.py +++ b/salt/state.py @@ -12,7 +12,6 @@ The data sent to the state calls is as follows: @@ -26,10 +76,10 @@ index f5579fbb69..61519d5042 
100644 +from collections.abc import Mapping + - import salt.channel.client import salt.fileclient import salt.loader -@@ -3405,7 +3406,7 @@ class State: + import salt.minion +@@ -3276,7 +3277,7 @@ class State: """ for chunk in high: state = high[chunk] @@ -39,6 +89,6 @@ index f5579fbb69..61519d5042 100644 for state_ref in state: needs_default = True -- -2.37.3 +2.35.1 diff --git a/fopen-workaround-bad-buffering-for-binary-mode-563.patch b/fopen-workaround-bad-buffering-for-binary-mode-563.patch index e68ddd2..c936a05 100644 --- a/fopen-workaround-bad-buffering-for-binary-mode-563.patch +++ b/fopen-workaround-bad-buffering-for-binary-mode-563.patch @@ -1,8 +1,9 @@ -From 7d5b1d2178d0573f137b9481ded85419a36998ff Mon Sep 17 00:00:00 2001 +From 6c1c81aba71711632a14b725426077f9183065e9 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Pablo=20Su=C3=A1rez=20Hern=C3=A1ndez?= Date: Thu, 6 Oct 2022 10:55:50 +0100 -Subject: [PATCH] fopen: Workaround bad buffering for binary mode (#563) +Subject: [PATCH] fopen: Workaround bad buffering for binary mode + (#563) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit diff --git a/force-zyppnotify-to-prefer-packages.db-than-packages.patch b/force-zyppnotify-to-prefer-packages.db-than-packages.patch new file mode 100644 index 0000000..6203d03 --- /dev/null +++ b/force-zyppnotify-to-prefer-packages.db-than-packages.patch @@ -0,0 +1,29 @@ +From 36b107fb5108fe4e52e9ef522765d6ada588c50d Mon Sep 17 00:00:00 2001 +From: Victor Zhestkov +Date: Wed, 9 Dec 2020 14:58:55 +0300 +Subject: [PATCH] Force zyppnotify to prefer Packages.db than Packages + if it exists + +--- + scripts/suse/zypper/plugins/commit/zyppnotify | 4 +++- + 1 file changed, 3 insertions(+), 1 deletion(-) + +diff --git a/scripts/suse/zypper/plugins/commit/zyppnotify b/scripts/suse/zypper/plugins/commit/zyppnotify +index 51ac02254e..d6a1bef42b 100755 +--- a/scripts/suse/zypper/plugins/commit/zyppnotify ++++ 
b/scripts/suse/zypper/plugins/commit/zyppnotify +@@ -20,7 +20,9 @@ class DriftDetector(Plugin): + def __init__(self): + Plugin.__init__(self) + self.ck_path = "/var/cache/salt/minion/rpmdb.cookie" +- self.rpm_path = "/var/lib/rpm/Packages" ++ self.rpm_path = "/var/lib/rpm/Packages.db" ++ if not os.path.exists(self.rpm_path): ++ self.rpm_path = "/var/lib/rpm/Packages" + + def _get_mtime(self): + """ +-- +2.29.2 + + diff --git a/html.tar.bz2 b/html.tar.bz2 index 4eabcb6..ea9acfd 100644 --- a/html.tar.bz2 +++ b/html.tar.bz2 @@ -1,3 +1,3 @@ version https://git-lfs.github.com/spec/v1 -oid sha256:82795a90cebe98e39a7fc513144a45686bcbe13806c1ad233cb371c120af2bc7 -size 10525087 +oid sha256:2b1610ccab5866f29f7d1934f315006954ba60a31bbc3c60c07b44a5ea018a06 +size 10429711 diff --git a/ignore-erros-on-reading-license-files-with-dpkg_lowp.patch b/ignore-erros-on-reading-license-files-with-dpkg_lowp.patch index 3b9c808..6bba194 100644 --- a/ignore-erros-on-reading-license-files-with-dpkg_lowp.patch +++ b/ignore-erros-on-reading-license-files-with-dpkg_lowp.patch @@ -1,4 +1,4 @@ -From 7cac5f67eb0d586314f9e7c987b8a620e28eeac3 Mon Sep 17 00:00:00 2001 +From 90d0e3ce40e46a9bff3e477b61e02cf3e87e8b9f Mon Sep 17 00:00:00 2001 From: Victor Zhestkov Date: Mon, 27 Jun 2022 17:55:49 +0300 Subject: [PATCH] Ignore erros on reading license files with dpkg_lowpkg @@ -51,6 +51,6 @@ index 0000000000..1a89660c02 + assert license_read_mock.calls[0].args[0] == "/usr/share/doc/bash/copyright" + assert license_read_mock.calls[0].kwargs["errors"] == "ignore" -- -2.37.3 +2.36.1 diff --git a/ignore-extend-declarations-from-excluded-sls-files.patch b/ignore-extend-declarations-from-excluded-sls-files.patch deleted file mode 100644 index 619ac0a..0000000 --- a/ignore-extend-declarations-from-excluded-sls-files.patch +++ /dev/null @@ -1,250 +0,0 @@ -From e4aff9ca68ce142c87ec875846d8916b9df8e6c5 Mon Sep 17 00:00:00 2001 -From: Alexander Graul -Date: Fri, 21 Oct 2022 14:39:52 +0200 -Subject: [PATCH] Ignore 
extend declarations from excluded sls files - -* Use both test sls files - -(cherry picked from commit 3cb5f5a14ff68d0bd809a4adba7d820534d0f7c7) - -* Test that excluded sls files can't extend others - -(cherry picked from commit e91c1a608b3c016b2c30bf324e969cd097ddf776) - -* Ignore extend declarations from excluded sls files - -sls files that are excluded should not affect other sls files by -extending their states. Exclude statements are processed very late in -the state processing pipeline to ensure they are not overridden. By that -time, extend declarations are already processed. - -Luckily, it's not necessary to change much, during the extend -declarations processing it is easy to check if the sls file that -contains a given extend declaration is excluded. - -(cherry picked from commit 856b23c45dd3be78d8879a0b0c4aa6356afea3cf) ---- - changelog/62082.fixed | 1 + - salt/state.py | 19 +++ - .../unit/state/test_state_highstate.py | 152 +++++++++++++++++- - 3 files changed, 171 insertions(+), 1 deletion(-) - create mode 100644 changelog/62082.fixed - -diff --git a/changelog/62082.fixed b/changelog/62082.fixed -new file mode 100644 -index 0000000000..02e5f5ff40 ---- /dev/null -+++ b/changelog/62082.fixed -@@ -0,0 +1 @@ -+Ignore extend declarations in sls files that are excluded. 
-diff --git a/salt/state.py b/salt/state.py -index 316dcdec63..f5579fbb69 100644 ---- a/salt/state.py -+++ b/salt/state.py -@@ -1680,6 +1680,25 @@ class State: - else: - name = ids[0][0] - -+ sls_excludes = [] -+ # excluded sls are plain list items or dicts with an "sls" key -+ for exclude in high.get("__exclude__", []): -+ if isinstance(exclude, str): -+ sls_excludes.append(exclude) -+ elif exclude.get("sls"): -+ sls_excludes.append(exclude["sls"]) -+ -+ if body.get("__sls__") in sls_excludes: -+ log.debug( -+ "Cannot extend ID '%s' in '%s:%s' because '%s:%s' is excluded.", -+ name, -+ body.get("__env__", "base"), -+ body.get("__sls__", "base"), -+ body.get("__env__", "base"), -+ body.get("__sls__", "base"), -+ ) -+ continue -+ - for state, run in body.items(): - if state.startswith("__"): - continue -diff --git a/tests/pytests/unit/state/test_state_highstate.py b/tests/pytests/unit/state/test_state_highstate.py -index 059f83fd9f..7c72cc8e09 100644 ---- a/tests/pytests/unit/state/test_state_highstate.py -+++ b/tests/pytests/unit/state/test_state_highstate.py -@@ -3,9 +3,11 @@ - """ - - import logging -+import textwrap - - import pytest # pylint: disable=unused-import - import salt.state -+from salt.utils.odict import OrderedDict - - log = logging.getLogger(__name__) - -@@ -180,7 +182,7 @@ def test_find_sls_ids_with_exclude(highstate, state_tree_dir): - with pytest.helpers.temp_file( - "slsfile1.sls", slsfile1, sls_dir - ), pytest.helpers.temp_file( -- "slsfile1.sls", slsfile1, sls_dir -+ "slsfile2.sls", slsfile2, sls_dir - ), pytest.helpers.temp_file( - "stateB.sls", stateB, sls_dir - ), pytest.helpers.temp_file( -@@ -196,3 +198,151 @@ def test_find_sls_ids_with_exclude(highstate, state_tree_dir): - high, _ = highstate.render_highstate(matches) - ret = salt.state.find_sls_ids("issue-47182.stateA.newer", high) - assert ret == [("somestuff", "cmd")] -+ -+ -+def test_dont_extend_in_excluded_sls_file(highstate, state_tree_dir): -+ """ -+ See 
https://github.com/saltstack/salt/issues/62082#issuecomment-1245461333 -+ """ -+ top_sls = textwrap.dedent( -+ """\ -+ base: -+ '*': -+ - test1 -+ - exclude -+ """ -+ ) -+ exclude_sls = textwrap.dedent( -+ """\ -+ exclude: -+ - sls: test2 -+ """ -+ ) -+ test1_sls = textwrap.dedent( -+ """\ -+ include: -+ - test2 -+ -+ test1: -+ cmd.run: -+ - name: echo test1 -+ """ -+ ) -+ test2_sls = textwrap.dedent( -+ """\ -+ extend: -+ test1: -+ cmd.run: -+ - name: echo "override test1 in test2" -+ -+ test2-id: -+ cmd.run: -+ - name: echo test2 -+ """ -+ ) -+ sls_dir = str(state_tree_dir) -+ with pytest.helpers.temp_file( -+ "top.sls", top_sls, sls_dir -+ ), pytest.helpers.temp_file( -+ "test1.sls", test1_sls, sls_dir -+ ), pytest.helpers.temp_file( -+ "test2.sls", test2_sls, sls_dir -+ ), pytest.helpers.temp_file( -+ "exclude.sls", exclude_sls, sls_dir -+ ): -+ # manually compile the high data, error checking is not needed in this -+ # test case. -+ top = highstate.get_top() -+ matches = highstate.top_matches(top) -+ high, _ = highstate.render_highstate(matches) -+ -+ # high is mutated by call_high and the different "pipeline steps" -+ assert high == OrderedDict( -+ [ -+ ( -+ "__extend__", -+ [ -+ { -+ "test1": OrderedDict( -+ [ -+ ("__sls__", "test2"), -+ ("__env__", "base"), -+ ( -+ "cmd", -+ [ -+ OrderedDict( -+ [ -+ ( -+ "name", -+ 'echo "override test1 in test2"', -+ ) -+ ] -+ ), -+ "run", -+ ], -+ ), -+ ] -+ ) -+ } -+ ], -+ ), -+ ( -+ "test1", -+ OrderedDict( -+ [ -+ ( -+ "cmd", -+ [ -+ OrderedDict([("name", "echo test1")]), -+ "run", -+ {"order": 10001}, -+ ], -+ ), -+ ("__sls__", "test1"), -+ ("__env__", "base"), -+ ] -+ ), -+ ), -+ ( -+ "test2-id", -+ OrderedDict( -+ [ -+ ( -+ "cmd", -+ [ -+ OrderedDict([("name", "echo test2")]), -+ "run", -+ {"order": 10000}, -+ ], -+ ), -+ ("__sls__", "test2"), -+ ("__env__", "base"), -+ ] -+ ), -+ ), -+ ("__exclude__", [OrderedDict([("sls", "test2")])]), -+ ] -+ ) -+ highstate.state.call_high(high) -+ # assert that the extend 
declaration was not applied -+ assert high == OrderedDict( -+ [ -+ ( -+ "test1", -+ OrderedDict( -+ [ -+ ( -+ "cmd", -+ [ -+ OrderedDict([("name", "echo test1")]), -+ "run", -+ {"order": 10001}, -+ ], -+ ), -+ ("__sls__", "test1"), -+ ("__env__", "base"), -+ ] -+ ), -+ ) -+ ] -+ ) --- -2.37.3 - - diff --git a/ignore-non-utf8-characters-while-reading-files-with-.patch b/ignore-non-utf8-characters-while-reading-files-with-.patch index 4fb2242..65942b1 100644 --- a/ignore-non-utf8-characters-while-reading-files-with-.patch +++ b/ignore-non-utf8-characters-while-reading-files-with-.patch @@ -1,4 +1,4 @@ -From 3f1b1180ba34e9ab3a4453248c733f11aa193f1b Mon Sep 17 00:00:00 2001 +From b4945a0608b3d8996e8b5593dcc458c15b11d6ba Mon Sep 17 00:00:00 2001 From: Victor Zhestkov Date: Wed, 14 Sep 2022 14:57:29 +0300 Subject: [PATCH] Ignore non utf8 characters while reading files with @@ -32,10 +32,10 @@ index 0000000000..1ab74f9122 @@ -0,0 +1 @@ +Prevent possible tracebacks in core grains module by ignoring non utf8 characters in /proc/1/environ, /proc/1/cmdline, /proc/cmdline diff --git a/salt/grains/core.py b/salt/grains/core.py -index 047c33ffd3..76f3767ddf 100644 +index 9530a43fc5..b543144da2 100644 --- a/salt/grains/core.py +++ b/salt/grains/core.py -@@ -1089,7 +1089,9 @@ def _virtual(osdata): +@@ -1093,7 +1093,9 @@ def _virtual(osdata): if ("virtual_subtype" not in grains) or (grains["virtual_subtype"] != "LXC"): if os.path.isfile("/proc/1/environ"): try: @@ -46,7 +46,7 @@ index 047c33ffd3..76f3767ddf 100644 fhr_contents = fhr.read() if "container=lxc" in fhr_contents: grains["virtual"] = "container" -@@ -1909,7 +1911,9 @@ def os_data(): +@@ -1911,7 +1913,9 @@ def os_data(): grains["init"] = "systemd" except OSError: try: @@ -57,7 +57,7 @@ index 047c33ffd3..76f3767ddf 100644 init_cmdline = fhr.read().replace("\x00", " ").split() except OSError: pass -@@ -3154,7 +3158,9 @@ def kernelparams(): +@@ -3160,7 +3164,9 @@ def kernelparams(): return {} else: try: @@ -69,7 +69,7 @@ 
index 047c33ffd3..76f3767ddf 100644 grains = {"kernelparams": []} for data in [ diff --git a/tests/pytests/unit/grains/test_core.py b/tests/pytests/unit/grains/test_core.py -index 7c4ea1f17f..c06cdb2db0 100644 +index 84dd97d62f..e640a07f76 100644 --- a/tests/pytests/unit/grains/test_core.py +++ b/tests/pytests/unit/grains/test_core.py @@ -11,6 +11,7 @@ import os @@ -80,7 +80,7 @@ index 7c4ea1f17f..c06cdb2db0 100644 import textwrap from collections import namedtuple -@@ -2738,6 +2739,38 @@ def test_kernelparams_return_linux(cmdline, expectation): +@@ -2635,6 +2636,38 @@ def test_kernelparams_return_linux(cmdline, expectation): assert core.kernelparams() == expectation @@ -119,7 +119,7 @@ index 7c4ea1f17f..c06cdb2db0 100644 def test_linux_gpus(): """ Test GPU detection on Linux systems -@@ -2940,3 +2973,88 @@ def test_virtual_set_virtual_ec2(): +@@ -2837,3 +2870,88 @@ def test_virtual_set_virtual_ec2(): assert virtual_grains["virtual"] == "kvm" assert "virtual_subtype" not in virtual_grains @@ -211,4 +211,3 @@ index 7c4ea1f17f..c06cdb2db0 100644 -- 2.37.3 - diff --git a/implementation-of-held-unheld-functions-for-state-pk.patch b/implementation-of-held-unheld-functions-for-state-pk.patch new file mode 100644 index 0000000..0b834d5 --- /dev/null +++ b/implementation-of-held-unheld-functions-for-state-pk.patch @@ -0,0 +1,813 @@ +From 8e5295ef9047a9afdd2323508c633ab0356ef603 Mon Sep 17 00:00:00 2001 +From: Alexander Graul +Date: Wed, 19 Jan 2022 15:34:24 +0100 +Subject: [PATCH] Implementation of held/unheld functions for state pkg + (#387) + +* Implementation of held/unheld functions for state pkg +--- + salt/modules/zypperpkg.py | 119 ++++++- + salt/states/pkg.py | 310 +++++++++++++++++++ + tests/pytests/unit/modules/test_zypperpkg.py | 133 ++++++++ + tests/pytests/unit/states/test_pkg.py | 137 ++++++++ + 4 files changed, 686 insertions(+), 13 deletions(-) + +diff --git a/salt/modules/zypperpkg.py b/salt/modules/zypperpkg.py +index 4fc045c313..ac6c36a09f 100644 +--- 
a/salt/modules/zypperpkg.py ++++ b/salt/modules/zypperpkg.py +@@ -2103,6 +2103,76 @@ def purge( + return _uninstall(inclusion_detection, name=name, pkgs=pkgs, root=root) + + ++def list_holds(pattern=None, full=True, root=None, **kwargs): ++ """ ++ List information on locked packages. ++ ++ .. note:: ++ This function returns the computed output of ``list_locks`` ++ to show exact locked packages. ++ ++ pattern ++ Regular expression used to match the package name ++ ++ full : True ++ Show the full hold definition including version and epoch. Set to ++ ``False`` to return just the name of the package(s) being held. ++ ++ root ++ Operate on a different root directory. ++ ++ ++ CLI Example: ++ ++ .. code-block:: bash ++ ++ salt '*' pkg.list_holds ++ salt '*' pkg.list_holds full=False ++ """ ++ locks = list_locks(root=root) ++ ret = [] ++ inst_pkgs = {} ++ for solv_name, lock in locks.items(): ++ if lock.get("type", "package") != "package": ++ continue ++ try: ++ found_pkgs = search( ++ solv_name, ++ root=root, ++ match=None if "*" in solv_name else "exact", ++ case_sensitive=(lock.get("case_sensitive", "on") == "on"), ++ installed_only=True, ++ details=True, ++ all_versions=True, ++ ignore_no_matching_item=True, ++ ) ++ except CommandExecutionError: ++ continue ++ if found_pkgs: ++ for pkg in found_pkgs: ++ if pkg not in inst_pkgs: ++ inst_pkgs.update( ++ info_installed( ++ pkg, root=root, attr="edition,epoch", all_versions=True ++ ) ++ ) ++ ++ ptrn_re = re.compile(r"{}-\S+".format(pattern)) if pattern else None ++ for pkg_name, pkg_editions in inst_pkgs.items(): ++ for pkg_info in pkg_editions: ++ pkg_ret = ( ++ "{}-{}:{}.*".format( ++ pkg_name, pkg_info.get("epoch", 0), pkg_info.get("edition") ++ ) ++ if full ++ else pkg_name ++ ) ++ if pkg_ret not in ret and (not ptrn_re or ptrn_re.match(pkg_ret)): ++ ret.append(pkg_ret) ++ ++ return ret ++ ++ + def list_locks(root=None): + """ + List current package locks. 
+@@ -2173,7 +2243,7 @@ def clean_locks(root=None): + return out + + +-def unhold(name=None, pkgs=None, **kwargs): ++def unhold(name=None, pkgs=None, root=None, **kwargs): + """ + .. versionadded:: 3003 + +@@ -2187,6 +2257,9 @@ def unhold(name=None, pkgs=None, **kwargs): + A list of packages to unhold. The ``name`` parameter will be ignored if + this option is passed. + ++ root ++ Operate on a different root directory. ++ + CLI Example: + + .. code-block:: bash +@@ -2201,24 +2274,38 @@ def unhold(name=None, pkgs=None, **kwargs): + + targets = [] + if pkgs: +- for pkg in salt.utils.data.repack_dictlist(pkgs): +- targets.append(pkg) ++ targets.extend(pkgs) + else: + targets.append(name) + + locks = list_locks() + removed = [] +- missing = [] + + for target in targets: ++ version = None ++ if isinstance(target, dict): ++ (target, version) = next(iter(target.items())) + ret[target] = {"name": target, "changes": {}, "result": True, "comment": ""} + if locks.get(target): +- removed.append(target) +- ret[target]["changes"]["new"] = "" +- ret[target]["changes"]["old"] = "hold" +- ret[target]["comment"] = "Package {} is no longer held.".format(target) ++ lock_ver = None ++ if "version" in locks.get(target): ++ lock_ver = locks.get(target)["version"] ++ lock_ver = lock_ver.lstrip("= ") ++ if version and lock_ver != version: ++ ret[target]["result"] = False ++ ret[target][ ++ "comment" ++ ] = "Unable to unhold package {} as it is held with the other version.".format( ++ target ++ ) ++ else: ++ removed.append( ++ target if not lock_ver else "{}={}".format(target, lock_ver) ++ ) ++ ret[target]["changes"]["new"] = "" ++ ret[target]["changes"]["old"] = "hold" ++ ret[target]["comment"] = "Package {} is no longer held.".format(target) + else: +- missing.append(target) + ret[target]["comment"] = "Package {} was already unheld.".format(target) + + if removed: +@@ -2271,7 +2358,7 @@ def remove_lock(name, root=None, **kwargs): + return {"removed": len(removed), "not_found": missing} + + 
+-def hold(name=None, pkgs=None, **kwargs): ++def hold(name=None, pkgs=None, root=None, **kwargs): + """ + .. versionadded:: 3003 + +@@ -2285,6 +2372,10 @@ def hold(name=None, pkgs=None, **kwargs): + A list of packages to hold. The ``name`` parameter will be ignored if + this option is passed. + ++ root ++ Operate on a different root directory. ++ ++ + CLI Example: + + .. code-block:: bash +@@ -2299,8 +2390,7 @@ def hold(name=None, pkgs=None, **kwargs): + + targets = [] + if pkgs: +- for pkg in salt.utils.data.repack_dictlist(pkgs): +- targets.append(pkg) ++ targets.extend(pkgs) + else: + targets.append(name) + +@@ -2308,9 +2398,12 @@ def hold(name=None, pkgs=None, **kwargs): + added = [] + + for target in targets: ++ version = None ++ if isinstance(target, dict): ++ (target, version) = next(iter(target.items())) + ret[target] = {"name": target, "changes": {}, "result": True, "comment": ""} + if not locks.get(target): +- added.append(target) ++ added.append(target if not version else "{}={}".format(target, version)) + ret[target]["changes"]["new"] = "hold" + ret[target]["changes"]["old"] = "" + ret[target]["comment"] = "Package {} is now being held.".format(target) +diff --git a/salt/states/pkg.py b/salt/states/pkg.py +index f71f61e720..0d601e1aaf 100644 +--- a/salt/states/pkg.py ++++ b/salt/states/pkg.py +@@ -3644,3 +3644,313 @@ def mod_beacon(name, **kwargs): + ), + "result": False, + } ++ ++ ++def held(name, version=None, pkgs=None, replace=False, **kwargs): ++ """ ++ Set package in 'hold' state, meaning it will not be changed. ++ ++ :param str name: ++ The name of the package to be held. This parameter is ignored ++ if ``pkgs`` is used. ++ ++ :param str version: ++ Hold a specific version of a package. ++ Full description of this parameter is in `installed` function. ++ ++ .. note:: ++ ++ This parameter make sense for Zypper-based systems. ++ Ignored for YUM/DNF and APT ++ ++ :param list pkgs: ++ A list of packages to be held. 
All packages listed under ``pkgs`` ++ will be held. ++ ++ .. code-block:: yaml ++ ++ mypkgs: ++ pkg.held: ++ - pkgs: ++ - foo ++ - bar: 1.2.3-4 ++ - baz ++ ++ .. note:: ++ ++ For Zypper-based systems the package could be held for ++ the version specified. YUM/DNF and APT ingore it. ++ ++ :param bool replace: ++ Force replacement of existings holds with specified. ++ By default, this parameter is set to ``False``. ++ """ ++ ++ if isinstance(pkgs, list) and len(pkgs) == 0 and not replace: ++ return { ++ "name": name, ++ "changes": {}, ++ "result": True, ++ "comment": "No packages to be held provided", ++ } ++ ++ # If just a name (and optionally a version) is passed, just pack them into ++ # the pkgs argument. ++ if name and pkgs is None: ++ if version: ++ pkgs = [{name: version}] ++ version = None ++ else: ++ pkgs = [name] ++ ++ locks = {} ++ vr_lock = False ++ if "pkg.list_locks" in __salt__: ++ locks = __salt__["pkg.list_locks"]() ++ vr_lock = True ++ elif "pkg.list_holds" in __salt__: ++ _locks = __salt__["pkg.list_holds"](full=True) ++ lock_re = re.compile(r"^(.+)-(\d+):(.*)\.\*") ++ for lock in _locks: ++ match = lock_re.match(lock) ++ if match: ++ epoch = match.group(2) ++ if epoch == "0": ++ epoch = "" ++ else: ++ epoch = "{}:".format(epoch) ++ locks.update( ++ {match.group(1): {"version": "{}{}".format(epoch, match.group(3))}} ++ ) ++ else: ++ locks.update({lock: {}}) ++ elif "pkg.get_selections" in __salt__: ++ _locks = __salt__["pkg.get_selections"](state="hold") ++ for lock in _locks.get("hold", []): ++ locks.update({lock: {}}) ++ else: ++ return { ++ "name": name, ++ "changes": {}, ++ "result": False, ++ "comment": "No any function to get the list of held packages available.\n" ++ "Check if the package manager supports package locking.", ++ } ++ ++ if "pkg.hold" not in __salt__: ++ return { ++ "name": name, ++ "changes": {}, ++ "result": False, ++ "comment": "`hold` function is not implemented for the package manager.", ++ } ++ ++ ret = {"name": name, 
"changes": {}, "result": True, "comment": ""} ++ comments = [] ++ ++ held_pkgs = set() ++ for pkg in pkgs: ++ if isinstance(pkg, dict): ++ (pkg_name, pkg_ver) = next(iter(pkg.items())) ++ else: ++ pkg_name = pkg ++ pkg_ver = None ++ lock_ver = None ++ if pkg_name in locks and "version" in locks[pkg_name]: ++ lock_ver = locks[pkg_name]["version"] ++ lock_ver = lock_ver.lstrip("= ") ++ held_pkgs.add(pkg_name) ++ if pkg_name not in locks or (vr_lock and lock_ver != pkg_ver): ++ if __opts__["test"]: ++ if pkg_name in locks: ++ comments.append( ++ "The following package's hold rule would be updated: {}{}".format( ++ pkg_name, ++ "" if not pkg_ver else " (version = {})".format(pkg_ver), ++ ) ++ ) ++ else: ++ comments.append( ++ "The following package would be held: {}{}".format( ++ pkg_name, ++ "" if not pkg_ver else " (version = {})".format(pkg_ver), ++ ) ++ ) ++ else: ++ unhold_ret = None ++ if pkg_name in locks: ++ unhold_ret = __salt__["pkg.unhold"](name=name, pkgs=[pkg_name]) ++ hold_ret = __salt__["pkg.hold"](name=name, pkgs=[pkg]) ++ if not hold_ret.get(pkg_name, {}).get("result", False): ++ ret["result"] = False ++ if ( ++ unhold_ret ++ and unhold_ret.get(pkg_name, {}).get("result", False) ++ and hold_ret ++ and hold_ret.get(pkg_name, {}).get("result", False) ++ ): ++ comments.append( ++ "Package {} was updated with hold rule".format(pkg_name) ++ ) ++ elif hold_ret and hold_ret.get(pkg_name, {}).get("result", False): ++ comments.append("Package {} is now being held".format(pkg_name)) ++ else: ++ comments.append("Package {} was not held".format(pkg_name)) ++ ret["changes"].update(hold_ret) ++ ++ if replace: ++ for pkg_name in locks: ++ if locks[pkg_name].get("type", "package") != "package": ++ continue ++ if __opts__["test"]: ++ if pkg_name not in held_pkgs: ++ comments.append( ++ "The following package would be unheld: {}".format(pkg_name) ++ ) ++ else: ++ if pkg_name not in held_pkgs: ++ unhold_ret = __salt__["pkg.unhold"](name=name, pkgs=[pkg_name]) ++ if not 
unhold_ret.get(pkg_name, {}).get("result", False): ++ ret["result"] = False ++ if unhold_ret and unhold_ret.get(pkg_name, {}).get("comment"): ++ comments.append(unhold_ret.get(pkg_name).get("comment")) ++ ret["changes"].update(unhold_ret) ++ ++ ret["comment"] = "\n".join(comments) ++ if not (ret["changes"] or ret["comment"]): ++ ret["comment"] = "No changes made" ++ ++ return ret ++ ++ ++def unheld(name, version=None, pkgs=None, all=False, **kwargs): ++ """ ++ Unset package from 'hold' state, to allow operations with the package. ++ ++ :param str name: ++ The name of the package to be unheld. This parameter is ignored if "pkgs" ++ is used. ++ ++ :param str version: ++ Unhold a specific version of a package. ++ Full description of this parameter is in `installed` function. ++ ++ .. note:: ++ ++ This parameter make sense for Zypper-based systems. ++ Ignored for YUM/DNF and APT. ++ ++ :param list pkgs: ++ A list of packages to be unheld. All packages listed under ``pkgs`` ++ will be unheld. ++ ++ .. code-block:: yaml ++ ++ mypkgs: ++ pkg.unheld: ++ - pkgs: ++ - foo ++ - bar: 1.2.3-4 ++ - baz ++ ++ .. note:: ++ ++ For Zypper-based systems the package could be held for ++ the version specified. YUM/DNF and APT ingore it. ++ For ``unheld`` there is no need to specify the exact version ++ to be unheld. ++ ++ :param bool all: ++ Force removing of all existings locks. ++ By default, this parameter is set to ``False``. ++ """ ++ ++ if isinstance(pkgs, list) and len(pkgs) == 0 and not all: ++ return { ++ "name": name, ++ "changes": {}, ++ "result": True, ++ "comment": "No packages to be unheld provided", ++ } ++ ++ # If just a name (and optionally a version) is passed, just pack them into ++ # the pkgs argument. 
++ if name and pkgs is None: ++ pkgs = [{name: version}] ++ version = None ++ ++ locks = {} ++ vr_lock = False ++ if "pkg.list_locks" in __salt__: ++ locks = __salt__["pkg.list_locks"]() ++ vr_lock = True ++ elif "pkg.list_holds" in __salt__: ++ _locks = __salt__["pkg.list_holds"](full=True) ++ lock_re = re.compile(r"^(.+)-(\d+):(.*)\.\*") ++ for lock in _locks: ++ match = lock_re.match(lock) ++ if match: ++ epoch = match.group(2) ++ if epoch == "0": ++ epoch = "" ++ else: ++ epoch = "{}:".format(epoch) ++ locks.update( ++ {match.group(1): {"version": "{}{}".format(epoch, match.group(3))}} ++ ) ++ else: ++ locks.update({lock: {}}) ++ elif "pkg.get_selections" in __salt__: ++ _locks = __salt__["pkg.get_selections"](state="hold") ++ for lock in _locks.get("hold", []): ++ locks.update({lock: {}}) ++ else: ++ return { ++ "name": name, ++ "changes": {}, ++ "result": False, ++ "comment": "No any function to get the list of held packages available.\n" ++ "Check if the package manager supports package locking.", ++ } ++ ++ dpkgs = {} ++ for pkg in pkgs: ++ if isinstance(pkg, dict): ++ (pkg_name, pkg_ver) = next(iter(pkg.items())) ++ dpkgs.update({pkg_name: pkg_ver}) ++ else: ++ dpkgs.update({pkg: None}) ++ ++ ret = {"name": name, "changes": {}, "result": True, "comment": ""} ++ comments = [] ++ ++ for pkg_name in locks: ++ if locks[pkg_name].get("type", "package") != "package": ++ continue ++ lock_ver = None ++ if vr_lock and "version" in locks[pkg_name]: ++ lock_ver = locks[pkg_name]["version"] ++ lock_ver = lock_ver.lstrip("= ") ++ if all or (pkg_name in dpkgs and (not lock_ver or lock_ver == dpkgs[pkg_name])): ++ if __opts__["test"]: ++ comments.append( ++ "The following package would be unheld: {}{}".format( ++ pkg_name, ++ "" ++ if not dpkgs.get(pkg_name) ++ else " (version = {})".format(lock_ver), ++ ) ++ ) ++ else: ++ unhold_ret = __salt__["pkg.unhold"](name=name, pkgs=[pkg_name]) ++ if not unhold_ret.get(pkg_name, {}).get("result", False): ++ ret["result"] = False 
++ if unhold_ret and unhold_ret.get(pkg_name, {}).get("comment"): ++ comments.append(unhold_ret.get(pkg_name).get("comment")) ++ ret["changes"].update(unhold_ret) ++ ++ ret["comment"] = "\n".join(comments) ++ if not (ret["changes"] or ret["comment"]): ++ ret["comment"] = "No changes made" ++ ++ return ret +diff --git a/tests/pytests/unit/modules/test_zypperpkg.py b/tests/pytests/unit/modules/test_zypperpkg.py +index eb1e63f6d7..bfc1558c9a 100644 +--- a/tests/pytests/unit/modules/test_zypperpkg.py ++++ b/tests/pytests/unit/modules/test_zypperpkg.py +@@ -121,3 +121,136 @@ def test_del_repo_key(): + with patch.dict(zypper.__salt__, salt_mock): + assert zypper.del_repo_key(keyid="keyid", root="/mnt") + salt_mock["lowpkg.remove_gpg_key"].assert_called_once_with("keyid", "/mnt") ++ ++ ++def test_pkg_hold(): ++ """ ++ Tests holding packages with Zypper ++ """ ++ ++ # Test openSUSE 15.3 ++ list_locks_mock = { ++ "bar": {"type": "package", "match_type": "glob", "case_sensitive": "on"}, ++ "minimal_base": { ++ "type": "pattern", ++ "match_type": "glob", ++ "case_sensitive": "on", ++ }, ++ "baz": {"type": "package", "match_type": "glob", "case_sensitive": "on"}, ++ } ++ ++ cmd = MagicMock( ++ return_value={ ++ "pid": 1234, ++ "retcode": 0, ++ "stdout": "Specified lock has been successfully added.", ++ "stderr": "", ++ } ++ ) ++ with patch.object( ++ zypper, "list_locks", MagicMock(return_value=list_locks_mock) ++ ), patch.dict(zypper.__salt__, {"cmd.run_all": cmd}): ++ ret = zypper.hold("foo") ++ assert ret["foo"]["changes"]["old"] == "" ++ assert ret["foo"]["changes"]["new"] == "hold" ++ assert ret["foo"]["comment"] == "Package foo is now being held." 
++ cmd.assert_called_once_with( ++ ["zypper", "--non-interactive", "--no-refresh", "al", "foo"], ++ env={}, ++ output_loglevel="trace", ++ python_shell=False, ++ ) ++ cmd.reset_mock() ++ ret = zypper.hold(pkgs=["foo", "bar"]) ++ assert ret["foo"]["changes"]["old"] == "" ++ assert ret["foo"]["changes"]["new"] == "hold" ++ assert ret["foo"]["comment"] == "Package foo is now being held." ++ assert ret["bar"]["changes"] == {} ++ assert ret["bar"]["comment"] == "Package bar is already set to be held." ++ cmd.assert_called_once_with( ++ ["zypper", "--non-interactive", "--no-refresh", "al", "foo"], ++ env={}, ++ output_loglevel="trace", ++ python_shell=False, ++ ) ++ ++ ++def test_pkg_unhold(): ++ """ ++ Tests unholding packages with Zypper ++ """ ++ ++ # Test openSUSE 15.3 ++ list_locks_mock = { ++ "bar": {"type": "package", "match_type": "glob", "case_sensitive": "on"}, ++ "minimal_base": { ++ "type": "pattern", ++ "match_type": "glob", ++ "case_sensitive": "on", ++ }, ++ "baz": {"type": "package", "match_type": "glob", "case_sensitive": "on"}, ++ } ++ ++ cmd = MagicMock( ++ return_value={ ++ "pid": 1234, ++ "retcode": 0, ++ "stdout": "1 lock has been successfully removed.", ++ "stderr": "", ++ } ++ ) ++ with patch.object( ++ zypper, "list_locks", MagicMock(return_value=list_locks_mock) ++ ), patch.dict(zypper.__salt__, {"cmd.run_all": cmd}): ++ ret = zypper.unhold("foo") ++ assert ret["foo"]["comment"] == "Package foo was already unheld." ++ cmd.assert_not_called() ++ cmd.reset_mock() ++ ret = zypper.unhold(pkgs=["foo", "bar"]) ++ assert ret["foo"]["changes"] == {} ++ assert ret["foo"]["comment"] == "Package foo was already unheld." ++ assert ret["bar"]["changes"]["old"] == "hold" ++ assert ret["bar"]["changes"]["new"] == "" ++ assert ret["bar"]["comment"] == "Package bar is no longer held." 
++ cmd.assert_called_once_with( ++ ["zypper", "--non-interactive", "--no-refresh", "rl", "bar"], ++ env={}, ++ output_loglevel="trace", ++ python_shell=False, ++ ) ++ ++ ++def test_pkg_list_holds(): ++ """ ++ Tests listing of calculated held packages with Zypper ++ """ ++ ++ # Test openSUSE 15.3 ++ list_locks_mock = { ++ "bar": {"type": "package", "match_type": "glob", "case_sensitive": "on"}, ++ "minimal_base": { ++ "type": "pattern", ++ "match_type": "glob", ++ "case_sensitive": "on", ++ }, ++ "baz": {"type": "package", "match_type": "glob", "case_sensitive": "on"}, ++ } ++ installed_pkgs = { ++ "foo": [{"edition": "1.2.3-1.1"}], ++ "bar": [{"edition": "2.3.4-2.1", "epoch": "2"}], ++ } ++ ++ def zypper_search_mock(name, *_args, **_kwargs): ++ if name in installed_pkgs: ++ return {name: installed_pkgs.get(name)} ++ ++ with patch.object( ++ zypper, "list_locks", MagicMock(return_value=list_locks_mock) ++ ), patch.object( ++ zypper, "search", MagicMock(side_effect=zypper_search_mock) ++ ), patch.object( ++ zypper, "info_installed", MagicMock(side_effect=zypper_search_mock) ++ ): ++ ret = zypper.list_holds() ++ assert len(ret) == 1 ++ assert "bar-2:2.3.4-2.1.*" in ret +diff --git a/tests/pytests/unit/states/test_pkg.py b/tests/pytests/unit/states/test_pkg.py +index 7e667d36fd..17b91bcb39 100644 +--- a/tests/pytests/unit/states/test_pkg.py ++++ b/tests/pytests/unit/states/test_pkg.py +@@ -578,3 +578,140 @@ def test_removed_purged_with_changes_test_true(list_pkgs, action): + ret = pkg_actions[action]("pkga", test=True) + assert ret["result"] is None + assert ret["changes"] == expected ++ ++ ++@pytest.mark.parametrize( ++ "package_manager", [("Zypper"), ("YUM/DNF"), ("APT")], ++) ++def test_held_unheld(package_manager): ++ """ ++ Test pkg.held and pkg.unheld with Zypper, YUM/DNF and APT ++ """ ++ ++ if package_manager == "Zypper": ++ list_holds_func = "pkg.list_locks" ++ list_holds_mock = MagicMock( ++ return_value={ ++ "bar": { ++ "type": "package", ++ "match_type": 
"glob", ++ "case_sensitive": "on", ++ }, ++ "minimal_base": { ++ "type": "pattern", ++ "match_type": "glob", ++ "case_sensitive": "on", ++ }, ++ "baz": { ++ "type": "package", ++ "match_type": "glob", ++ "case_sensitive": "on", ++ }, ++ } ++ ) ++ elif package_manager == "YUM/DNF": ++ list_holds_func = "pkg.list_holds" ++ list_holds_mock = MagicMock( ++ return_value=["bar-0:1.2.3-1.1.*", "baz-0:2.3.4-2.1.*"] ++ ) ++ elif package_manager == "APT": ++ list_holds_func = "pkg.get_selections" ++ list_holds_mock = MagicMock(return_value={"hold": ["bar", "baz"]}) ++ ++ def pkg_hold(name, pkgs=None, *_args, **__kwargs): ++ if name and pkgs is None: ++ pkgs = [name] ++ ret = {} ++ for pkg in pkgs: ++ ret.update( ++ { ++ pkg: { ++ "name": pkg, ++ "changes": {"new": "hold", "old": ""}, ++ "result": True, ++ "comment": "Package {} is now being held.".format(pkg), ++ } ++ } ++ ) ++ return ret ++ ++ def pkg_unhold(name, pkgs=None, *_args, **__kwargs): ++ if name and pkgs is None: ++ pkgs = [name] ++ ret = {} ++ for pkg in pkgs: ++ ret.update( ++ { ++ pkg: { ++ "name": pkg, ++ "changes": {"new": "", "old": "hold"}, ++ "result": True, ++ "comment": "Package {} is no longer held.".format(pkg), ++ } ++ } ++ ) ++ return ret ++ ++ hold_mock = MagicMock(side_effect=pkg_hold) ++ unhold_mock = MagicMock(side_effect=pkg_unhold) ++ ++ # Testing with Zypper ++ with patch.dict( ++ pkg.__salt__, ++ { ++ list_holds_func: list_holds_mock, ++ "pkg.hold": hold_mock, ++ "pkg.unhold": unhold_mock, ++ }, ++ ): ++ # Holding one of two packages ++ ret = pkg.held("held-test", pkgs=["foo", "bar"]) ++ assert "foo" in ret["changes"] ++ assert len(ret["changes"]) == 1 ++ hold_mock.assert_called_once_with(name="held-test", pkgs=["foo"]) ++ unhold_mock.assert_not_called() ++ ++ hold_mock.reset_mock() ++ unhold_mock.reset_mock() ++ ++ # Holding one of two packages and replacing all the rest held packages ++ ret = pkg.held("held-test", pkgs=["foo", "bar"], replace=True) ++ assert "foo" in ret["changes"] ++ 
assert "baz" in ret["changes"] ++ assert len(ret["changes"]) == 2 ++ hold_mock.assert_called_once_with(name="held-test", pkgs=["foo"]) ++ unhold_mock.assert_called_once_with(name="held-test", pkgs=["baz"]) ++ ++ hold_mock.reset_mock() ++ unhold_mock.reset_mock() ++ ++ # Remove all holds ++ ret = pkg.held("held-test", pkgs=[], replace=True) ++ assert "bar" in ret["changes"] ++ assert "baz" in ret["changes"] ++ assert len(ret["changes"]) == 2 ++ hold_mock.assert_not_called() ++ unhold_mock.assert_any_call(name="held-test", pkgs=["baz"]) ++ unhold_mock.assert_any_call(name="held-test", pkgs=["bar"]) ++ ++ hold_mock.reset_mock() ++ unhold_mock.reset_mock() ++ ++ # Unolding one of two packages ++ ret = pkg.unheld("held-test", pkgs=["foo", "bar"]) ++ assert "bar" in ret["changes"] ++ assert len(ret["changes"]) == 1 ++ unhold_mock.assert_called_once_with(name="held-test", pkgs=["bar"]) ++ hold_mock.assert_not_called() ++ ++ hold_mock.reset_mock() ++ unhold_mock.reset_mock() ++ ++ # Remove all holds ++ ret = pkg.unheld("held-test", all=True) ++ assert "bar" in ret["changes"] ++ assert "baz" in ret["changes"] ++ assert len(ret["changes"]) == 2 ++ hold_mock.assert_not_called() ++ unhold_mock.assert_any_call(name="held-test", pkgs=["baz"]) ++ unhold_mock.assert_any_call(name="held-test", pkgs=["bar"]) +-- +2.34.1 + + diff --git a/implementation-of-suse_ip-execution-module-bsc-10999.patch b/implementation-of-suse_ip-execution-module-bsc-10999.patch new file mode 100644 index 0000000..330b543 --- /dev/null +++ b/implementation-of-suse_ip-execution-module-bsc-10999.patch @@ -0,0 +1,1360 @@ +From ebf90aaad969a61708673a9681d0d534134e16f8 Mon Sep 17 00:00:00 2001 +From: Victor Zhestkov <35733135+vzhestkov@users.noreply.github.com> +Date: Thu, 18 Feb 2021 15:56:01 +0300 +Subject: [PATCH] Implementation of suse_ip execution module + (bsc#1099976) (#323) + +--- + salt/modules/linux_ip.py | 2 + + salt/modules/rh_ip.py | 2 +- + salt/modules/suse_ip.py | 1151 ++++++++++++++++++++++++++ + 
salt/states/network.py | 28 +- + salt/templates/suse_ip/ifcfg.jinja | 34 + + salt/templates/suse_ip/ifroute.jinja | 8 + + salt/templates/suse_ip/network.jinja | 30 + + setup.py | 1 + + 8 files changed, 1248 insertions(+), 8 deletions(-) + create mode 100644 salt/modules/suse_ip.py + create mode 100644 salt/templates/suse_ip/ifcfg.jinja + create mode 100644 salt/templates/suse_ip/ifroute.jinja + create mode 100644 salt/templates/suse_ip/network.jinja + +diff --git a/salt/modules/linux_ip.py b/salt/modules/linux_ip.py +index bac0665de2..e7a268694d 100644 +--- a/salt/modules/linux_ip.py ++++ b/salt/modules/linux_ip.py +@@ -21,6 +21,8 @@ def __virtual__(): + """ + if salt.utils.platform.is_windows(): + return (False, "Module linux_ip: Windows systems are not supported.") ++ if __grains__['os_family'] == "Suse": ++ return (False, "Module linux_ip: SUSE systems are not supported.") + if __grains__["os_family"] == "RedHat": + return (False, "Module linux_ip: RedHat systems are not supported.") + if __grains__["os_family"] == "Debian": +diff --git a/salt/modules/rh_ip.py b/salt/modules/rh_ip.py +index d3bab3a1f8..790241a82e 100644 +--- a/salt/modules/rh_ip.py ++++ b/salt/modules/rh_ip.py +@@ -551,7 +551,7 @@ def _parse_settings_eth(opts, iface_type, enabled, iface): + """ + result = {"name": iface} + if "proto" in opts: +- valid = ["none", "bootp", "dhcp"] ++ valid = ["none", "static", "bootp", "dhcp"] + if opts["proto"] in valid: + result["proto"] = opts["proto"] + else: +diff --git a/salt/modules/suse_ip.py b/salt/modules/suse_ip.py +new file mode 100644 +index 0000000000..92dad50351 +--- /dev/null ++++ b/salt/modules/suse_ip.py +@@ -0,0 +1,1151 @@ ++# -*- coding: utf-8 -*- ++""" ++The networking module for SUSE based distros ++""" ++from __future__ import absolute_import, print_function, unicode_literals ++ ++# Import python libs ++import logging ++import os ++ ++# Import third party libs ++import jinja2 ++import jinja2.exceptions ++ ++# Import salt libs ++import 
salt.utils.files ++import salt.utils.stringutils ++import salt.utils.templates ++import salt.utils.validate.net ++from salt.exceptions import CommandExecutionError ++from salt.ext import six ++ ++# Set up logging ++log = logging.getLogger(__name__) ++ ++# Set up template environment ++JINJA = jinja2.Environment( ++ loader=jinja2.FileSystemLoader( ++ os.path.join(salt.utils.templates.TEMPLATE_DIRNAME, "suse_ip") ++ ) ++) ++ ++# Define the module's virtual name ++__virtualname__ = "ip" ++ ++# Default values for bonding ++_BOND_DEFAULTS = { ++ # 803.ad aggregation selection logic ++ # 0 for stable (default) ++ # 1 for bandwidth ++ # 2 for count ++ "ad_select": "0", ++ # Max number of transmit queues (default = 16) ++ "tx_queues": "16", ++ # lacp_rate 0: Slow - every 30 seconds ++ # lacp_rate 1: Fast - every 1 second ++ "lacp_rate": "0", ++ # Max bonds for this driver ++ "max_bonds": "1", ++ # Used with miimon. ++ # On: driver sends mii ++ # Off: ethtool sends mii ++ "use_carrier": "0", ++ # Default. Don't change unless you know what you are doing. ++ "xmit_hash_policy": "layer2", ++} ++_SUSE_NETWORK_SCRIPT_DIR = "/etc/sysconfig/network" ++_SUSE_NETWORK_FILE = "/etc/sysconfig/network/config" ++_SUSE_NETWORK_ROUTES_FILE = "/etc/sysconfig/network/routes" ++_CONFIG_TRUE = ("yes", "on", "true", "1", True) ++_CONFIG_FALSE = ("no", "off", "false", "0", False) ++_IFACE_TYPES = ( ++ "eth", ++ "bond", ++ "alias", ++ "clone", ++ "ipsec", ++ "dialup", ++ "bridge", ++ "slave", ++ "vlan", ++ "ipip", ++ "ib", ++) ++ ++ ++def __virtual__(): ++ """ ++ Confine this module to SUSE based distros ++ """ ++ if __grains__["os_family"] == "Suse": ++ return __virtualname__ ++ return ( ++ False, ++ "The suse_ip execution module cannot be loaded: this module is only available on SUSE based distributions.", ++ ) ++ ++ ++def _error_msg_iface(iface, option, expected): ++ """ ++ Build an appropriate error message from a given option and ++ a list of expected values. 
++ """ ++ if isinstance(expected, six.string_types): ++ expected = (expected,) ++ msg = "Invalid option -- Interface: {0}, Option: {1}, Expected: [{2}]" ++ return msg.format(iface, option, "|".join(str(e) for e in expected)) ++ ++ ++def _error_msg_routes(iface, option, expected): ++ """ ++ Build an appropriate error message from a given option and ++ a list of expected values. ++ """ ++ msg = "Invalid option -- Route interface: {0}, Option: {1}, Expected: [{2}]" ++ return msg.format(iface, option, expected) ++ ++ ++def _log_default_iface(iface, opt, value): ++ log.info( ++ "Using default option -- Interface: %s Option: %s Value: %s", iface, opt, value ++ ) ++ ++ ++def _error_msg_network(option, expected): ++ """ ++ Build an appropriate error message from a given option and ++ a list of expected values. ++ """ ++ if isinstance(expected, six.string_types): ++ expected = (expected,) ++ msg = "Invalid network setting -- Setting: {0}, Expected: [{1}]" ++ return msg.format(option, "|".join(str(e) for e in expected)) ++ ++ ++def _log_default_network(opt, value): ++ log.info("Using existing setting -- Setting: %s Value: %s", opt, value) ++ ++ ++def _parse_suse_config(path): ++ suse_config = _read_file(path) ++ cv_suse_config = {} ++ if suse_config: ++ for line in suse_config: ++ line = line.strip() ++ if len(line) == 0 or line.startswith("!") or line.startswith("#"): ++ continue ++ pair = [p.rstrip() for p in line.split("=", 1)] ++ if len(pair) != 2: ++ continue ++ name, value = pair ++ cv_suse_config[name.upper()] = salt.utils.stringutils.dequote(value) ++ ++ return cv_suse_config ++ ++ ++def _parse_ethtool_opts(opts, iface): ++ """ ++ Filters given options and outputs valid settings for ETHTOOLS_OPTS ++ If an option has a value that is not expected, this ++ function will log what the Interface, Setting and what it was ++ expecting. 
++ """ ++ config = {} ++ ++ if "autoneg" in opts: ++ if opts["autoneg"] in _CONFIG_TRUE: ++ config.update({"autoneg": "on"}) ++ elif opts["autoneg"] in _CONFIG_FALSE: ++ config.update({"autoneg": "off"}) ++ else: ++ _raise_error_iface(iface, "autoneg", _CONFIG_TRUE + _CONFIG_FALSE) ++ ++ if "duplex" in opts: ++ valid = ["full", "half"] ++ if opts["duplex"] in valid: ++ config.update({"duplex": opts["duplex"]}) ++ else: ++ _raise_error_iface(iface, "duplex", valid) ++ ++ if "speed" in opts: ++ valid = ["10", "100", "1000", "10000"] ++ if six.text_type(opts["speed"]) in valid: ++ config.update({"speed": opts["speed"]}) ++ else: ++ _raise_error_iface(iface, opts["speed"], valid) ++ ++ if "advertise" in opts: ++ valid = [ ++ "0x001", ++ "0x002", ++ "0x004", ++ "0x008", ++ "0x010", ++ "0x020", ++ "0x20000", ++ "0x8000", ++ "0x1000", ++ "0x40000", ++ "0x80000", ++ "0x200000", ++ "0x400000", ++ "0x800000", ++ "0x1000000", ++ "0x2000000", ++ "0x4000000", ++ ] ++ if six.text_type(opts["advertise"]) in valid: ++ config.update({"advertise": opts["advertise"]}) ++ else: ++ _raise_error_iface(iface, "advertise", valid) ++ ++ valid = _CONFIG_TRUE + _CONFIG_FALSE ++ for option in ("rx", "tx", "sg", "tso", "ufo", "gso", "gro", "lro"): ++ if option in opts: ++ if opts[option] in _CONFIG_TRUE: ++ config.update({option: "on"}) ++ elif opts[option] in _CONFIG_FALSE: ++ config.update({option: "off"}) ++ else: ++ _raise_error_iface(iface, option, valid) ++ ++ return config ++ ++ ++def _parse_settings_bond(opts, iface): ++ """ ++ Filters given options and outputs valid settings for requested ++ operation. If an option has a value that is not expected, this ++ function will log what the Interface, Setting and what it was ++ expecting. 
++ """ ++ if opts["mode"] in ("balance-rr", "0"): ++ log.info("Device: %s Bonding Mode: load balancing (round-robin)", iface) ++ return _parse_settings_bond_0(opts, iface) ++ elif opts["mode"] in ("active-backup", "1"): ++ log.info("Device: %s Bonding Mode: fault-tolerance (active-backup)", iface) ++ return _parse_settings_bond_1(opts, iface) ++ elif opts["mode"] in ("balance-xor", "2"): ++ log.info("Device: %s Bonding Mode: load balancing (xor)", iface) ++ return _parse_settings_bond_2(opts, iface) ++ elif opts["mode"] in ("broadcast", "3"): ++ log.info("Device: %s Bonding Mode: fault-tolerance (broadcast)", iface) ++ return _parse_settings_bond_3(opts, iface) ++ elif opts["mode"] in ("802.3ad", "4"): ++ log.info( ++ "Device: %s Bonding Mode: IEEE 802.3ad Dynamic link " "aggregation", iface ++ ) ++ return _parse_settings_bond_4(opts, iface) ++ elif opts["mode"] in ("balance-tlb", "5"): ++ log.info("Device: %s Bonding Mode: transmit load balancing", iface) ++ return _parse_settings_bond_5(opts, iface) ++ elif opts["mode"] in ("balance-alb", "6"): ++ log.info("Device: %s Bonding Mode: adaptive load balancing", iface) ++ return _parse_settings_bond_6(opts, iface) ++ else: ++ valid = ( ++ "0", ++ "1", ++ "2", ++ "3", ++ "4", ++ "5", ++ "6", ++ "balance-rr", ++ "active-backup", ++ "balance-xor", ++ "broadcast", ++ "802.3ad", ++ "balance-tlb", ++ "balance-alb", ++ ) ++ _raise_error_iface(iface, "mode", valid) ++ ++ ++def _parse_settings_miimon(opts, iface): ++ """ ++ Add shared settings for miimon support used by balance-rr, balance-xor ++ bonding types. 
++ """ ++ ret = {} ++ for binding in ("miimon", "downdelay", "updelay"): ++ if binding in opts: ++ try: ++ int(opts[binding]) ++ ret.update({binding: opts[binding]}) ++ except Exception: # pylint: disable=broad-except ++ _raise_error_iface(iface, binding, "integer") ++ ++ if "miimon" in opts: ++ if not opts["miimon"]: ++ _raise_error_iface(iface, "miimon", "nonzero integer") ++ ++ for binding in ("downdelay", "updelay"): ++ if binding in ret: ++ if ret[binding] % ret["miimon"]: ++ _raise_error_iface( ++ iface, ++ binding, ++ "0 or a multiple of miimon ({0})".format(ret["miimon"]), ++ ) ++ ++ if "use_carrier" in opts: ++ if opts["use_carrier"] in _CONFIG_TRUE: ++ ret.update({"use_carrier": "1"}) ++ elif opts["use_carrier"] in _CONFIG_FALSE: ++ ret.update({"use_carrier": "0"}) ++ else: ++ valid = _CONFIG_TRUE + _CONFIG_FALSE ++ _raise_error_iface(iface, "use_carrier", valid) ++ else: ++ _log_default_iface(iface, "use_carrier", _BOND_DEFAULTS["use_carrier"]) ++ ret.update({"use_carrier": _BOND_DEFAULTS["use_carrier"]}) ++ ++ return ret ++ ++ ++def _parse_settings_arp(opts, iface): ++ """ ++ Add shared settings for arp used by balance-rr, balance-xor bonding types. 
++ """ ++ ret = {} ++ if "arp_interval" in opts: ++ try: ++ int(opts["arp_interval"]) ++ ret.update({"arp_interval": opts["arp_interval"]}) ++ except Exception: # pylint: disable=broad-except ++ _raise_error_iface(iface, "arp_interval", "integer") ++ ++ # ARP targets in n.n.n.n form ++ valid = "list of ips (up to 16)" ++ if "arp_ip_target" in opts: ++ if isinstance(opts["arp_ip_target"], list): ++ if 1 <= len(opts["arp_ip_target"]) <= 16: ++ ret.update({"arp_ip_target": ",".join(opts["arp_ip_target"])}) ++ else: ++ _raise_error_iface(iface, "arp_ip_target", valid) ++ else: ++ _raise_error_iface(iface, "arp_ip_target", valid) ++ else: ++ _raise_error_iface(iface, "arp_ip_target", valid) ++ ++ return ret ++ ++ ++def _parse_settings_bond_0(opts, iface): ++ """ ++ Filters given options and outputs valid settings for bond0. ++ If an option has a value that is not expected, this ++ function will log what the Interface, Setting and what it was ++ expecting. ++ """ ++ bond = {"mode": "0"} ++ bond.update(_parse_settings_miimon(opts, iface)) ++ bond.update(_parse_settings_arp(opts, iface)) ++ ++ if "miimon" not in opts and "arp_interval" not in opts: ++ _raise_error_iface( ++ iface, "miimon or arp_interval", "at least one of these is required" ++ ) ++ ++ return bond ++ ++ ++def _parse_settings_bond_1(opts, iface): ++ ++ """ ++ Filters given options and outputs valid settings for bond1. ++ If an option has a value that is not expected, this ++ function will log what the Interface, Setting and what it was ++ expecting. ++ """ ++ bond = {"mode": "1"} ++ bond.update(_parse_settings_miimon(opts, iface)) ++ ++ if "miimon" not in opts: ++ _raise_error_iface(iface, "miimon", "integer") ++ ++ if "primary" in opts: ++ bond.update({"primary": opts["primary"]}) ++ ++ return bond ++ ++ ++def _parse_settings_bond_2(opts, iface): ++ """ ++ Filters given options and outputs valid settings for bond2. 
++ If an option has a value that is not expected, this ++ function will log what the Interface, Setting and what it was ++ expecting. ++ """ ++ bond = {"mode": "2"} ++ bond.update(_parse_settings_miimon(opts, iface)) ++ bond.update(_parse_settings_arp(opts, iface)) ++ ++ if "miimon" not in opts and "arp_interval" not in opts: ++ _raise_error_iface( ++ iface, "miimon or arp_interval", "at least one of these is required" ++ ) ++ ++ if "hashing-algorithm" in opts: ++ valid = ("layer2", "layer2+3", "layer3+4") ++ if opts["hashing-algorithm"] in valid: ++ bond.update({"xmit_hash_policy": opts["hashing-algorithm"]}) ++ else: ++ _raise_error_iface(iface, "hashing-algorithm", valid) ++ ++ return bond ++ ++ ++def _parse_settings_bond_3(opts, iface): ++ ++ """ ++ Filters given options and outputs valid settings for bond3. ++ If an option has a value that is not expected, this ++ function will log what the Interface, Setting and what it was ++ expecting. ++ """ ++ bond = {"mode": "3"} ++ bond.update(_parse_settings_miimon(opts, iface)) ++ ++ if "miimon" not in opts: ++ _raise_error_iface(iface, "miimon", "integer") ++ ++ return bond ++ ++ ++def _parse_settings_bond_4(opts, iface): ++ """ ++ Filters given options and outputs valid settings for bond4. ++ If an option has a value that is not expected, this ++ function will log what the Interface, Setting and what it was ++ expecting. 
++ """ ++ bond = {"mode": "4"} ++ bond.update(_parse_settings_miimon(opts, iface)) ++ ++ if "miimon" not in opts: ++ _raise_error_iface(iface, "miimon", "integer") ++ ++ for binding in ("lacp_rate", "ad_select"): ++ if binding in opts: ++ if binding == "lacp_rate": ++ valid = ("fast", "1", "slow", "0") ++ if opts[binding] not in valid: ++ _raise_error_iface(iface, binding, valid) ++ if opts[binding] == "fast": ++ opts.update({binding: "1"}) ++ if opts[binding] == "slow": ++ opts.update({binding: "0"}) ++ else: ++ valid = "integer" ++ try: ++ int(opts[binding]) ++ bond.update({binding: opts[binding]}) ++ except Exception: # pylint: disable=broad-except ++ _raise_error_iface(iface, binding, valid) ++ else: ++ _log_default_iface(iface, binding, _BOND_DEFAULTS[binding]) ++ bond.update({binding: _BOND_DEFAULTS[binding]}) ++ ++ if "hashing-algorithm" in opts: ++ valid = ("layer2", "layer2+3", "layer3+4") ++ if opts["hashing-algorithm"] in valid: ++ bond.update({"xmit_hash_policy": opts["hashing-algorithm"]}) ++ else: ++ _raise_error_iface(iface, "hashing-algorithm", valid) ++ ++ return bond ++ ++ ++def _parse_settings_bond_5(opts, iface): ++ ++ """ ++ Filters given options and outputs valid settings for bond5. ++ If an option has a value that is not expected, this ++ function will log what the Interface, Setting and what it was ++ expecting. ++ """ ++ bond = {"mode": "5"} ++ bond.update(_parse_settings_miimon(opts, iface)) ++ ++ if "miimon" not in opts: ++ _raise_error_iface(iface, "miimon", "integer") ++ ++ if "primary" in opts: ++ bond.update({"primary": opts["primary"]}) ++ ++ return bond ++ ++ ++def _parse_settings_bond_6(opts, iface): ++ ++ """ ++ Filters given options and outputs valid settings for bond6. ++ If an option has a value that is not expected, this ++ function will log what the Interface, Setting and what it was ++ expecting. 
++ """ ++ bond = {"mode": "6"} ++ bond.update(_parse_settings_miimon(opts, iface)) ++ ++ if "miimon" not in opts: ++ _raise_error_iface(iface, "miimon", "integer") ++ ++ if "primary" in opts: ++ bond.update({"primary": opts["primary"]}) ++ ++ return bond ++ ++ ++def _parse_settings_vlan(opts, iface): ++ ++ """ ++ Filters given options and outputs valid settings for a vlan ++ """ ++ vlan = {} ++ if "reorder_hdr" in opts: ++ if opts["reorder_hdr"] in _CONFIG_TRUE + _CONFIG_FALSE: ++ vlan.update({"reorder_hdr": opts["reorder_hdr"]}) ++ else: ++ valid = _CONFIG_TRUE + _CONFIG_FALSE ++ _raise_error_iface(iface, "reorder_hdr", valid) ++ ++ if "vlan_id" in opts: ++ if opts["vlan_id"] > 0: ++ vlan.update({"vlan_id": opts["vlan_id"]}) ++ else: ++ _raise_error_iface(iface, "vlan_id", "Positive integer") ++ ++ if "phys_dev" in opts: ++ if len(opts["phys_dev"]) > 0: ++ vlan.update({"phys_dev": opts["phys_dev"]}) ++ else: ++ _raise_error_iface(iface, "phys_dev", "Non-empty string") ++ ++ return vlan ++ ++ ++def _parse_settings_eth(opts, iface_type, enabled, iface): ++ """ ++ Filters given options and outputs valid settings for a ++ network interface. ++ """ ++ result = {"name": iface} ++ if "proto" in opts: ++ valid = ["static", "dhcp", "dhcp4", "dhcp6", "autoip", "dhcp+autoip", "auto6", "6to4", "none"] ++ if opts["proto"] in valid: ++ result["proto"] = opts["proto"] ++ else: ++ _raise_error_iface(iface, opts["proto"], valid) ++ ++ if "mtu" in opts: ++ try: ++ result["mtu"] = int(opts["mtu"]) ++ except ValueError: ++ _raise_error_iface(iface, "mtu", ["integer"]) ++ ++ if "hwaddr" in opts and "macaddr" in opts: ++ msg = "Cannot pass both hwaddr and macaddr. 
Must use either hwaddr or macaddr" ++ log.error(msg) ++ raise AttributeError(msg) ++ ++ if iface_type not in ("bridge",): ++ ethtool = _parse_ethtool_opts(opts, iface) ++ if ethtool: ++ result["ethtool"] = " ".join( ++ ["{0} {1}".format(x, y) for x, y in ethtool.items()] ++ ) ++ ++ if iface_type == "slave": ++ result["proto"] = "none" ++ ++ ++ if iface_type == "bond": ++ if "mode" not in opts: ++ msg = "Missing required option 'mode'" ++ log.error("%s for bond interface '%s'", msg, iface) ++ raise AttributeError(msg) ++ bonding = _parse_settings_bond(opts, iface) ++ if bonding: ++ result["bonding"] = " ".join( ++ ["{0}={1}".format(x, y) for x, y in bonding.items()] ++ ) ++ result["devtype"] = "Bond" ++ if "slaves" in opts: ++ if isinstance(opts["slaves"], list): ++ result["slaves"] = opts["slaves"] ++ else: ++ result["slaves"] = opts["slaves"].split() ++ ++ if iface_type == "vlan": ++ vlan = _parse_settings_vlan(opts, iface) ++ if vlan: ++ result["devtype"] = "Vlan" ++ for opt in vlan: ++ result[opt] = opts[opt] ++ ++ if iface_type == "eth": ++ result["devtype"] = "Ethernet" ++ ++ if iface_type == "bridge": ++ result["devtype"] = "Bridge" ++ bypassfirewall = True ++ valid = _CONFIG_TRUE + _CONFIG_FALSE ++ for opt in ("bypassfirewall",): ++ if opt in opts: ++ if opts[opt] in _CONFIG_TRUE: ++ bypassfirewall = True ++ elif opts[opt] in _CONFIG_FALSE: ++ bypassfirewall = False ++ else: ++ _raise_error_iface(iface, opts[opt], valid) ++ ++ bridgectls = [ ++ "net.bridge.bridge-nf-call-ip6tables", ++ "net.bridge.bridge-nf-call-iptables", ++ "net.bridge.bridge-nf-call-arptables", ++ ] ++ ++ if bypassfirewall: ++ sysctl_value = 0 ++ else: ++ sysctl_value = 1 ++ ++ for sysctl in bridgectls: ++ try: ++ __salt__["sysctl.persist"](sysctl, sysctl_value) ++ except CommandExecutionError: ++ log.warning("Failed to set sysctl: %s", sysctl) ++ ++ else: ++ if "bridge" in opts: ++ result["bridge"] = opts["bridge"] ++ ++ if iface_type == "ipip": ++ result["devtype"] = "IPIP" ++ for opt 
in ("my_inner_ipaddr", "my_outer_ipaddr"): ++ if opt not in opts: ++ _raise_error_iface(iface, opt, "1.2.3.4") ++ else: ++ result[opt] = opts[opt] ++ if iface_type == "ib": ++ result["devtype"] = "InfiniBand" ++ ++ if "prefix" in opts: ++ if "netmask" in opts: ++ msg = "Cannot use prefix and netmask together" ++ log.error(msg) ++ raise AttributeError(msg) ++ result["prefix"] = opts["prefix"] ++ elif "netmask" in opts: ++ result["netmask"] = opts["netmask"] ++ ++ for opt in ( ++ "ipaddr", ++ "master", ++ "srcaddr", ++ "delay", ++ "domain", ++ "gateway", ++ "uuid", ++ "nickname", ++ "zone", ++ ): ++ if opt in opts: ++ result[opt] = opts[opt] ++ ++ if "ipaddrs" in opts or "ipv6addr" in opts or "ipv6addrs" in opts: ++ result["ipaddrs"] = [] ++ addrs = list ++ for opt in opts["ipaddrs"]: ++ if salt.utils.validate.net.ipv4_addr(opt) or salt.utils.validate.net.ipv6_addr(opt): ++ result['ipaddrs'].append(opt) ++ else: ++ msg = "{0} is invalid ipv4 or ipv6 CIDR" ++ log.error(msg) ++ raise AttributeError(msg) ++ if salt.utils.validate.net.ipv6_addr(opts["ipv6addr"]): ++ result['ipaddrs'].append(opts["ipv6addr"]) ++ else: ++ msg = "{0} is invalid ipv6 CIDR" ++ log.error(msg) ++ raise AttributeError(msg) ++ for opt in opts["ipv6addrs"]: ++ if salt.utils.validate.net.ipv6_addr(opt): ++ result['ipaddrs'].append(opt) ++ else: ++ msg = "{0} is invalid ipv6 CIDR" ++ log.error(msg) ++ raise AttributeError(msg) ++ ++ if "enable_ipv6" in opts: ++ result["enable_ipv6"] = opts["enable_ipv6"] ++ ++ valid = _CONFIG_TRUE + _CONFIG_FALSE ++ for opt in ( ++ "onparent", ++ "peerdns", ++ "peerroutes", ++ "slave", ++ "vlan", ++ "defroute", ++ "stp", ++ "ipv6_peerdns", ++ "ipv6_defroute", ++ "ipv6_peerroutes", ++ "ipv6_autoconf", ++ "ipv4_failure_fatal", ++ "dhcpv6c", ++ ): ++ if opt in opts: ++ if opts[opt] in _CONFIG_TRUE: ++ result[opt] = "yes" ++ elif opts[opt] in _CONFIG_FALSE: ++ result[opt] = "no" ++ else: ++ _raise_error_iface(iface, opts[opt], valid) ++ ++ if "onboot" in opts: ++ 
log.warning( ++ "The 'onboot' option is controlled by the 'enabled' option. " ++ "Interface: %s Enabled: %s", ++ iface, ++ enabled, ++ ) ++ ++ if "startmode" in opts: ++ valid = ("manual", "auto", "nfsroot", "hotplug", "off") ++ if opts["startmode"] in valid: ++ result["startmode"] = opts["startmode"] ++ else: ++ _raise_error_iface(iface, opts["startmode"], valid) ++ else: ++ if enabled: ++ result["startmode"] = "auto" ++ else: ++ result["startmode"] = "off" ++ ++ # This vlan is in opts, and should be only used in range interface ++ # will affect jinja template for interface generating ++ if "vlan" in opts: ++ if opts["vlan"] in _CONFIG_TRUE: ++ result["vlan"] = "yes" ++ elif opts["vlan"] in _CONFIG_FALSE: ++ result["vlan"] = "no" ++ else: ++ _raise_error_iface(iface, opts["vlan"], valid) ++ ++ if "arpcheck" in opts: ++ if opts["arpcheck"] in _CONFIG_FALSE: ++ result["arpcheck"] = "no" ++ ++ if "ipaddr_start" in opts: ++ result["ipaddr_start"] = opts["ipaddr_start"] ++ ++ if "ipaddr_end" in opts: ++ result["ipaddr_end"] = opts["ipaddr_end"] ++ ++ if "clonenum_start" in opts: ++ result["clonenum_start"] = opts["clonenum_start"] ++ ++ if "hwaddr" in opts: ++ result["hwaddr"] = opts["hwaddr"] ++ ++ if "macaddr" in opts: ++ result["macaddr"] = opts["macaddr"] ++ ++ # If NetworkManager is available, we can control whether we use ++ # it or not ++ if "nm_controlled" in opts: ++ if opts["nm_controlled"] in _CONFIG_TRUE: ++ result["nm_controlled"] = "yes" ++ elif opts["nm_controlled"] in _CONFIG_FALSE: ++ result["nm_controlled"] = "no" ++ else: ++ _raise_error_iface(iface, opts["nm_controlled"], valid) ++ else: ++ result["nm_controlled"] = "no" ++ ++ return result ++ ++ ++def _parse_routes(iface, opts): ++ """ ++ Filters given options and outputs valid settings for ++ the route settings file. 
++ """ ++ # Normalize keys ++ opts = dict((k.lower(), v) for (k, v) in six.iteritems(opts)) ++ result = {} ++ if "routes" not in opts: ++ _raise_error_routes(iface, "routes", "List of routes") ++ ++ for opt in opts: ++ result[opt] = opts[opt] ++ ++ return result ++ ++ ++def _parse_network_settings(opts, current): ++ """ ++ Filters given options and outputs valid settings for ++ the global network settings file. ++ """ ++ # Normalize keys ++ opts = dict((k.lower(), v) for (k, v) in six.iteritems(opts)) ++ current = dict((k.lower(), v) for (k, v) in six.iteritems(current)) ++ ++ # Check for supported parameters ++ retain_settings = opts.get("retain_settings", False) ++ result = {} ++ if retain_settings: ++ for opt in current: ++ nopt = opt ++ if opt == "netconfig_dns_static_servers": ++ nopt = "dns" ++ result[nopt] = current[opt].split() ++ elif opt == "netconfig_dns_static_searchlist": ++ nopt = "dns_search" ++ result[nopt] = current[opt].split() ++ elif opt.startswith("netconfig_") and opt not in ("netconfig_modules_order", "netconfig_verbose", "netconfig_force_replace"): ++ nopt = opt[10:] ++ result[nopt] = current[opt] ++ else: ++ result[nopt] = current[opt] ++ _log_default_network(nopt, current[opt]) ++ ++ for opt in opts: ++ if opt in ("dns", "dns_search") and not isinstance(opts[opt], list): ++ result[opt] = opts[opt].split() ++ else: ++ result[opt] = opts[opt] ++ return result ++ ++ ++def _raise_error_iface(iface, option, expected): ++ """ ++ Log and raise an error with a logical formatted message. ++ """ ++ msg = _error_msg_iface(iface, option, expected) ++ log.error(msg) ++ raise AttributeError(msg) ++ ++ ++def _raise_error_network(option, expected): ++ """ ++ Log and raise an error with a logical formatted message. ++ """ ++ msg = _error_msg_network(option, expected) ++ log.error(msg) ++ raise AttributeError(msg) ++ ++ ++def _raise_error_routes(iface, option, expected): ++ """ ++ Log and raise an error with a logical formatted message. 
++ """ ++ msg = _error_msg_routes(iface, option, expected) ++ log.error(msg) ++ raise AttributeError(msg) ++ ++ ++def _read_file(path): ++ """ ++ Reads and returns the contents of a file ++ """ ++ try: ++ with salt.utils.files.fopen(path, "rb") as rfh: ++ lines = salt.utils.stringutils.to_unicode(rfh.read()).splitlines() ++ try: ++ lines.remove("") ++ except ValueError: ++ pass ++ return lines ++ except Exception: # pylint: disable=broad-except ++ return [] # Return empty list for type consistency ++ ++ ++def _write_file_iface(iface, data, folder, pattern): ++ ''' ++ Writes a file to disk ++ ''' ++ filename = os.path.join(folder, pattern.format(iface)) ++ if not os.path.exists(folder): ++ msg = '{0} cannot be written. {1} does not exist' ++ msg = msg.format(filename, folder) ++ log.error(msg) ++ raise AttributeError(msg) ++ with salt.utils.files.fopen(filename, 'w') as fp_: ++ fp_.write(salt.utils.stringutils.to_str(data)) ++ ++ ++def _write_file_network(data, filename): ++ """ ++ Writes a file to disk ++ """ ++ with salt.utils.files.fopen(filename, "w") as fp_: ++ fp_.write(salt.utils.stringutils.to_str(data)) ++ ++ ++def _read_temp(data): ++ lines = data.splitlines() ++ try: # Discard newlines if they exist ++ lines.remove("") ++ except ValueError: ++ pass ++ return lines ++ ++ ++def build_interface(iface, iface_type, enabled, **settings): ++ """ ++ Build an interface script for a network interface. ++ ++ CLI Example: ++ ++ .. 
code-block:: bash ++ ++ salt '*' ip.build_interface eth0 eth ++ """ ++ iface_type = iface_type.lower() ++ ++ if iface_type not in _IFACE_TYPES: ++ _raise_error_iface(iface, iface_type, _IFACE_TYPES) ++ ++ if iface_type == "slave": ++ settings["slave"] = "yes" ++ if "master" not in settings: ++ msg = "master is a required setting for slave interfaces" ++ log.error(msg) ++ raise AttributeError(msg) ++ ++ if iface_type == "bond": ++ if "mode" not in settings: ++ msg = "mode is required for bond interfaces" ++ log.error(msg) ++ raise AttributeError(msg) ++ settings["mode"] = str(settings["mode"]) ++ ++ if iface_type == "vlan": ++ settings["vlan"] = "yes" ++ ++ if iface_type == "bridge" and not __salt__["pkg.version"]("bridge-utils"): ++ __salt__["pkg.install"]("bridge-utils") ++ ++ if iface_type in ( ++ "eth", ++ "bond", ++ "bridge", ++ "slave", ++ "vlan", ++ "ipip", ++ "ib", ++ "alias", ++ ): ++ opts = _parse_settings_eth(settings, iface_type, enabled, iface) ++ try: ++ template = JINJA.get_template("ifcfg.jinja") ++ except jinja2.exceptions.TemplateNotFound: ++ log.error("Could not load template ifcfg.jinja") ++ return "" ++ log.debug("Interface opts: \n %s", opts) ++ ifcfg = template.render(opts) ++ ++ if settings.get("test"): ++ return _read_temp(ifcfg) ++ ++ _write_file_iface(iface, ifcfg, _SUSE_NETWORK_SCRIPT_DIR, "ifcfg-{0}") ++ path = os.path.join(_SUSE_NETWORK_SCRIPT_DIR, "ifcfg-{0}".format(iface)) ++ ++ return _read_file(path) ++ ++ ++def build_routes(iface, **settings): ++ """ ++ Build a route script for a network interface. ++ ++ CLI Example: ++ ++ .. 
code-block:: bash ++ ++ salt '*' ip.build_routes eth0 ++ """ ++ ++ template = "ifroute.jinja" ++ log.debug("Template name: %s", template) ++ ++ opts = _parse_routes(iface, settings) ++ log.debug("Opts: \n %s", opts) ++ try: ++ template = JINJA.get_template(template) ++ except jinja2.exceptions.TemplateNotFound: ++ log.error("Could not load template %s", template) ++ return "" ++ log.debug("IP routes:\n%s", opts["routes"]) ++ ++ if iface == "routes": ++ routecfg = template.render(routes=opts["routes"]) ++ else: ++ routecfg = template.render(routes=opts["routes"], iface=iface) ++ ++ if settings["test"]: ++ return _read_temp(routecfg) ++ ++ if iface == "routes": ++ path = _SUSE_NETWORK_ROUTES_FILE ++ else: ++ path = os.path.join(_SUSE_NETWORK_SCRIPT_DIR, "ifroute-{0}".format(iface)) ++ ++ _write_file_network(routecfg, path) ++ ++ return _read_file(path) ++ ++ ++def down(iface, iface_type=None): ++ """ ++ Shutdown a network interface ++ ++ CLI Example: ++ ++ .. code-block:: bash ++ ++ salt '*' ip.down eth0 ++ """ ++ # Slave devices are controlled by the master. ++ if not iface_type or iface_type.lower() != "slave": ++ return __salt__["cmd.run"]("ifdown {0}".format(iface)) ++ return None ++ ++ ++def get_interface(iface): ++ """ ++ Return the contents of an interface script ++ ++ CLI Example: ++ ++ .. code-block:: bash ++ ++ salt '*' ip.get_interface eth0 ++ """ ++ path = os.path.join(_SUSE_NETWORK_SCRIPT_DIR, "ifcfg-{0}".format(iface)) ++ return _read_file(path) ++ ++ ++def up(iface, iface_type=None): ++ """ ++ Start up a network interface ++ ++ CLI Example: ++ ++ .. code-block:: bash ++ ++ salt '*' ip.up eth0 ++ """ ++ # Slave devices are controlled by the master. ++ if not iface_type or iface_type.lower() != "slave": ++ return __salt__["cmd.run"]("ifup {0}".format(iface)) ++ return None ++ ++ ++def get_routes(iface): ++ """ ++ Return the contents of the interface routes script. ++ ++ CLI Example: ++ ++ .. 
code-block:: bash ++ ++ salt '*' ip.get_routes eth0 ++ """ ++ if iface == "routes": ++ path = _SUSE_NETWORK_ROUTES_FILE ++ else: ++ path = os.path.join(_SUSE_NETWORK_SCRIPT_DIR, "ifroute-{0}".format(iface)) ++ return _read_file(path) ++ ++ ++def get_network_settings(): ++ """ ++ Return the contents of the global network script. ++ ++ CLI Example: ++ ++ .. code-block:: bash ++ ++ salt '*' ip.get_network_settings ++ """ ++ return _read_file(_SUSE_NETWORK_FILE) ++ ++ ++def apply_network_settings(**settings): ++ """ ++ Apply global network configuration. ++ ++ CLI Example: ++ ++ .. code-block:: bash ++ ++ salt '*' ip.apply_network_settings ++ """ ++ if "require_reboot" not in settings: ++ settings["require_reboot"] = False ++ ++ if "apply_hostname" not in settings: ++ settings["apply_hostname"] = False ++ ++ hostname_res = True ++ if settings["apply_hostname"] in _CONFIG_TRUE: ++ if "hostname" in settings: ++ hostname_res = __salt__["network.mod_hostname"](settings["hostname"]) ++ else: ++ log.warning( ++ "The network state sls is trying to apply hostname " ++ "changes but no hostname is defined." ++ ) ++ hostname_res = False ++ ++ res = True ++ if settings["require_reboot"] in _CONFIG_TRUE: ++ log.warning( ++ "The network state sls is requiring a reboot of the system to " ++ "properly apply network configuration." ++ ) ++ res = True ++ else: ++ res = __salt__["service.reload"]("network") ++ ++ return hostname_res and res ++ ++ ++def build_network_settings(**settings): ++ """ ++ Build the global network script. ++ ++ CLI Example: ++ ++ .. 
code-block:: bash ++ ++ salt '*' ip.build_network_settings ++ """ ++ # Read current configuration and store default values ++ current_network_settings = _parse_suse_config(_SUSE_NETWORK_FILE) ++ ++ # Build settings ++ opts = _parse_network_settings(settings, current_network_settings) ++ try: ++ template = JINJA.get_template("network.jinja") ++ except jinja2.exceptions.TemplateNotFound: ++ log.error("Could not load template network.jinja") ++ return "" ++ network = template.render(opts) ++ ++ if settings["test"]: ++ return _read_temp(network) ++ ++ # Write settings ++ _write_file_network(network, _SUSE_NETWORK_FILE) ++ ++ __salt__["cmd.run"]("netconfig update -f") ++ ++ return _read_file(_SUSE_NETWORK_FILE) +diff --git a/salt/states/network.py b/salt/states/network.py +index f20863113b..49d7857f1d 100644 +--- a/salt/states/network.py ++++ b/salt/states/network.py +@@ -504,6 +504,8 @@ def managed(name, enabled=True, **kwargs): + msg += " Update your SLS file to get rid of this warning." + ret.setdefault("warnings", []).append(msg) + ++ is_suse = (__grains__["os_family"] == "Suse") ++ + # Build interface + try: + old = __salt__["ip.get_interface"](name) +@@ -649,17 +651,29 @@ def managed(name, enabled=True, **kwargs): + present_slaves = __salt__["cmd.run"]( + ["cat", "/sys/class/net/{}/bonding/slaves".format(name)] + ).split() +- desired_slaves = kwargs["slaves"].split() ++ if isinstance(kwargs['slaves'], list): ++ desired_slaves = kwargs['slaves'] ++ else: ++ desired_slaves = kwargs['slaves'].split() + missing_slaves = set(desired_slaves) - set(present_slaves) + + # Enslave only slaves missing in master + if missing_slaves: +- ifenslave_path = __salt__["cmd.run"](["which", "ifenslave"]).strip() +- if ifenslave_path: +- log.info( +- "Adding slaves '%s' to the master %s", +- " ".join(missing_slaves), +- name, ++ log.debug("Missing slaves of {0}: {1}".format(name, missing_slaves)) ++ if not is_suse: ++ ifenslave_path = __salt__["cmd.run"](["which", "ifenslave"]).strip() 
++ if ifenslave_path: ++ log.info( ++ "Adding slaves '%s' to the master %s", ++ " ".join(missing_slaves), ++ name, ++ ) ++ cmd = [ifenslave_path, name] + list(missing_slaves) ++ __salt__["cmd.run"](cmd, python_shell=False) ++ else: ++ log.error("Command 'ifenslave' not found") ++ ret["changes"]["enslave"] = "Added slaves '{0}' to master '{1}'".format( ++ " ".join(missing_slaves), name + ) + cmd = [ifenslave_path, name] + list(missing_slaves) + __salt__["cmd.run"](cmd, python_shell=False) +diff --git a/salt/templates/suse_ip/ifcfg.jinja b/salt/templates/suse_ip/ifcfg.jinja +new file mode 100644 +index 0000000000..8384d0eab7 +--- /dev/null ++++ b/salt/templates/suse_ip/ifcfg.jinja +@@ -0,0 +1,34 @@ ++{% if nickname %}NAME='{{nickname}}' ++{%endif%}{% if startmode %}STARTMODE='{{startmode}}' ++{%endif%}{% if proto %}BOOTPROTO='{{proto}}' ++{%endif%}{% if uuid %}UUID='{{uuid}}' ++{%endif%}{% if vlan %}VLAN='{{vlan}}' ++{%endif%}{% if team_config %}TEAM_CONFIG='{{team_config}}' ++{%endif%}{% if team_port_config %}TEAM_PORT_CONFIG='{{team_port_config}}' ++{%endif%}{% if team_master %}TEAM_MASTER='{{team_master}}' ++{%endif%}{% if ipaddr %}IPADDR='{{ipaddr}}' ++{%endif%}{% if netmask %}NETMASK='{{netmask}}' ++{%endif%}{% if prefix %}PREFIXLEN="{{prefix}}" ++{%endif%}{% if ipaddrs %}{% for i in ipaddrs -%} ++IPADDR{{loop.index}}='{{i}}' ++{% endfor -%} ++{%endif%}{% if clonenum_start %}CLONENUM_START="{{clonenum_start}}" ++{%endif%}{% if gateway %}GATEWAY="{{gateway}}" ++{%endif%}{% if arpcheck %}ARPCHECK="{{arpcheck}}" ++{%endif%}{% if srcaddr %}SRCADDR="{{srcaddr}}" ++{%endif%}{% if defroute %}DEFROUTE="{{defroute}}" ++{%endif%}{% if bridge %}BRIDGE="{{bridge}}" ++{%endif%}{% if stp %}STP="{{stp}}" ++{%endif%}{% if delay or delay == 0 %}DELAY="{{delay}}" ++{%endif%}{% if mtu %}MTU='{{mtu}}' ++{%endif%}{% if zone %}ZONE='{{zone}}' ++{%endif%}{% if bonding %}BONDING_MODULE_OPTS='{{bonding}}' ++BONDING_MASTER='yes' ++{% for sl in slaves -%} 
++BONDING_SLAVE{{loop.index}}='{{sl}}' ++{% endfor -%} ++{%endif%}{% if ethtool %}ETHTOOL_OPTIONS='{{ethtool}}' ++{%endif%}{% if phys_dev %}ETHERDEVICE='{{phys_dev}}' ++{%endif%}{% if vlan_id %}VLAN_ID='{{vlan_id}}' ++{%endif%}{% if userctl %}USERCONTROL='{{userctl}}' ++{%endif%} +diff --git a/salt/templates/suse_ip/ifroute.jinja b/salt/templates/suse_ip/ifroute.jinja +new file mode 100644 +index 0000000000..0081e4c688 +--- /dev/null ++++ b/salt/templates/suse_ip/ifroute.jinja +@@ -0,0 +1,8 @@ ++{%- for route in routes -%} ++{% if route.name %}# {{route.name}} {%- endif %} ++{{ route.ipaddr }} ++{%- if route.gateway %} {{route.gateway}}{% else %} -{% endif %} ++{%- if route.netmask %} {{route.netmask}}{% else %} -{% endif %} ++{%- if route.dev %} {{route.dev}}{% else %}{%- if iface and iface != "routes" %} {{iface}}{% else %} -{% endif %}{% endif %} ++{%- if route.metric %} metric {{route.metric}} {%- endif %} ++{% endfor -%} +diff --git a/salt/templates/suse_ip/network.jinja b/salt/templates/suse_ip/network.jinja +new file mode 100644 +index 0000000000..64ae911271 +--- /dev/null ++++ b/salt/templates/suse_ip/network.jinja +@@ -0,0 +1,30 @@ ++{% if auto6_wait_at_boot %}AUTO6_WAIT_AT_BOOT="{{auto6_wait_at_boot}}" ++{%endif%}{% if auto6_update %}AUTO6_UPDATE="{{auto6_update}}" ++{%endif%}{% if link_required %}LINK_REQUIRED="{{link_required}}" ++{%endif%}{% if wicked_debug %}WICKED_DEBUG="{{wicked_debug}}" ++{%endif%}{% if wicked_log_level %}WICKED_LOG_LEVEL="{{wicked_log_level}}" ++{%endif%}{% if check_duplicate_ip %}CHECK_DUPLICATE_IP="{{check_duplicate_ip}}" ++{%endif%}{% if send_gratuitous_arp %}SEND_GRATUITOUS_ARP="{{send_gratuitous_arp}}" ++{%endif%}{% if debug %}DEBUG="{{debug}}" ++{%endif%}{% if wait_for_interfaces %}WAIT_FOR_INTERFACES="{{wait_for_interfaces}}" ++{%endif%}{% if firewall %}FIREWALL="{{firewall}}" ++{%endif%}{% if nm_online_timeout %}NM_ONLINE_TIMEOUT="{{nm_online_timeout}}" ++{%endif%}{% if netconfig_modules_order 
%}NETCONFIG_MODULES_ORDER="{{netconfig_modules_order}}" ++{%endif%}{% if netconfig_verbose %}NETCONFIG_VERBOSE="{{netconfig_verbose}}" ++{%endif%}{% if netconfig_force_replace %}NETCONFIG_FORCE_REPLACE="{{netconfig_force_replace}}" ++{%endif%}{% if dns_policy %}NETCONFIG_DNS_POLICY="{{dns_policy}}" ++{%endif%}{% if dns_forwarder %}NETCONFIG_DNS_FORWARDER="{{dns_forwarder}}" ++{%endif%}{% if dns_forwarder_fallback %}NETCONFIG_DNS_FORWARDER_FALLBACK="{{dns_forwarder_fallback}}" ++{%endif%}{% if dns_search %}NETCONFIG_DNS_STATIC_SEARCHLIST="{{ dns_search|join(' ') }}" ++{%endif%}{% if dns %}NETCONFIG_DNS_STATIC_SERVERS="{{ dns|join(' ') }}" ++{%endif%}{% if dns_ranking %}NETCONFIG_DNS_RANKING="{{dns_ranking}}" ++{%endif%}{% if dns_resolver_options %}NETCONFIG_DNS_RESOLVER_OPTIONS="{{dns_resolver_options}}" ++{%endif%}{% if dns_resolver_sortlist %}NETCONFIG_DNS_RESOLVER_SORTLIST="{{dns_resolver_sortlist}}" ++{%endif%}{% if ntp_policy %}NETCONFIG_NTP_POLICY="{{ntp_policy}}" ++{%endif%}{% if ntp_static_servers %}NETCONFIG_NTP_STATIC_SERVERS="{{ntp_static_servers}}" ++{%endif%}{% if nis_policy %}NETCONFIG_NIS_POLICY="{{nis_policy}}" ++{%endif%}{% if nis_setdomainname %}NETCONFIG_NIS_SETDOMAINNAME="{{nis_setdomainname}}" ++{%endif%}{% if nis_static_domain %}NETCONFIG_NIS_STATIC_DOMAIN="{{nis_static_domain}}" ++{%endif%}{% if nis_static_servers %}NETCONFIG_NIS_STATIC_SERVERS="{{nis_static_servers}}" ++{%endif%}{% if wireless_regulatory_domain %}WIRELESS_REGULATORY_DOMAIN="{{wireless_regulatory_domain}}" ++{%endif%} +diff --git a/setup.py b/setup.py +index e13e5485ed..866e8d91f9 100755 +--- a/setup.py ++++ b/setup.py +@@ -1106,6 +1106,7 @@ class SaltDistribution(distutils.dist.Distribution): + package_data = { + "salt.templates": [ + "rh_ip/*.jinja", ++ "suse_ip/*.jinja", + "debian_ip/*.jinja", + "virt/*.jinja", + "git/*", +-- +2.33.0 + + diff --git a/improvements-on-ansiblegate-module-354.patch b/improvements-on-ansiblegate-module-354.patch new file mode 100644 index 
0000000..af82f26 --- /dev/null +++ b/improvements-on-ansiblegate-module-354.patch @@ -0,0 +1,556 @@ +From b58056da2f5a12e3d614650904039c0655ce1221 Mon Sep 17 00:00:00 2001 +From: Alexander Graul +Date: Tue, 18 Jan 2022 19:41:03 +0100 +Subject: [PATCH] Improvements on "ansiblegate" module (#354) + +* Allow collecting Ansible Inventory from a minion + +* Prevent crashing if ansible-playbook doesn't return JSON + +* Add new 'ansible.discover_playbooks' method + +* Include custom inventory when discovering Ansible playbooks + +* Enhance 'ansible.discover_playbooks' to accept a list of locations + +* Remove unused constants from Ansible utils + +* Avoid string concatenation to calculate extra cmd args + +* Add unit test for ansible.targets + +* Improve Ansible roster targetting + +* Add tests for new ansiblegate module functions + +* Fix issue dealing with ungrouped targets on inventory + +* Enable ansible utils for ansible roster tests + +* Remove unnecessary code from Ansible utils + +* Fix pylint issue + +* Fix issue in documentation + +Fix issue parsing errors in ansiblegate state module +--- + salt/modules/ansiblegate.py | 167 +++++++++++++++++- + salt/roster/ansible.py | 17 +- + salt/states/ansiblegate.py | 12 +- + salt/utils/ansible.py | 41 +++++ + .../pytests/unit/modules/test_ansiblegate.py | 99 ++++++++++- + .../example_playbooks/example-playbook2/hosts | 7 + + .../example-playbook2/site.yml | 28 +++ + .../playbooks/example_playbooks/playbook1.yml | 5 + + tests/unit/roster/test_ansible.py | 2 +- + 9 files changed, 367 insertions(+), 11 deletions(-) + create mode 100644 salt/utils/ansible.py + create mode 100644 tests/unit/files/playbooks/example_playbooks/example-playbook2/hosts + create mode 100644 tests/unit/files/playbooks/example_playbooks/example-playbook2/site.yml + create mode 100644 tests/unit/files/playbooks/example_playbooks/playbook1.yml + +diff --git a/salt/modules/ansiblegate.py b/salt/modules/ansiblegate.py +index 328d9b7b0a..f33be6a00e 100644 
+--- a/salt/modules/ansiblegate.py ++++ b/salt/modules/ansiblegate.py +@@ -17,6 +17,7 @@ any Ansible module to respond. + import fnmatch + import json + import logging ++import os + import subprocess + import sys + from tempfile import NamedTemporaryFile +@@ -365,7 +366,171 @@ def playbooks( + } + ret = __salt__["cmd.run_all"](**cmd_kwargs) + log.debug("Ansible Playbook Return: %s", ret) +- retdata = json.loads(ret["stdout"]) ++ try: ++ retdata = json.loads(ret["stdout"]) ++ except ValueError: ++ retdata = ret + if "retcode" in ret: + __context__["retcode"] = retdata["retcode"] = ret["retcode"] + return retdata ++ ++ ++def targets(**kwargs): ++ """ ++ Return the inventory from an Ansible inventory_file ++ ++ :param inventory: ++ The inventory file to read the inventory from. Default: "/etc/ansible/hosts" ++ ++ :param yaml: ++ Return the inventory as yaml output. Default: False ++ ++ :param export: ++ Return inventory as export format. Default: False ++ ++ CLI Example: ++ ++ .. code-block:: bash ++ ++ salt 'ansiblehost' ansible.targets ++ salt 'ansiblehost' ansible.targets inventory=my_custom_inventory ++ ++ """ ++ return __utils__["ansible.targets"](**kwargs) ++ ++ ++def discover_playbooks(path=None, ++ locations=None, ++ playbook_extension=None, ++ hosts_filename=None, ++ syntax_check=False): ++ """ ++ Discover Ansible playbooks stored under the given path or from multiple paths (locations) ++ ++ This will search for files matching with the playbook file extension under the given ++ root path and will also look for files inside the first level of directories in this path. ++ ++ The return of this function would be a dict like this: ++ ++ .. 
code-block:: python ++ ++ { ++ "/home/foobar/": { ++ "my_ansible_playbook.yml": { ++ "fullpath": "/home/foobar/playbooks/my_ansible_playbook.yml", ++ "custom_inventory": "/home/foobar/playbooks/hosts" ++ }, ++ "another_playbook.yml": { ++ "fullpath": "/home/foobar/playbooks/another_playbook.yml", ++ "custom_inventory": "/home/foobar/playbooks/hosts" ++ }, ++ "lamp_simple/site.yml": { ++ "fullpath": "/home/foobar/playbooks/lamp_simple/site.yml", ++ "custom_inventory": "/home/foobar/playbooks/lamp_simple/hosts" ++ }, ++ "lamp_proxy/site.yml": { ++ "fullpath": "/home/foobar/playbooks/lamp_proxy/site.yml", ++ "custom_inventory": "/home/foobar/playbooks/lamp_proxy/hosts" ++ } ++ }, ++ "/srv/playbooks/": { ++ "example_playbook/example.yml": { ++ "fullpath": "/srv/playbooks/example_playbook/example.yml", ++ "custom_inventory": "/srv/playbooks/example_playbook/hosts" ++ } ++ } ++ } ++ ++ :param path: ++ Path to discover playbooks from. ++ ++ :param locations: ++ List of paths to discover playbooks from. ++ ++ :param playbook_extension: ++ File extension of playbooks file to search for. Default: "yml" ++ ++ :param hosts_filename: ++ Filename of custom playbook inventory to search for. Default: "hosts" ++ ++ :param syntax_check: ++ Skip playbooks that do not pass "ansible-playbook --syntax-check" validation. Default: False ++ ++ :return: ++ The discovered playbooks under the given paths ++ ++ CLI Example: ++ ++ .. 
code-block:: bash ++ ++ salt 'ansiblehost' ansible.discover_playbooks path=/srv/playbooks/ ++ salt 'ansiblehost' ansible.discover_playbooks locations='["/srv/playbooks/", "/srv/foobar"]' ++ ++ """ ++ ++ if not path and not locations: ++ raise CommandExecutionError("You have to specify either 'path' or 'locations' arguments") ++ ++ if path and locations: ++ raise CommandExecutionError("You cannot specify 'path' and 'locations' at the same time") ++ ++ if not playbook_extension: ++ playbook_extension = "yml" ++ if not hosts_filename: ++ hosts_filename = "hosts" ++ ++ if path: ++ if not os.path.isabs(path): ++ raise CommandExecutionError("The given path is not an absolute path: {}".format(path)) ++ if not os.path.isdir(path): ++ raise CommandExecutionError("The given path is not a directory: {}".format(path)) ++ return {path: _explore_path(path, playbook_extension, hosts_filename, syntax_check)} ++ ++ if locations: ++ all_ret = {} ++ for location in locations: ++ all_ret[location] = _explore_path(location, playbook_extension, hosts_filename, syntax_check) ++ return all_ret ++ ++ ++def _explore_path(path, playbook_extension, hosts_filename, syntax_check): ++ ret = {} ++ ++ if not os.path.isabs(path): ++ log.error("The given path is not an absolute path: {}".format(path)) ++ return ret ++ if not os.path.isdir(path): ++ log.error("The given path is not a directory: {}".format(path)) ++ return ret ++ ++ try: ++ # Check files in the given path ++ for _f in os.listdir(path): ++ _path = os.path.join(path, _f) ++ if os.path.isfile(_path) and _path.endswith("." 
+ playbook_extension): ++ ret[_f] = {"fullpath": _path} ++ # Check for custom inventory file ++ if os.path.isfile(os.path.join(path, hosts_filename)): ++ ret[_f].update({"custom_inventory": os.path.join(path, hosts_filename)}) ++ elif os.path.isdir(_path): ++ # Check files in the 1st level of subdirectories ++ for _f2 in os.listdir(_path): ++ _path2 = os.path.join(_path, _f2) ++ if os.path.isfile(_path2) and _path2.endswith("." + playbook_extension): ++ ret[os.path.join(_f, _f2)] = {"fullpath": _path2} ++ # Check for custom inventory file ++ if os.path.isfile(os.path.join(_path, hosts_filename)): ++ ret[os.path.join(_f, _f2)].update({"custom_inventory": os.path.join(_path, hosts_filename)}) ++ except Exception as exc: ++ raise CommandExecutionError("There was an exception while discovering playbooks: {}".format(exc)) ++ ++ # Run syntax check validation ++ if syntax_check: ++ check_command = ["ansible-playbook", "--syntax-check"] ++ try: ++ for pb in list(ret): ++ if __salt__["cmd.retcode"](check_command + [ret[pb]]): ++ del ret[pb] ++ except Exception as exc: ++ raise CommandExecutionError("There was an exception while checking syntax of playbooks: {}".format(exc)) ++ return ret +diff --git a/salt/roster/ansible.py b/salt/roster/ansible.py +index 7beaaf2075..d3b352de27 100644 +--- a/salt/roster/ansible.py ++++ b/salt/roster/ansible.py +@@ -117,27 +117,32 @@ def targets(tgt, tgt_type="glob", **kwargs): + Return the targets from the ansible inventory_file + Default: /etc/salt/roster + """ +- inventory = __runner__["salt.cmd"]( +- "cmd.run", "ansible-inventory -i {} --list".format(get_roster_file(__opts__)) +- ) +- __context__["inventory"] = __utils__["json.loads"]( +- __utils__["stringutils.to_str"](inventory) ++ __context__["inventory"] = __utils__["ansible.targets"]( ++ inventory=get_roster_file(__opts__), **kwargs + ) + + if tgt_type == "glob": + hosts = [ + host for host in _get_hosts_from_group("all") if fnmatch.fnmatch(host, tgt) + ] ++ elif tgt_type == "list": 
++ hosts = [host for host in _get_hosts_from_group("all") if host in tgt] + elif tgt_type == "nodegroup": + hosts = _get_hosts_from_group(tgt) ++ else: ++ hosts = [] ++ + return {host: _get_hostvars(host) for host in hosts} + + + def _get_hosts_from_group(group): + inventory = __context__["inventory"] ++ if group not in inventory: ++ return [] + hosts = [host for host in inventory[group].get("hosts", [])] + for child in inventory[group].get("children", []): +- if child != "ungrouped": ++ child_info = _get_hosts_from_group(child) ++ if child_info not in hosts: + hosts.extend(_get_hosts_from_group(child)) + return hosts + +diff --git a/salt/states/ansiblegate.py b/salt/states/ansiblegate.py +index 4afe6a020d..af5cb0f0e5 100644 +--- a/salt/states/ansiblegate.py ++++ b/salt/states/ansiblegate.py +@@ -184,7 +184,11 @@ def playbooks(name, rundir=None, git_repo=None, git_kwargs=None, ansible_kwargs= + checks = __salt__["ansible.playbooks"]( + name, rundir=rundir, check=True, diff=True, **ansible_kwargs + ) +- if all( ++ if "stats" not in checks: ++ ret["comment"] = checks.get("stderr", checks) ++ ret["result"] = False ++ ret["changes"] = {} ++ elif all( + not check["changed"] + and not check["failures"] + and not check["unreachable"] +@@ -213,7 +217,11 @@ def playbooks(name, rundir=None, git_repo=None, git_kwargs=None, ansible_kwargs= + results = __salt__["ansible.playbooks"]( + name, rundir=rundir, diff=True, **ansible_kwargs + ) +- if all( ++ if "stats" not in results: ++ ret["comment"] = results.get("stderr", results) ++ ret["result"] = False ++ ret["changes"] = {} ++ elif all( + not check["changed"] + and not check["failures"] + and not check["unreachable"] +diff --git a/salt/utils/ansible.py b/salt/utils/ansible.py +new file mode 100644 +index 0000000000..1e14037fd3 +--- /dev/null ++++ b/salt/utils/ansible.py +@@ -0,0 +1,41 @@ ++import logging ++import os ++ ++# Import Salt libs ++import salt.utils.json ++import salt.utils.path ++import salt.utils.stringutils 
++import salt.modules.cmdmod ++from salt.exceptions import CommandExecutionError ++ ++__virtualname__ = "ansible" ++ ++log = logging.getLogger(__name__) ++ ++ ++def __virtual__(): # pylint: disable=expected-2-blank-lines-found-0 ++ if salt.utils.path.which("ansible-inventory"): ++ return __virtualname__ ++ return (False, "Install `ansible` to use inventory") ++ ++ ++def targets(inventory="/etc/ansible/hosts", **kwargs): ++ """ ++ Return the targets from the ansible inventory_file ++ Default: /etc/salt/roster ++ """ ++ if not os.path.isfile(inventory): ++ raise CommandExecutionError("Inventory file not found: {}".format(inventory)) ++ ++ extra_cmd = [] ++ if "export" in kwargs: ++ extra_cmd.append("--export") ++ if "yaml" in kwargs: ++ extra_cmd.append("--yaml") ++ inv = salt.modules.cmdmod.run( ++ "ansible-inventory -i {} --list {}".format(inventory, " ".join(extra_cmd)) ++ ) ++ if kwargs.get("yaml", False): ++ return salt.utils.stringutils.to_str(inv) ++ else: ++ return salt.utils.json.loads(salt.utils.stringutils.to_str(inv)) +diff --git a/tests/pytests/unit/modules/test_ansiblegate.py b/tests/pytests/unit/modules/test_ansiblegate.py +index 44c9b12acb..f357133000 100644 +--- a/tests/pytests/unit/modules/test_ansiblegate.py ++++ b/tests/pytests/unit/modules/test_ansiblegate.py +@@ -1,9 +1,15 @@ + # Author: Bo Maryniuk ++import os + + import pytest ++ ++import salt.config ++import salt.loader + import salt.modules.ansiblegate as ansiblegate + import salt.utils.json ++import salt.utils.path + from tests.support.mock import ANY, MagicMock, patch ++from tests.support.runtests import RUNTIME_VARS + + pytestmark = [ + pytest.mark.skip_on_windows(reason="Not supported on Windows"), +@@ -12,7 +18,7 @@ pytestmark = [ + + @pytest.fixture + def configure_loader_modules(): +- return {ansiblegate: {}} ++ return {ansiblegate: {"__utils__": {}}} + + + def test_ansible_module_help(): +@@ -133,3 +139,94 @@ def test_ansible_playbooks_return_retcode(): + ): + ret = 
ansiblegate.playbooks("fake-playbook.yml") + assert "retcode" in ret ++ ++ ++def test_ansible_targets(): ++ """ ++ Test ansible.targets execution module function. ++ :return: ++ """ ++ ansible_inventory_ret = """ ++{ ++ "_meta": { ++ "hostvars": { ++ "uyuni-stable-ansible-centos7-1.tf.local": { ++ "ansible_ssh_private_key_file": "/etc/ansible/my_ansible_private_key" ++ }, ++ "uyuni-stable-ansible-centos7-2.tf.local": { ++ "ansible_ssh_private_key_file": "/etc/ansible/my_ansible_private_key" ++ } ++ } ++ }, ++ "all": { ++ "children": [ ++ "ungrouped" ++ ] ++ }, ++ "ungrouped": { ++ "hosts": [ ++ "uyuni-stable-ansible-centos7-1.tf.local", ++ "uyuni-stable-ansible-centos7-2.tf.local" ++ ] ++ } ++} ++ """ ++ ansible_inventory_mock = MagicMock(return_value=ansible_inventory_ret) ++ with patch("salt.utils.path.which", MagicMock(return_value=True)): ++ opts = salt.config.DEFAULT_MINION_OPTS.copy() ++ utils = salt.loader.utils(opts, whitelist=["ansible"]) ++ with patch("salt.modules.cmdmod.run", ansible_inventory_mock), patch.dict( ++ ansiblegate.__utils__, utils ++ ), patch("os.path.isfile", MagicMock(return_value=True)): ++ ret = ansiblegate.targets() ++ assert ansible_inventory_mock.call_args ++ assert "_meta" in ret ++ assert "uyuni-stable-ansible-centos7-1.tf.local" in ret["_meta"]["hostvars"] ++ assert ( ++ "ansible_ssh_private_key_file" ++ in ret["_meta"]["hostvars"]["uyuni-stable-ansible-centos7-1.tf.local"] ++ ) ++ assert "all" in ret ++ assert len(ret["ungrouped"]["hosts"]) == 2 ++ ++ ++def test_ansible_discover_playbooks_single_path(): ++ playbooks_dir = os.path.join( ++ RUNTIME_VARS.TESTS_DIR, "unit/files/playbooks/example_playbooks/" ++ ) ++ ret = ansiblegate.discover_playbooks(playbooks_dir) ++ assert playbooks_dir in ret ++ assert ret[playbooks_dir]["playbook1.yml"] == { ++ "fullpath": os.path.join(playbooks_dir, "playbook1.yml") ++ } ++ assert ret[playbooks_dir]["example-playbook2/site.yml"] == { ++ "fullpath": os.path.join(playbooks_dir, 
"example-playbook2/site.yml"), ++ "custom_inventory": os.path.join(playbooks_dir, "example-playbook2/hosts"), ++ } ++ ++ ++def test_ansible_discover_playbooks_single_path_using_parameters(): ++ playbooks_dir = os.path.join( ++ RUNTIME_VARS.TESTS_DIR, "unit/files/playbooks/example_playbooks/" ++ ) ++ ret = ansiblegate.discover_playbooks( ++ playbooks_dir, playbook_extension="foobar", hosts_filename="deadbeaf" ++ ) ++ assert playbooks_dir in ret ++ assert ret[playbooks_dir] == {} ++ ++ ++def test_ansible_discover_playbooks_multiple_locations(): ++ playbooks_dir = os.path.join( ++ RUNTIME_VARS.TESTS_DIR, "unit/files/playbooks/example_playbooks/" ++ ) ++ ret = ansiblegate.discover_playbooks(locations=[playbooks_dir, "/tmp/foobar"]) ++ assert playbooks_dir in ret ++ assert "/tmp/foobar" in ret ++ assert ret[playbooks_dir]["playbook1.yml"] == { ++ "fullpath": os.path.join(playbooks_dir, "playbook1.yml") ++ } ++ assert ret[playbooks_dir]["example-playbook2/site.yml"] == { ++ "fullpath": os.path.join(playbooks_dir, "example-playbook2/site.yml"), ++ "custom_inventory": os.path.join(playbooks_dir, "example-playbook2/hosts"), ++ } +diff --git a/tests/unit/files/playbooks/example_playbooks/example-playbook2/hosts b/tests/unit/files/playbooks/example_playbooks/example-playbook2/hosts +new file mode 100644 +index 0000000000..75783285f6 +--- /dev/null ++++ b/tests/unit/files/playbooks/example_playbooks/example-playbook2/hosts +@@ -0,0 +1,7 @@ ++[databases] ++host1 ++host2 ++ ++[webservers] ++host3 ++host4 +diff --git a/tests/unit/files/playbooks/example_playbooks/example-playbook2/site.yml b/tests/unit/files/playbooks/example_playbooks/example-playbook2/site.yml +new file mode 100644 +index 0000000000..a64ebd5e18 +--- /dev/null ++++ b/tests/unit/files/playbooks/example_playbooks/example-playbook2/site.yml +@@ -0,0 +1,28 @@ ++--- ++- name: update web servers ++ hosts: webservers ++ remote_user: root ++ ++ tasks: ++ - name: ensure apache is at the latest version ++ yum: ++ name: 
httpd ++ state: latest ++ - name: write the apache config file ++ template: ++ src: /srv/httpd.j2 ++ dest: /etc/httpd.conf ++ ++- name: update db servers ++ hosts: databases ++ remote_user: root ++ ++ tasks: ++ - name: ensure postgresql is at the latest version ++ yum: ++ name: postgresql ++ state: latest ++ - name: ensure that postgresql is started ++ service: ++ name: postgresql ++ state: started +diff --git a/tests/unit/files/playbooks/example_playbooks/playbook1.yml b/tests/unit/files/playbooks/example_playbooks/playbook1.yml +new file mode 100644 +index 0000000000..e258a101e1 +--- /dev/null ++++ b/tests/unit/files/playbooks/example_playbooks/playbook1.yml +@@ -0,0 +1,5 @@ ++--- ++- hosts: all ++ gather_facts: false ++ tasks: ++ - ping: +diff --git a/tests/unit/roster/test_ansible.py b/tests/unit/roster/test_ansible.py +index 7f1144454b..c4ab8b7639 100644 +--- a/tests/unit/roster/test_ansible.py ++++ b/tests/unit/roster/test_ansible.py +@@ -63,7 +63,7 @@ class AnsibleRosterTestCase(TestCase, mixins.LoaderModuleMockMixin): + opts = salt.config.master_config( + os.path.join(RUNTIME_VARS.TMP_CONF_DIR, "master") + ) +- utils = salt.loader.utils(opts, whitelist=["json", "stringutils"]) ++ utils = salt.loader.utils(opts, whitelist=["json", "stringutils", "ansible"]) + runner = salt.loader.runner(opts, utils=utils, whitelist=["salt"]) + return {ansible: {"__utils__": utils, "__opts__": {}, "__runner__": runner}} + +-- +2.34.1 + + diff --git a/include-aliases-in-the-fqdns-grains.patch b/include-aliases-in-the-fqdns-grains.patch index 602e78d..90ca818 100644 --- a/include-aliases-in-the-fqdns-grains.patch +++ b/include-aliases-in-the-fqdns-grains.patch @@ -1,4 +1,4 @@ -From 797b256548cbcda0f3828c6d182c44a3815dd313 Mon Sep 17 00:00:00 2001 +From 834defc8e38c4495ed51bb549d86727dd8b812b3 Mon Sep 17 00:00:00 2001 From: Alexander Graul Date: Tue, 18 Jan 2022 17:10:37 +0100 Subject: [PATCH] Include aliases in the fqdns grains @@ -24,19 +24,18 @@ Implement network.fqdns module 
function (bsc#1134860) (#172) Co-authored-by: Eric Siebigteroth --- - salt/modules/network.py | 5 +++- - salt/utils/network.py | 16 +++++++++++ - tests/pytests/unit/modules/test_network.py | 4 +-- - tests/unit/utils/test_network.py | 32 ++++++++++++++++++++++ - 4 files changed, 54 insertions(+), 3 deletions(-) + salt/modules/network.py | 5 ++++- + salt/utils/network.py | 16 ++++++++++++++ + tests/unit/utils/test_network.py | 37 ++++++++++++++++++++++++++++++++ + 3 files changed, 57 insertions(+), 1 deletion(-) diff --git a/salt/modules/network.py b/salt/modules/network.py -index 1149c96097..8c2d188903 100644 +index 08c20b99f9..53ebfe4bc7 100644 --- a/salt/modules/network.py +++ b/salt/modules/network.py -@@ -2100,7 +2100,10 @@ def fqdns(): - # https://sourceware.org/bugzilla/show_bug.cgi?id=19329 - time.sleep(random.randint(5, 25) / 1000) +@@ -2089,7 +2089,10 @@ def fqdns(): + + def _lookup_fqdn(ip): try: - return [socket.getfqdn(socket.gethostbyaddr(ip)[0])] + name, aliaslist, addresslist = socket.gethostbyaddr(ip) @@ -47,10 +46,10 @@ index 1149c96097..8c2d188903 100644 if err.errno in (0, HOST_NOT_FOUND, NO_DATA): # No FQDN for this IP address, so we don't need to know this all the time. 
diff --git a/salt/utils/network.py b/salt/utils/network.py -index d042cd177d..bccda01556 100644 +index 22075066fd..8867041e0e 100644 --- a/salt/utils/network.py +++ b/salt/utils/network.py -@@ -2332,3 +2332,19 @@ def filter_by_networks(values, networks): +@@ -2302,3 +2302,19 @@ def filter_by_networks(values, networks): raise ValueError("Do not know how to filter a {}".format(type(values))) else: return values @@ -70,37 +69,20 @@ index d042cd177d..bccda01556 100644 + and len(hostname) < 0xFF + and all(compliant.match(x) for x in hostname.rstrip(".").split(".")) + ) -diff --git a/tests/pytests/unit/modules/test_network.py b/tests/pytests/unit/modules/test_network.py -index 15fd5545a0..b948e578bb 100644 ---- a/tests/pytests/unit/modules/test_network.py -+++ b/tests/pytests/unit/modules/test_network.py -@@ -28,7 +28,7 @@ def fake_fqdn(): - with patch("socket.getfqdn", autospec=True, return_value=fqdn), patch( - "socket.gethostbyaddr", - autospec=True, -- return_value=("fnord", "fnord fnord"), -+ return_value=("fnord", ["fnord fnord"], []), - ): - yield fqdn - -@@ -88,7 +88,7 @@ def test_fqdns_should_return_sorted_unique_domains(fake_ips): - with patch("socket.getfqdn", autospec=True, side_effect=fake_domains), patch( - "socket.gethostbyaddr", - autospec=True, -- return_value=("fnord", "fnord fnord"), -+ return_value=("fnord", ["fnord fnord"], []), - ): - actual_fqdns = networkmod.fqdns() - assert actual_fqdns == { diff --git a/tests/unit/utils/test_network.py b/tests/unit/utils/test_network.py -index 422f85d68c..3060aba0aa 100644 +index 6863ccd0c9..637d5e9811 100644 --- a/tests/unit/utils/test_network.py +++ b/tests/unit/utils/test_network.py -@@ -1269,3 +1269,35 @@ class NetworkTestCase(TestCase): +@@ -1273,3 +1273,40 @@ class NetworkTestCase(TestCase): ), ): self.assertEqual(network.get_fqhostname(), host) + ++ def test_netlink_tool_remote_on(self): ++ with patch("subprocess.check_output", return_value=NETLINK_SS): ++ remotes = network._netlink_tool_remote_on("4505", 
"remote") ++ self.assertEqual(remotes, {"127.0.0.1", "::ffff:1.2.3.4"}) ++ + def test_is_fqdn(self): + """ + Test is_fqdn function passes possible FQDN names. @@ -133,6 +115,6 @@ index 422f85d68c..3060aba0aa 100644 + ]: + assert not network.is_fqdn(fqdn) -- -2.37.3 +2.34.1 diff --git a/include-stdout-in-error-message-for-zypperpkg-559.patch b/include-stdout-in-error-message-for-zypperpkg-559.patch deleted file mode 100644 index 79615fe..0000000 --- a/include-stdout-in-error-message-for-zypperpkg-559.patch +++ /dev/null @@ -1,63 +0,0 @@ -From f9fe9ea009915478ea8f7896dff2c281e68b5d36 Mon Sep 17 00:00:00 2001 -From: =?UTF-8?q?Yeray=20Guti=C3=A9rrez=20Cedr=C3=A9s?= - -Date: Fri, 14 Oct 2022 08:41:40 +0100 -Subject: [PATCH] Include stdout in error message for zypperpkg (#559) - ---- - salt/modules/zypperpkg.py | 5 +++++ - tests/unit/modules/test_zypperpkg.py | 17 ++++++++++++++++- - 2 files changed, 21 insertions(+), 1 deletion(-) - -diff --git a/salt/modules/zypperpkg.py b/salt/modules/zypperpkg.py -index c787d4009d..5d745c432d 100644 ---- a/salt/modules/zypperpkg.py -+++ b/salt/modules/zypperpkg.py -@@ -339,6 +339,11 @@ class _Zypper: - and self.__call_result["stderr"].strip() - or "" - ) -+ msg += ( -+ self.__call_result["stdout"] -+ and self.__call_result["stdout"].strip() -+ or "" -+ ) - if msg: - _error_msg.append(msg) - else: -diff --git a/tests/unit/modules/test_zypperpkg.py b/tests/unit/modules/test_zypperpkg.py -index 37d555844c..bcd001cd85 100644 ---- a/tests/unit/modules/test_zypperpkg.py -+++ b/tests/unit/modules/test_zypperpkg.py -@@ -207,11 +207,26 @@ class ZypperTestCase(TestCase, LoaderModuleMockMixin): - ): - zypper.__zypper__.xml.call("crashme") - -+ output_to_user_stdout = "Output to user to stdout" -+ output_to_user_stderr = "Output to user to stderr" -+ sniffer = RunSniffer( -+ stdout=output_to_user_stdout, stderr=output_to_user_stderr, retcode=1 -+ ) -+ with patch.dict( -+ "salt.modules.zypperpkg.__salt__", {"cmd.run_all": sniffer} -+ ), 
patch.object(zypper.__zypper__, "_is_rpm_lock", return_value=False): - with self.assertRaisesRegex( -- CommandExecutionError, "^Zypper command failure: Check Zypper's logs.$" -+ CommandExecutionError, -+ "^Zypper command failure: {}$".format( -+ output_to_user_stderr + output_to_user_stdout -+ ), - ): - zypper.__zypper__.call("crashme again") - -+ sniffer = RunSniffer(retcode=1) -+ with patch.dict( -+ "salt.modules.zypperpkg.__salt__", {"cmd.run_all": sniffer} -+ ), patch.object(zypper.__zypper__, "_is_rpm_lock", return_value=False): - zypper.__zypper__.noraise.call("stay quiet") - self.assertEqual(zypper.__zypper__.error_msg, "Check Zypper's logs.") - --- -2.37.3 - - diff --git a/info_installed-works-without-status-attr-now.patch b/info_installed-works-without-status-attr-now.patch index 45581d7..cf4872c 100644 --- a/info_installed-works-without-status-attr-now.patch +++ b/info_installed-works-without-status-attr-now.patch @@ -1,4 +1,4 @@ -From 55fc248aabd486f8ae4ff2bc755d653cdc39a4bb Mon Sep 17 00:00:00 2001 +From 3f8e937d938f19dd40fde527497f7775bbffe353 Mon Sep 17 00:00:00 2001 From: Alexander Graul Date: Tue, 25 Jan 2022 17:12:47 +0100 Subject: [PATCH] info_installed works without status attr now @@ -12,10 +12,10 @@ detect if a package was installed or not. 
Now info_installed adds the 2 files changed, 27 insertions(+) diff --git a/salt/modules/aptpkg.py b/salt/modules/aptpkg.py -index 4a2281c47f..544143d286 100644 +index 3c3fbf4970..0d378355ab 100644 --- a/salt/modules/aptpkg.py +++ b/salt/modules/aptpkg.py -@@ -3479,6 +3479,15 @@ def info_installed(*names, **kwargs): +@@ -3056,6 +3056,15 @@ def info_installed(*names, **kwargs): failhard = kwargs.pop("failhard", True) kwargs.pop("errors", None) # Only for compatibility with RPM attr = kwargs.pop("attr", None) # Package attributes to return @@ -32,10 +32,10 @@ index 4a2281c47f..544143d286 100644 "all_versions", False ) # This is for backward compatible structure only diff --git a/tests/pytests/unit/modules/test_aptpkg.py b/tests/pytests/unit/modules/test_aptpkg.py -index 8e404a673c..76b59d8604 100644 +index 51b7ffbe4d..8c64c8c9c1 100644 --- a/tests/pytests/unit/modules/test_aptpkg.py +++ b/tests/pytests/unit/modules/test_aptpkg.py -@@ -384,6 +384,24 @@ def test_info_installed_attr(lowpkg_info_var): +@@ -361,6 +361,24 @@ def test_info_installed_attr(lowpkg_info_var): assert ret["wget"] == expected_pkg @@ -61,6 +61,6 @@ index 8e404a673c..76b59d8604 100644 """ Test info_installed 'all_versions'. -- -2.37.3 +2.34.1 diff --git a/let-salt-ssh-use-platform-python-binary-in-rhel8-191.patch b/let-salt-ssh-use-platform-python-binary-in-rhel8-191.patch index 94b669b..ebbc276 100644 --- a/let-salt-ssh-use-platform-python-binary-in-rhel8-191.patch +++ b/let-salt-ssh-use-platform-python-binary-in-rhel8-191.patch @@ -1,4 +1,4 @@ -From c68a988ebcc7bae06231c9a9ebc3d654a7d6ffbc Mon Sep 17 00:00:00 2001 +From a6e490d8cede6e66bb5f22f314e1ec4e898dfa3c Mon Sep 17 00:00:00 2001 From: Can Bulut Bayburt <1103552+cbbayburt@users.noreply.github.com> Date: Wed, 4 Dec 2019 15:59:46 +0100 Subject: [PATCH] Let salt-ssh use 'platform-python' binary in RHEL8 @@ -14,11 +14,11 @@ creating the sh shim. 
1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/salt/client/ssh/__init__.py b/salt/client/ssh/__init__.py -index 5e557c51da..ef162d2270 100644 +index 287d0b8c4c..ef9eb0c07e 100644 --- a/salt/client/ssh/__init__.py +++ b/salt/client/ssh/__init__.py -@@ -146,7 +146,7 @@ if [ "$SUDO" ] && [ "$SUDO_USER" ] - then SUDO="$SUDO -u $SUDO_USER" +@@ -147,7 +147,7 @@ elif [ "$SUDO" ] && [ -n "$SUDO_USER" ] + then SUDO="sudo " fi EX_PYTHON_INVALID={EX_THIN_PYTHON_INVALID} -PYTHON_CMDS="python3 python27 python2.7 python26 python2.6 python2 python" @@ -27,6 +27,6 @@ index 5e557c51da..ef162d2270 100644 do if command -v "$py_cmd" >/dev/null 2>&1 && "$py_cmd" -c "import sys; sys.exit(not (sys.version_info >= (2, 6)));" -- -2.37.3 +2.29.2 diff --git a/make-aptpkg.list_repos-compatible-on-enabled-disable.patch b/make-aptpkg.list_repos-compatible-on-enabled-disable.patch index 8b51dcb..fb9324a 100644 --- a/make-aptpkg.list_repos-compatible-on-enabled-disable.patch +++ b/make-aptpkg.list_repos-compatible-on-enabled-disable.patch @@ -1,4 +1,4 @@ -From f113f94d40ee37919aa974a4fab95e482c5a0631 Mon Sep 17 00:00:00 2001 +From f31ab712a0838709bee0ba2420c99caa6700fbf4 Mon Sep 17 00:00:00 2001 From: Bo Maryniuk Date: Fri, 16 Nov 2018 10:54:12 +0100 Subject: [PATCH] Make aptpkg.list_repos compatible on enabled/disabled @@ -9,10 +9,10 @@ Subject: [PATCH] Make aptpkg.list_repos compatible on enabled/disabled 1 file changed, 3 insertions(+) diff --git a/salt/modules/aptpkg.py b/salt/modules/aptpkg.py -index 94e97a0b5b..0cbd611b71 100644 +index 0a1c3b347c..1b4e311cee 100644 --- a/salt/modules/aptpkg.py +++ b/salt/modules/aptpkg.py -@@ -1933,6 +1933,9 @@ def list_repos(**kwargs): +@@ -1691,6 +1691,9 @@ def list_repos(**kwargs): repo["file"] = source.file repo["comps"] = getattr(source, "comps", []) repo["disabled"] = source.disabled @@ -23,6 +23,6 @@ index 94e97a0b5b..0cbd611b71 100644 repo["type"] = source.type repo["uri"] = source.uri -- -2.37.3 +2.33.0 diff --git 
a/make-pass-renderer-configurable-other-fixes-532.patch b/make-pass-renderer-configurable-other-fixes-532.patch index a99c412..bb79dbf 100644 --- a/make-pass-renderer-configurable-other-fixes-532.patch +++ b/make-pass-renderer-configurable-other-fixes-532.patch @@ -1,4 +1,4 @@ -From 030e2cb20af09673d5f38d68bcb257c6c839a2f3 Mon Sep 17 00:00:00 2001 +From 7b4f5007b7e6a35386d197afe53d02c8d7b41d53 Mon Sep 17 00:00:00 2001 From: Daniel Mach Date: Thu, 6 Oct 2022 11:58:23 +0200 Subject: [PATCH] Make pass renderer configurable & other fixes (#532) @@ -71,10 +71,10 @@ index 0000000000..22a9711383 +Only trailing newlines are stripped from the fetched secret. +Pass process arguments are handled in a secure way. diff --git a/salt/config/__init__.py b/salt/config/__init__.py -index 7cdee12c4d..0cc0deb874 100644 +index 2c42290598..9e72a5b4b7 100644 --- a/salt/config/__init__.py +++ b/salt/config/__init__.py -@@ -967,6 +967,14 @@ VALID_OPTS = immutabletypes.freeze( +@@ -960,6 +960,14 @@ VALID_OPTS = immutabletypes.freeze( # Use Adler32 hashing algorithm for server_id (default False until Sodium, "adler32" after) # Possible values are: False, adler32, crc32 "server_id_use_crc": (bool, str), @@ -89,7 +89,7 @@ index 7cdee12c4d..0cc0deb874 100644 } ) -@@ -1608,6 +1616,10 @@ DEFAULT_MASTER_OPTS = immutabletypes.freeze( +@@ -1601,6 +1609,10 @@ DEFAULT_MASTER_OPTS = immutabletypes.freeze( "fips_mode": False, "detect_remote_minions": False, "remote_minions_port": 22, diff --git a/make-setup.py-script-to-not-require-setuptools-9.1.patch b/make-setup.py-script-to-not-require-setuptools-9.1.patch index eda211e..67bf87c 100644 --- a/make-setup.py-script-to-not-require-setuptools-9.1.patch +++ b/make-setup.py-script-to-not-require-setuptools-9.1.patch @@ -1,4 +1,4 @@ -From 33e45a7ced8a3cfc0a8c37cdc5d7a29d6f6833c3 Mon Sep 17 00:00:00 2001 +From 64c2735b64a074acc1ef05a82f9fcf342426f87e Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Pablo=20Su=C3=A1rez=20Hern=C3=A1ndez?= Date: Wed, 25 Mar 2020 
13:09:52 +0000 @@ -9,10 +9,10 @@ Subject: [PATCH] Make setup.py script to not require setuptools > 9.1 1 file changed, 8 deletions(-) diff --git a/setup.py b/setup.py -index d633af35ec..586842972d 100755 +index 39a66fefba..d9c3d6e303 100755 --- a/setup.py +++ b/setup.py -@@ -718,14 +718,6 @@ class Install(install): +@@ -805,14 +805,6 @@ class Install(install): install.finalize_options(self) def run(self): @@ -28,6 +28,6 @@ index d633af35ec..586842972d 100755 # _version.py in the build command self.distribution.running_salt_install = True -- -2.37.3 +2.29.2 diff --git a/make-sure-saltcacheloader-use-correct-fileclient-519.patch b/make-sure-saltcacheloader-use-correct-fileclient-519.patch index 51b8686..d2199a8 100644 --- a/make-sure-saltcacheloader-use-correct-fileclient-519.patch +++ b/make-sure-saltcacheloader-use-correct-fileclient-519.patch @@ -1,4 +1,4 @@ -From 03ce925098fb96ad2f2f4b7d4c151ef63aede75f Mon Sep 17 00:00:00 2001 +From cdd5edaa40233d83e3ed2eb61de3fbf70bc29dfb Mon Sep 17 00:00:00 2001 From: Witek Bedyk Date: Thu, 19 May 2022 12:52:12 +0200 Subject: [PATCH] Make sure SaltCacheLoader use correct fileclient (#519) @@ -11,10 +11,10 @@ Signed-off-by: Witek Bedyk 1 file changed, 3 insertions(+) diff --git a/salt/state.py b/salt/state.py -index db228228a7..316dcdec63 100644 +index b759c8e0ee..2c785233c5 100644 --- a/salt/state.py +++ b/salt/state.py -@@ -4170,6 +4170,9 @@ class BaseHighState: +@@ -4061,6 +4061,9 @@ class BaseHighState: ) else: try: @@ -25,6 +25,6 @@ index db228228a7..316dcdec63 100644 fn_, self.state.rend, -- -2.37.3 +2.36.0 diff --git a/mock-ip_addrs-in-utils-minions.py-unit-test-443.patch b/mock-ip_addrs-in-utils-minions.py-unit-test-443.patch new file mode 100644 index 0000000..d27ad17 --- /dev/null +++ b/mock-ip_addrs-in-utils-minions.py-unit-test-443.patch @@ -0,0 +1,90 @@ +From a363596e5e02307680859432da9935905b749846 Mon Sep 17 00:00:00 2001 +From: Alexander Graul +Date: Wed, 19 Jan 2022 17:33:01 +0100 +Subject: [PATCH] Mock 
ip_addrs() in utils/minions.py unit test (#443) +MIME-Version: 1.0 +Content-Type: text/plain; charset=UTF-8 +Content-Transfer-Encoding: 8bit + +Previously the test used `salt.utils.network.ip_addrs()' in the same way +that the tested code did. This worked well as long as at least one IP +address was returned by `salt.utils.network.ip_addrs()'. + +Since this is a unit test, it should not depend on the environment, +it should just work™, even if there are no real IP addresses assigned to +the system (or container) that runs the test. + +Co-authored-by: Pablo Suárez Hernández +--- + tests/pytests/unit/utils/test_minions.py | 36 +++++++++++++----------- + 1 file changed, 20 insertions(+), 16 deletions(-) + +diff --git a/tests/pytests/unit/utils/test_minions.py b/tests/pytests/unit/utils/test_minions.py +index a9eee20ea1..6bc6c80bbd 100644 +--- a/tests/pytests/unit/utils/test_minions.py ++++ b/tests/pytests/unit/utils/test_minions.py +@@ -8,18 +8,22 @@ def test_connected_ids(): + test ckminion connected_ids when + local_port_tcp returns 127.0.0.1 + """ +- opts = {"publish_port": 4505, "detect_remote_minions": False} ++ opts = { ++ "publish_port": 4505, ++ "detect_remote_minions": False, ++ "minion_data_cache": True, ++ } + minion = "minion" +- ip = salt.utils.network.ip_addrs() +- mdata = {"grains": {"ipv4": ip, "ipv6": []}} +- ckminions = salt.utils.minions.CkMinions({"minion_data_cache": True}) ++ ips = {"203.0.113.1", "203.0.113.2"} ++ mdata = {"grains": {"ipv4": ips, "ipv6": []}} ++ patch_ip_addrs = patch("salt.utils.network.local_port_tcp", return_value=ips) + patch_net = patch("salt.utils.network.local_port_tcp", return_value={"127.0.0.1"}) + patch_list = patch("salt.cache.Cache.list", return_value=[minion]) + patch_fetch = patch("salt.cache.Cache.fetch", return_value=mdata) +- with patch.dict(ckminions.opts, opts): +- with patch_net, patch_list, patch_fetch: +- ret = ckminions.connected_ids() +- assert ret == {minion} ++ ckminions = 
salt.utils.minions.CkMinions(opts) ++ with patch_net, patch_ip_addrs, patch_list, patch_fetch: ++ ret = ckminions.connected_ids() ++ assert ret == {minion} + + + def test_connected_ids_remote_minions(): +@@ -31,21 +35,21 @@ def test_connected_ids_remote_minions(): + "publish_port": 4505, + "detect_remote_minions": True, + "remote_minions_port": 22, ++ "minion_data_cache": True, + } + minion = "minion" + minion2 = "minion2" + minion2_ip = "192.168.2.10" +- ip = salt.utils.network.ip_addrs() +- mdata = {"grains": {"ipv4": ip, "ipv6": []}} ++ minion_ips = {"203.0.113.1", "203.0.113.2", "127.0.0.1"} ++ mdata = {"grains": {"ipv4": minion_ips, "ipv6": []}} + mdata2 = {"grains": {"ipv4": [minion2_ip], "ipv6": []}} +- ckminions = salt.utils.minions.CkMinions({"minion_data_cache": True}) +- patch_net = patch("salt.utils.network.local_port_tcp", return_value={"127.0.0.1"}) ++ patch_net = patch("salt.utils.network.local_port_tcp", return_value=minion_ips) + patch_remote_net = patch( + "salt.utils.network.remote_port_tcp", return_value={minion2_ip} + ) + patch_list = patch("salt.cache.Cache.list", return_value=[minion, minion2]) + patch_fetch = patch("salt.cache.Cache.fetch", side_effect=[mdata, mdata2]) +- with patch.dict(ckminions.opts, opts): +- with patch_net, patch_list, patch_fetch, patch_remote_net: +- ret = ckminions.connected_ids() +- assert ret == {minion2, minion} ++ ckminions = salt.utils.minions.CkMinions(opts) ++ with patch_net, patch_list, patch_fetch, patch_remote_net: ++ ret = ckminions.connected_ids() ++ assert ret == {minion2, minion} +-- +2.34.1 + + diff --git a/normalize-package-names-once-with-pkg.installed-remo.patch b/normalize-package-names-once-with-pkg.installed-remo.patch index 76fd684..144af20 100644 --- a/normalize-package-names-once-with-pkg.installed-remo.patch +++ b/normalize-package-names-once-with-pkg.installed-remo.patch @@ -1,4 +1,4 @@ -From f2dc43cf1db3fee41e328c68545ccac2576021ca Mon Sep 17 00:00:00 2001 +From 
09afcd0d04788ec4351c1c0b73a0c6eb3b0fd8c9 Mon Sep 17 00:00:00 2001 From: Victor Zhestkov Date: Mon, 27 Jun 2022 18:01:21 +0300 Subject: [PATCH] Normalize package names once with pkg.installed/removed @@ -18,10 +18,10 @@ Subject: [PATCH] Normalize package names once with pkg.installed/removed 3 files changed, 192 insertions(+), 6 deletions(-) diff --git a/salt/modules/yumpkg.py b/salt/modules/yumpkg.py -index 46f0b1f613..f52e084346 100644 +index 9f8f548e5f..3138ac2e59 100644 --- a/salt/modules/yumpkg.py +++ b/salt/modules/yumpkg.py -@@ -1460,7 +1460,12 @@ def install( +@@ -1449,7 +1449,12 @@ def install( try: pkg_params, pkg_type = __salt__["pkg_resource.parse_targets"]( @@ -35,7 +35,7 @@ index 46f0b1f613..f52e084346 100644 ) except MinionError as exc: raise CommandExecutionError(exc) -@@ -1612,7 +1617,10 @@ def install( +@@ -1603,7 +1608,10 @@ def install( except ValueError: pass else: @@ -47,7 +47,7 @@ index 46f0b1f613..f52e084346 100644 arch = "." + archpart pkgname = namepart -@@ -2143,11 +2151,13 @@ def remove(name=None, pkgs=None, **kwargs): # pylint: disable=W0613 +@@ -2134,11 +2142,13 @@ def remove(name=None, pkgs=None, **kwargs): # pylint: disable=W0613 arch = "" pkgname = target try: @@ -64,10 +64,10 @@ index 46f0b1f613..f52e084346 100644 pkgname = namepart # Since we don't always have the arch info, epoch information has to parsed out. 
But diff --git a/salt/states/pkg.py b/salt/states/pkg.py -index ef4e062145..cda966a1e8 100644 +index 0d601e1aaf..71298e6c7a 100644 --- a/salt/states/pkg.py +++ b/salt/states/pkg.py -@@ -1873,6 +1873,7 @@ def installed( +@@ -1901,6 +1901,7 @@ def installed( normalize=normalize, update_holds=update_holds, ignore_epoch=ignore_epoch, @@ -75,7 +75,7 @@ index ef4e062145..cda966a1e8 100644 **kwargs ) except CommandExecutionError as exc: -@@ -2940,7 +2941,7 @@ def _uninstall( +@@ -2973,7 +2974,7 @@ def _uninstall( } changes = __salt__["pkg.{}".format(action)]( @@ -85,7 +85,7 @@ index ef4e062145..cda966a1e8 100644 new = __salt__["pkg.list_pkgs"](versions_as_list=True, **kwargs) failed = [] diff --git a/tests/pytests/unit/states/test_pkg.py b/tests/pytests/unit/states/test_pkg.py -index cba8201bda..ecb841e8ec 100644 +index 17b91bcb39..10acae9f88 100644 --- a/tests/pytests/unit/states/test_pkg.py +++ b/tests/pytests/unit/states/test_pkg.py @@ -2,6 +2,8 @@ import logging @@ -122,7 +122,7 @@ index cba8201bda..ecb841e8ec 100644 } -@@ -726,3 +737,167 @@ def test_held_unheld(package_manager): +@@ -715,3 +726,167 @@ def test_held_unheld(package_manager): hold_mock.assert_not_called() unhold_mock.assert_any_call(name="held-test", pkgs=["baz"]) unhold_mock.assert_any_call(name="held-test", pkgs=["bar"]) @@ -291,6 +291,6 @@ index cba8201bda..ecb841e8ec 100644 + assert ret["result"] + assert ret["changes"] == expected -- -2.37.3 +2.36.1 diff --git a/notify-beacon-for-debian-ubuntu-systems-347.patch b/notify-beacon-for-debian-ubuntu-systems-347.patch new file mode 100644 index 0000000..ad13002 --- /dev/null +++ b/notify-beacon-for-debian-ubuntu-systems-347.patch @@ -0,0 +1,92 @@ +From e0f8087409bdff4c3036e38ed4f22f5f031306e8 Mon Sep 17 00:00:00 2001 +From: Ricardo Mateus +Date: Fri, 9 Apr 2021 10:57:27 +0100 +Subject: [PATCH] Notify beacon for Debian/Ubuntu systems (#347) + +Signed-off-by: Ricardo Mateus +(cherry picked from commit 33d6baebba94cc7a66d5555de984ca98684157a0) +--- + 
scripts/suse/dpkg/99dpkgnotify | 1 + + scripts/suse/dpkg/README.md | 9 +++++++ + scripts/suse/dpkg/dpkgnotify | 44 ++++++++++++++++++++++++++++++++++ + 3 files changed, 54 insertions(+) + create mode 100644 scripts/suse/dpkg/99dpkgnotify + create mode 100644 scripts/suse/dpkg/README.md + create mode 100644 scripts/suse/dpkg/dpkgnotify + +diff --git a/scripts/suse/dpkg/99dpkgnotify b/scripts/suse/dpkg/99dpkgnotify +new file mode 100644 +index 0000000000..8013387a57 +--- /dev/null ++++ b/scripts/suse/dpkg/99dpkgnotify +@@ -0,0 +1 @@ ++DPkg::Post-Invoke {"/usr/bin/dpkgnotify";}; +diff --git a/scripts/suse/dpkg/README.md b/scripts/suse/dpkg/README.md +new file mode 100644 +index 0000000000..b7a75c4786 +--- /dev/null ++++ b/scripts/suse/dpkg/README.md +@@ -0,0 +1,9 @@ ++## What it is ++ ++Debian base package to notify installation of new packages outside the control of salt. ++ ++## Installation ++This script depends on python package, so python3 should be installed on the machine ++ ++- The 99dpkgnotify file must be installed in /etc/apt/apt.conf.d/99dpkgnotify ++- The dpkgnotify file must be installed in /usr/bin/dpkgnotify +diff --git a/scripts/suse/dpkg/dpkgnotify b/scripts/suse/dpkg/dpkgnotify +new file mode 100644 +index 0000000000..d3ad3d2ba9 +--- /dev/null ++++ b/scripts/suse/dpkg/dpkgnotify +@@ -0,0 +1,44 @@ ++#!/usr/bin/python3 ++ ++import os ++import hashlib ++ ++CK_PATH = "/var/cache/salt/minion/dpkg.cookie" ++DPKG_PATH = "/var/lib/dpkg/status" ++ ++def _get_mtime(): ++ """ ++ Get the modified time of the Package Database. ++ Returns: ++ Unix ticks ++ """ ++ return os.path.exists(DPKG_PATH) and int(os.path.getmtime(DPKG_PATH)) or 0 ++ ++ ++def _get_checksum(): ++ """ ++ Get the checksum of the Package Database. 
++ Returns: ++ hexdigest ++ """ ++ digest = hashlib.sha256() ++ with open(DPKG_PATH, "rb") as pkg_db_fh: ++ while True: ++ buff = pkg_db_fh.read(0x1000) ++ if not buff: ++ break ++ digest.update(buff) ++ return digest.hexdigest() ++ ++ ++def dpkg_post_invoke(): ++ """ ++ Hook after the package installation transaction. ++ """ ++ if 'SALT_RUNNING' not in os.environ: ++ with open(CK_PATH, 'w') as ck_fh: ++ ck_fh.write('{chksum} {mtime}\n'.format(chksum=_get_checksum(), mtime=_get_mtime())) ++ ++ ++if __name__ == "__main__": ++ dpkg_post_invoke() +-- +2.30.2 + + diff --git a/pass-the-context-to-pillar-ext-modules.patch b/pass-the-context-to-pillar-ext-modules.patch deleted file mode 100644 index 91d82da..0000000 --- a/pass-the-context-to-pillar-ext-modules.patch +++ /dev/null @@ -1,276 +0,0 @@ -From 8a584a8546667ab5390e9a2003a8ce3cb3add25c Mon Sep 17 00:00:00 2001 -From: Victor Zhestkov -Date: Fri, 28 Oct 2022 13:20:13 +0300 -Subject: [PATCH] Pass the context to pillar ext modules - -* Pass __context__ to ext pillar - -* Add test for passing the context to pillar ext module - -* Align the test and pillar to prevent failing test ---- - salt/master.py | 7 ++- - salt/pillar/__init__.py | 16 +++++- - tests/pytests/unit/test_master.py | 91 ++++++++++++++++++++++++++++++- - 3 files changed, 108 insertions(+), 6 deletions(-) - -diff --git a/salt/master.py b/salt/master.py -index 7f41ffe77b..0110082626 100644 ---- a/salt/master.py -+++ b/salt/master.py -@@ -951,6 +951,7 @@ class MWorker(salt.utils.process.SignalHandlingProcess): - self.k_mtime = 0 - self.stats = collections.defaultdict(lambda: {"mean": 0, "runs": 0}) - self.stat_clock = time.time() -+ self.context = {} - - # We need __setstate__ and __getstate__ to also pickle 'SMaster.secrets'. 
- # Otherwise, 'SMaster.secrets' won't be copied over to the spawned process -@@ -1138,7 +1139,7 @@ class MWorker(salt.utils.process.SignalHandlingProcess): - self.key, - ) - self.clear_funcs.connect() -- self.aes_funcs = AESFuncs(self.opts) -+ self.aes_funcs = AESFuncs(self.opts, context=self.context) - salt.utils.crypt.reinit_crypto() - self.__bind() - -@@ -1202,7 +1203,7 @@ class AESFuncs(TransportMethods): - "_ext_nodes", # To be removed in 3006 (Sulfur) #60980 - ) - -- def __init__(self, opts): -+ def __init__(self, opts, context=None): - """ - Create a new AESFuncs - -@@ -1212,6 +1213,7 @@ class AESFuncs(TransportMethods): - :returns: Instance for handling AES operations - """ - self.opts = opts -+ self.context = context - self.event = salt.utils.event.get_master_event( - self.opts, self.opts["sock_dir"], listen=False - ) -@@ -1603,6 +1605,7 @@ class AESFuncs(TransportMethods): - pillarenv=load.get("pillarenv"), - extra_minion_data=load.get("extra_minion_data"), - clean_cache=load.get("clean_cache"), -+ context=self.context, - ) - data = pillar.compile_pillar() - self.fs_.update_opts() -diff --git a/salt/pillar/__init__.py b/salt/pillar/__init__.py -index 7be963566a..906bdfe55d 100644 ---- a/salt/pillar/__init__.py -+++ b/salt/pillar/__init__.py -@@ -46,6 +46,7 @@ def get_pillar( - pillarenv=None, - extra_minion_data=None, - clean_cache=False, -+ context=None, - ): - """ - Return the correct pillar driver based on the file_client option -@@ -81,6 +82,7 @@ def get_pillar( - pillar_override=pillar_override, - pillarenv=pillarenv, - clean_cache=clean_cache, -+ context=context, - ) - return ptype( - opts, -@@ -92,6 +94,7 @@ def get_pillar( - pillar_override=pillar_override, - pillarenv=pillarenv, - extra_minion_data=extra_minion_data, -+ context=context, - ) - - -@@ -280,7 +283,7 @@ class AsyncRemotePillar(RemotePillarMixin): - raise salt.ext.tornado.gen.Return(ret_pillar) - - def destroy(self): -- if self._closing: -+ if hasattr(self, "_closing") and 
self._closing: - return - - self._closing = True -@@ -309,6 +312,7 @@ class RemotePillar(RemotePillarMixin): - pillar_override=None, - pillarenv=None, - extra_minion_data=None, -+ context=None, - ): - self.opts = opts - self.opts["saltenv"] = saltenv -@@ -333,6 +337,7 @@ class RemotePillar(RemotePillarMixin): - merge_lists=True, - ) - self._closing = False -+ self.context = context - - def compile_pillar(self): - """ -@@ -406,6 +411,7 @@ class PillarCache: - pillarenv=None, - extra_minion_data=None, - clean_cache=False, -+ context=None, - ): - # Yes, we need all of these because we need to route to the Pillar object - # if we have no cache. This is another refactor target. -@@ -432,6 +438,8 @@ class PillarCache: - minion_cache_path=self._minion_cache_path(minion_id), - ) - -+ self.context = context -+ - def _minion_cache_path(self, minion_id): - """ - Return the path to the cache file for the minion. -@@ -455,6 +463,7 @@ class PillarCache: - functions=self.functions, - pillar_override=self.pillar_override, - pillarenv=self.pillarenv, -+ context=self.context, - ) - return fresh_pillar.compile_pillar() - -@@ -530,6 +539,7 @@ class Pillar: - pillar_override=None, - pillarenv=None, - extra_minion_data=None, -+ context=None, - ): - self.minion_id = minion_id - self.ext = ext -@@ -568,7 +578,7 @@ class Pillar: - if opts.get("pillar_source_merging_strategy"): - self.merge_strategy = opts["pillar_source_merging_strategy"] - -- self.ext_pillars = salt.loader.pillars(ext_pillar_opts, self.functions) -+ self.ext_pillars = salt.loader.pillars(ext_pillar_opts, self.functions, context=context) - self.ignored_pillars = {} - self.pillar_override = pillar_override or {} - if not isinstance(self.pillar_override, dict): -@@ -1335,7 +1345,7 @@ class Pillar: - """ - This method exist in order to be API compatible with RemotePillar - """ -- if self._closing: -+ if hasattr(self, "_closing") and self._closing: - return - self._closing = True - -diff --git 
a/tests/pytests/unit/test_master.py b/tests/pytests/unit/test_master.py -index a49ecfec3b..ca02c7788d 100644 ---- a/tests/pytests/unit/test_master.py -+++ b/tests/pytests/unit/test_master.py -@@ -1,7 +1,7 @@ - import time - - import salt.master --from tests.support.mock import patch -+from tests.support.mock import MagicMock, patch - - - def test_fileserver_duration(): -@@ -14,3 +14,92 @@ def test_fileserver_duration(): - update.called_once() - # Timeout is 1 second - assert 2 > end - start > 1 -+ -+ -+def test_mworker_pass_context(): -+ """ -+ Test of passing the __context__ to pillar ext module loader -+ """ -+ req_channel_mock = MagicMock() -+ local_client_mock = MagicMock() -+ -+ opts = { -+ "req_server_niceness": None, -+ "mworker_niceness": None, -+ "sock_dir": "/tmp", -+ "conf_file": "/tmp/fake_conf", -+ "transport": "zeromq", -+ "fileserver_backend": ["roots"], -+ "file_client": "local", -+ "pillar_cache": False, -+ "state_top": "top.sls", -+ "pillar_roots": {}, -+ } -+ -+ data = { -+ "id": "MINION_ID", -+ "grains": {}, -+ "saltenv": None, -+ "pillarenv": None, -+ "pillar_override": {}, -+ "extra_minion_data": {}, -+ "ver": "2", -+ "cmd": "_pillar", -+ } -+ -+ test_context = {"testing": 123} -+ -+ def mworker_bind_mock(): -+ mworker.aes_funcs.run_func(data["cmd"], data) -+ -+ with patch("salt.client.get_local_client", local_client_mock), patch( -+ "salt.master.ClearFuncs", MagicMock() -+ ), patch("salt.minion.MasterMinion", MagicMock()), patch( -+ "salt.utils.verify.valid_id", return_value=True -+ ), patch( -+ "salt.loader.matchers", MagicMock() -+ ), patch( -+ "salt.loader.render", MagicMock() -+ ), patch( -+ "salt.loader.utils", MagicMock() -+ ), patch( -+ "salt.loader.fileserver", MagicMock() -+ ), patch( -+ "salt.loader.minion_mods", MagicMock() -+ ), patch( -+ "salt.loader._module_dirs", MagicMock() -+ ), patch( -+ "salt.loader.LazyLoader", MagicMock() -+ ) as loadler_pillars_mock: -+ mworker = salt.master.MWorker(opts, {}, {}, [req_channel_mock]) -+ 
-+ with patch.object(mworker, "_MWorker__bind", mworker_bind_mock), patch.dict( -+ mworker.context, test_context -+ ): -+ mworker.run() -+ assert ( -+ loadler_pillars_mock.call_args_list[0][1].get("pack").get("__context__") -+ == test_context -+ ) -+ -+ loadler_pillars_mock.reset_mock() -+ -+ opts.update( -+ { -+ "pillar_cache": True, -+ "pillar_cache_backend": "file", -+ "pillar_cache_ttl": 1000, -+ "cachedir": "/tmp", -+ } -+ ) -+ -+ mworker = salt.master.MWorker(opts, {}, {}, [req_channel_mock]) -+ -+ with patch.object(mworker, "_MWorker__bind", mworker_bind_mock), patch.dict( -+ mworker.context, test_context -+ ), patch("salt.utils.cache.CacheFactory.factory", MagicMock()): -+ mworker.run() -+ assert ( -+ loadler_pillars_mock.call_args_list[0][1].get("pack").get("__context__") -+ == test_context -+ ) --- -2.37.3 - - diff --git a/prevent-affection-of-ssh.opts-with-lazyloader-bsc-11.patch b/prevent-affection-of-ssh.opts-with-lazyloader-bsc-11.patch index 7197ff9..48cb0cf 100644 --- a/prevent-affection-of-ssh.opts-with-lazyloader-bsc-11.patch +++ b/prevent-affection-of-ssh.opts-with-lazyloader-bsc-11.patch @@ -1,4 +1,4 @@ -From ad3735581379e5d4bbc7baef3eaa4a1b8387ccbf Mon Sep 17 00:00:00 2001 +From c86432645863a21da589346ad587b610ab51a2a9 Mon Sep 17 00:00:00 2001 From: Victor Zhestkov Date: Tue, 12 Apr 2022 10:06:43 +0300 Subject: [PATCH] Prevent affection of SSH.opts with LazyLoader @@ -11,14 +11,14 @@ Subject: [PATCH] Prevent affection of SSH.opts with LazyLoader * Fix test_ssh unit tests --- salt/client/ssh/__init__.py | 19 +++++++++++-------- - tests/unit/client/test_ssh.py | 18 +++++++++--------- - 2 files changed, 20 insertions(+), 17 deletions(-) + tests/unit/client/test_ssh.py | 16 ++++++++-------- + 2 files changed, 19 insertions(+), 16 deletions(-) diff --git a/salt/client/ssh/__init__.py b/salt/client/ssh/__init__.py -index 8ae417f575..fe1213b723 100644 +index bc77eb700e..6d24d8d716 100644 --- a/salt/client/ssh/__init__.py +++ 
b/salt/client/ssh/__init__.py -@@ -224,15 +224,16 @@ class SSH(MultiprocessingStateMixin): +@@ -225,15 +225,16 @@ class SSH: ROSTER_UPDATE_FLAG = "#__needs_update" def __init__(self, opts, context=None): @@ -29,8 +29,8 @@ index 8ae417f575..fe1213b723 100644 + pull_sock = os.path.join(self.opts["sock_dir"], "master_event_pull.ipc") if os.path.exists(pull_sock) and zmq: self.event = salt.utils.event.get_event( -- "master", opts["sock_dir"], opts=opts, listen=False -+ "master", self.opts["sock_dir"], opts=self.opts, listen=False +- "master", opts["sock_dir"], opts["transport"], opts=opts, listen=False ++ "master", self.opts["sock_dir"], self.opts["transport"], opts=self.opts, listen=False ) else: self.event = None @@ -38,7 +38,7 @@ index 8ae417f575..fe1213b723 100644 if self.opts["regen_thin"]: self.opts["ssh_wipe"] = True if not salt.utils.path.which("ssh"): -@@ -243,7 +244,7 @@ class SSH(MultiprocessingStateMixin): +@@ -244,7 +245,7 @@ class SSH: " to run. Exiting." ), ) @@ -47,7 +47,7 @@ index 8ae417f575..fe1213b723 100644 self.tgt_type = ( self.opts["selected_target_option"] if self.opts["selected_target_option"] -@@ -339,6 +340,9 @@ class SSH(MultiprocessingStateMixin): +@@ -341,6 +342,9 @@ class SSH: self.opts["cachedir"], "salt-ssh.session.lock" ) self.ssh_session_grace_time = int(self.opts.get("ssh_session_grace_time", 1)) @@ -55,9 +55,9 @@ index 8ae417f575..fe1213b723 100644 + self.sopts["ssh_cli_tgt"] = copy.deepcopy(self.opts["ssh_cli_tgt"]) + self.opts = self.sopts - # __setstate__ and __getstate__ are only used on spawning platforms. 
- def __setstate__(self, state): -@@ -607,7 +611,6 @@ class SSH(MultiprocessingStateMixin): + @property + def parse_tgt(self): +@@ -594,7 +598,6 @@ class SSH: Spin up the needed threads or processes and execute the subsequent routines """ @@ -65,7 +65,7 @@ index 8ae417f575..fe1213b723 100644 que = multiprocessing.Queue() running = {} targets_queue = deque(self.targets.keys()) -@@ -618,7 +621,7 @@ class SSH(MultiprocessingStateMixin): +@@ -605,7 +608,7 @@ class SSH: if not self.targets: log.error("No matching targets found in roster.") break @@ -74,7 +74,7 @@ index 8ae417f575..fe1213b723 100644 if targets_queue: host = targets_queue.popleft() else: -@@ -682,7 +685,7 @@ class SSH(MultiprocessingStateMixin): +@@ -669,7 +672,7 @@ class SSH: continue args = ( que, @@ -83,7 +83,7 @@ index 8ae417f575..fe1213b723 100644 host, self.targets[host], mine, -@@ -776,7 +779,7 @@ class SSH(MultiprocessingStateMixin): +@@ -763,7 +766,7 @@ class SSH: if len(rets) >= len(self.targets): break # Sleep when limit or all threads started @@ -93,7 +93,7 @@ index 8ae417f575..fe1213b723 100644 ) >= len(running): time.sleep(0.1) diff --git a/tests/unit/client/test_ssh.py b/tests/unit/client/test_ssh.py -index 00313ed55f..92a9314149 100644 +index 23cb3d0700..5003500de1 100644 --- a/tests/unit/client/test_ssh.py +++ b/tests/unit/client/test_ssh.py @@ -95,7 +95,7 @@ class SSHReturnEventTests(ShellCase): @@ -110,20 +110,11 @@ index 00313ed55f..92a9314149 100644 ): client._expand_target() - assert opts["tgt"] == host -+ assert client.opts["tgt"] == host - - def test_expand_target_no_host(self): - """ -@@ -564,7 +564,7 @@ class SSHTests(ShellCase): - assert opts["tgt"] == user + host - with patch("salt.roster.get_roster_file", MagicMock(return_value=roster_file)): - client._expand_target() -- assert opts["tgt"] == host + assert client.opts["tgt"] == host def test_expand_target_dns(self): """ -@@ -587,7 +587,7 @@ class SSHTests(ShellCase): +@@ -562,7 +562,7 @@ class SSHTests(ShellCase): 
MagicMock(return_value=salt.utils.yaml.safe_load(self.roster)), ): client._expand_target() @@ -132,7 +123,7 @@ index 00313ed55f..92a9314149 100644 def test_expand_target_no_user(self): """ -@@ -627,7 +627,7 @@ class SSHTests(ShellCase): +@@ -602,7 +602,7 @@ class SSHTests(ShellCase): client = ssh.SSH(opts) assert opts["tgt"] == user + host client._update_targets() @@ -141,7 +132,7 @@ index 00313ed55f..92a9314149 100644 assert client.targets[host]["user"] == user.split("@")[0] def test_update_targets_dns(self): -@@ -645,7 +645,7 @@ class SSHTests(ShellCase): +@@ -620,7 +620,7 @@ class SSHTests(ShellCase): client = ssh.SSH(opts) assert opts["tgt"] == user + host client._update_targets() @@ -150,7 +141,7 @@ index 00313ed55f..92a9314149 100644 assert client.targets[host]["user"] == user.split("@")[0] def test_update_targets_no_user(self): -@@ -686,7 +686,7 @@ class SSHTests(ShellCase): +@@ -661,7 +661,7 @@ class SSHTests(ShellCase): ): client._expand_target() client._update_targets() @@ -159,7 +150,7 @@ index 00313ed55f..92a9314149 100644 assert client.targets[host]["user"] == user.split("@")[0] def test_parse_tgt(self): -@@ -706,7 +706,7 @@ class SSHTests(ShellCase): +@@ -681,7 +681,7 @@ class SSHTests(ShellCase): client = ssh.SSH(opts) assert client.parse_tgt["hostname"] == host assert client.parse_tgt["user"] == user.split("@")[0] @@ -168,7 +159,7 @@ index 00313ed55f..92a9314149 100644 def test_parse_tgt_no_user(self): """ -@@ -725,7 +725,7 @@ class SSHTests(ShellCase): +@@ -700,7 +700,7 @@ class SSHTests(ShellCase): client = ssh.SSH(opts) assert client.parse_tgt["hostname"] == host assert client.parse_tgt["user"] == opts["ssh_user"] @@ -178,6 +169,6 @@ index 00313ed55f..92a9314149 100644 def test_extra_filerefs(self): """ -- -2.37.3 +2.35.1 diff --git a/prevent-pkg-plugins-errors-on-missing-cookie-path-bs.patch b/prevent-pkg-plugins-errors-on-missing-cookie-path-bs.patch index 4ac11f9..b3a3efe 100644 --- a/prevent-pkg-plugins-errors-on-missing-cookie-path-bs.patch 
+++ b/prevent-pkg-plugins-errors-on-missing-cookie-path-bs.patch @@ -1,4 +1,4 @@ -From 6c1878310bf75be467b5ce15e8c89134a6f770cb Mon Sep 17 00:00:00 2001 +From 27db7d49c4b3348d5dcfe229f0d5823c0e770179 Mon Sep 17 00:00:00 2001 From: Victor Zhestkov <35733135+vzhestkov@users.noreply.github.com> Date: Mon, 8 Nov 2021 17:42:36 +0300 Subject: [PATCH] Prevent pkg plugins errors on missing cookie path @@ -12,11 +12,52 @@ Subject: [PATCH] Prevent pkg plugins errors on missing cookie path * Fix yumnotify --- + scripts/suse/dpkg/dpkgnotify | 18 +++++++++++++++--- scripts/suse/yum/plugins/README.md | 2 +- scripts/suse/yum/plugins/yumnotify.py | 17 +++++++++++++---- scripts/suse/zypper/plugins/commit/zyppnotify | 18 ++++++++++++------ - 3 files changed, 26 insertions(+), 11 deletions(-) + 4 files changed, 41 insertions(+), 14 deletions(-) +diff --git a/scripts/suse/dpkg/dpkgnotify b/scripts/suse/dpkg/dpkgnotify +index d3ad3d2ba9..3d6d038a98 100644 +--- a/scripts/suse/dpkg/dpkgnotify ++++ b/scripts/suse/dpkg/dpkgnotify +@@ -2,10 +2,12 @@ + + import os + import hashlib ++import sys + + CK_PATH = "/var/cache/salt/minion/dpkg.cookie" + DPKG_PATH = "/var/lib/dpkg/status" + ++ + def _get_mtime(): + """ + Get the modified time of the Package Database. +@@ -35,9 +37,19 @@ def dpkg_post_invoke(): + """ + Hook after the package installation transaction. 
+ """ +- if 'SALT_RUNNING' not in os.environ: +- with open(CK_PATH, 'w') as ck_fh: +- ck_fh.write('{chksum} {mtime}\n'.format(chksum=_get_checksum(), mtime=_get_mtime())) ++ if "SALT_RUNNING" not in os.environ: ++ try: ++ ck_dir = os.path.dirname(CK_PATH) ++ if not os.path.exists(ck_dir): ++ os.makedirs(ck_dir) ++ with open(CK_PATH, "w") as ck_fh: ++ ck_fh.write( ++ "{chksum} {mtime}\n".format( ++ chksum=_get_checksum(), mtime=_get_mtime() ++ ) ++ ) ++ except OSError as e: ++ print("Unable to save the cookie file: %s" % (e), file=sys.stderr) + + + if __name__ == "__main__": diff --git a/scripts/suse/yum/plugins/README.md b/scripts/suse/yum/plugins/README.md index cb3abd2260..3515845b31 100644 --- a/scripts/suse/yum/plugins/README.md @@ -97,6 +138,6 @@ index bacbc8b97e..e3528e87a9 100755 self.ack() -- -2.37.3 +2.34.1 diff --git a/prevent-shell-injection-via-pre_flight_script_args-4.patch b/prevent-shell-injection-via-pre_flight_script_args-4.patch index 76bf003..1eb3558 100644 --- a/prevent-shell-injection-via-pre_flight_script_args-4.patch +++ b/prevent-shell-injection-via-pre_flight_script_args-4.patch @@ -1,4 +1,4 @@ -From 57f2400bfce206e16e7f282cd3b93cd4d7e99dd7 Mon Sep 17 00:00:00 2001 +From 24093156ace91a8766eb1f5acbc47eee8e634d8e Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Pablo=20Su=C3=A1rez=20Hern=C3=A1ndez?= Date: Mon, 28 Feb 2022 14:25:43 +0000 @@ -11,14 +11,17 @@ Readjust logic to validate script args Use RLock to prevent issues in single threads --- - salt/_logging/impl.py | 2 +- - 1 file changed, 1 insertion(+), 1 deletion(-) + salt/_logging/impl.py | 2 +- + salt/client/ssh/__init__.py | 9 ++-- + tests/integration/ssh/test_pre_flight.py | 56 ++++++++++++++++++++++-- + tests/unit/client/test_ssh.py | 35 +++++++++++++++ + 4 files changed, 92 insertions(+), 10 deletions(-) diff --git a/salt/_logging/impl.py b/salt/_logging/impl.py -index e050f43caf..2d1a276cb8 100644 +index 953490b284..4f48672032 100644 --- a/salt/_logging/impl.py +++ b/salt/_logging/impl.py 
-@@ -107,7 +107,7 @@ DFLT_LOG_FMT_LOGFILE = "%(asctime)s,%(msecs)03d [%(name)-17s:%(lineno)-4d][%(lev +@@ -92,7 +92,7 @@ MODNAME_PATTERN = re.compile(r"(?P%%\(name\)(?:\-(?P[\d]+))?s)") # LOG_LOCK is used to prevent deadlocks on using logging # in combination with multiprocessing with salt-api @@ -26,8 +29,163 @@ index e050f43caf..2d1a276cb8 100644 +LOG_LOCK = threading.RLock() - class SaltLogRecord(logging.LogRecord): + # ----- REMOVE ME ON REFACTOR COMPLETE ------------------------------------------------------------------------------> +diff --git a/salt/client/ssh/__init__.py b/salt/client/ssh/__init__.py +index 0066f4597b..3e032c7197 100644 +--- a/salt/client/ssh/__init__.py ++++ b/salt/client/ssh/__init__.py +@@ -15,6 +15,7 @@ import os + import psutil + import queue + import re ++import shlex + import subprocess + import sys + import tarfile +@@ -1458,11 +1459,9 @@ ARGS = {arguments}\n'''.format( + """ + args = "" + if script_args: +- args = " {}".format( +- " ".join([str(el) for el in script_args]) +- if isinstance(script_args, (list, tuple)) +- else script_args +- ) ++ if not isinstance(script_args, (list, tuple)): ++ script_args = shlex.split(str(script_args)) ++ args = " {}".format(" ".join([shlex.quote(str(el)) for el in script_args])) + if extension == "ps1": + ret = self.shell.exec_cmd('"powershell {}"'.format(script)) + else: +diff --git a/tests/integration/ssh/test_pre_flight.py b/tests/integration/ssh/test_pre_flight.py +index 6233ec0fe7..9c39219e9d 100644 +--- a/tests/integration/ssh/test_pre_flight.py ++++ b/tests/integration/ssh/test_pre_flight.py +@@ -25,10 +25,14 @@ class SSHPreFlightTest(SSHCase): + RUNTIME_VARS.TMP, "test-pre-flight-script-worked.txt" + ) + +- def _create_roster(self): +- self.custom_roster(self.roster, self.data) ++ def _create_roster(self, pre_flight_script_args=None): ++ data = dict(self.data) ++ if pre_flight_script_args: ++ data["ssh_pre_flight_args"] = pre_flight_script_args + +- with 
salt.utils.files.fopen(self.data["ssh_pre_flight"], "w") as fp_: ++ self.custom_roster(self.roster, data) ++ ++ with salt.utils.files.fopen(data["ssh_pre_flight"], "w") as fp_: + fp_.write("touch {}".format(self.test_script)) + + @pytest.mark.slow_test +@@ -58,6 +62,45 @@ class SSHPreFlightTest(SSHCase): + ) + assert os.path.exists(self.test_script) + ++ @pytest.mark.slow_test ++ def test_ssh_run_pre_flight_args(self): ++ """ ++ test ssh when --pre-flight is passed to salt-ssh ++ to ensure the script runs successfully passing some args ++ """ ++ self._create_roster(pre_flight_script_args="foobar test") ++ # make sure we previously ran a command so the thin dir exists ++ self.run_function("test.ping", wipe=False) ++ assert not os.path.exists(self.test_script) ++ ++ assert self.run_function( ++ "test.ping", ssh_opts="--pre-flight", roster_file=self.roster, wipe=False ++ ) ++ assert os.path.exists(self.test_script) ++ ++ @pytest.mark.slow_test ++ def test_ssh_run_pre_flight_args_prevent_injection(self): ++ """ ++ test ssh when --pre-flight is passed to salt-ssh ++ and evil arguments are used in order to produce shell injection ++ """ ++ injected_file = os.path.join(RUNTIME_VARS.TMP, "injection") ++ self._create_roster( ++ pre_flight_script_args="foobar; echo injected > {}".format(injected_file) ++ ) ++ # make sure we previously ran a command so the thin dir exists ++ self.run_function("test.ping", wipe=False) ++ assert not os.path.exists(self.test_script) ++ assert not os.path.isfile(injected_file) ++ ++ assert self.run_function( ++ "test.ping", ssh_opts="--pre-flight", roster_file=self.roster, wipe=False ++ ) ++ ++ assert not os.path.isfile( ++ injected_file ++ ), "File injection suceeded. 
This shouldn't happend" ++ + @pytest.mark.slow_test + def test_ssh_run_pre_flight_failure(self): + """ +@@ -77,7 +120,12 @@ class SSHPreFlightTest(SSHCase): + """ + make sure to clean up any old ssh directories + """ +- files = [self.roster, self.data["ssh_pre_flight"], self.test_script] ++ files = [ ++ self.roster, ++ self.data["ssh_pre_flight"], ++ self.test_script, ++ os.path.join(RUNTIME_VARS.TMP, "injection"), ++ ] + for fp_ in files: + if os.path.exists(fp_): + os.remove(fp_) +diff --git a/tests/unit/client/test_ssh.py b/tests/unit/client/test_ssh.py +index 6f3d87d493..23cb3d0700 100644 +--- a/tests/unit/client/test_ssh.py ++++ b/tests/unit/client/test_ssh.py +@@ -234,6 +234,41 @@ class SSHSingleTests(TestCase): + mock_flight.assert_called() + assert ret == cmd_ret + ++ def test_run_with_pre_flight_with_args(self): ++ """ ++ test Single.run() when ssh_pre_flight is set ++ and script successfully runs ++ """ ++ target = self.target.copy() ++ target["ssh_pre_flight"] = os.path.join(RUNTIME_VARS.TMP, "script.sh") ++ target["ssh_pre_flight_args"] = "foobar" ++ single = ssh.Single( ++ self.opts, ++ self.opts["argv"], ++ "localhost", ++ mods={}, ++ fsclient=None, ++ thin=salt.utils.thin.thin_path(self.opts["cachedir"]), ++ mine=False, ++ **target ++ ) ++ ++ cmd_ret = ("Success", "foobar", 0) ++ mock_flight = MagicMock(return_value=cmd_ret) ++ mock_cmd = MagicMock(return_value=cmd_ret) ++ patch_flight = patch("salt.client.ssh.Single.run_ssh_pre_flight", mock_flight) ++ patch_cmd = patch("salt.client.ssh.Single.cmd_block", mock_cmd) ++ patch_exec_cmd = patch( ++ "salt.client.ssh.shell.Shell.exec_cmd", return_value=("", "", 1) ++ ) ++ patch_os = patch("os.path.exists", side_effect=[True]) ++ ++ with patch_os, patch_flight, patch_cmd, patch_exec_cmd: ++ ret = single.run() ++ mock_cmd.assert_called() ++ mock_flight.assert_called() ++ assert ret == cmd_ret ++ + def test_run_with_pre_flight_stderr(self): + """ + test Single.run() when ssh_pre_flight is set -- -2.37.3 
+2.35.1 diff --git a/read-repo-info-without-using-interpolation-bsc-11356.patch b/read-repo-info-without-using-interpolation-bsc-11356.patch index 4ec0781..8bcd86e 100644 --- a/read-repo-info-without-using-interpolation-bsc-11356.patch +++ b/read-repo-info-without-using-interpolation-bsc-11356.patch @@ -1,4 +1,4 @@ -From c27ac9afb6bd13fc26fc440e0a097bbb82cbb640 Mon Sep 17 00:00:00 2001 +From c3a058842344dacd01b0a0c55483c22b35f449e8 Mon Sep 17 00:00:00 2001 From: Mihai Dinca Date: Thu, 7 Nov 2019 15:11:49 +0100 Subject: [PATCH] Read repo info without using interpolation @@ -9,10 +9,10 @@ Subject: [PATCH] Read repo info without using interpolation 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/salt/modules/zypperpkg.py b/salt/modules/zypperpkg.py -index dd836b7ad0..32e22ce9a8 100644 +index b5621174a4..c3342ab6d1 100644 --- a/salt/modules/zypperpkg.py +++ b/salt/modules/zypperpkg.py -@@ -1121,7 +1121,9 @@ def _get_repo_info(alias, repos_cfg=None, root=None): +@@ -1111,7 +1111,9 @@ def _get_repo_info(alias, repos_cfg=None, root=None): Get one repo meta-data. 
""" try: @@ -24,6 +24,6 @@ index dd836b7ad0..32e22ce9a8 100644 for key, val in meta.items(): if val in ["0", "1"]: -- -2.37.3 +2.29.2 diff --git a/refactor-and-improvements-for-transactional-updates-.patch b/refactor-and-improvements-for-transactional-updates-.patch new file mode 100644 index 0000000..35964da --- /dev/null +++ b/refactor-and-improvements-for-transactional-updates-.patch @@ -0,0 +1,1063 @@ +From e28a67ec69b4781fc9c667a9cdec66192e4cca45 Mon Sep 17 00:00:00 2001 +From: Alexander Graul +Date: Wed, 19 Jan 2022 17:45:01 +0100 +Subject: [PATCH] Refactor and improvements for "transactional-updates" + module + +Add --no-return-event option to salt-call + +Act on concurrent flag when running highstate + +Simplify transactional_update module to not use SSH wrapper + +Fix tests for transactional update + +Add changelog + +Fix pylint issues + +Fix failing unit test for state.highstate after refactor + +Remove hack about tukit issue that has been already fixed +--- + changelog/61188.fixed | 3 + + salt/cli/caller.py | 2 +- + salt/modules/state.py | 12 +- + salt/modules/transactional_update.py | 235 ++--------- + salt/utils/parsers.py | 6 + + .../pytests/unit/modules/state/test_state.py | 2 +- + .../unit/modules/test_transactional_update.py | 389 ++---------------- + 7 files changed, 81 insertions(+), 568 deletions(-) + create mode 100644 changelog/61188.fixed + +diff --git a/changelog/61188.fixed b/changelog/61188.fixed +new file mode 100644 +index 0000000000..102a8982a6 +--- /dev/null ++++ b/changelog/61188.fixed +@@ -0,0 +1,3 @@ ++Add "--no-return-event" option to salt-call to prevent sending return event back to master. ++Make "state.highstate" to acts on concurrent flag. 
++Simplify "transactional_update" module to not use SSH wrapper and allow more flexible execution +diff --git a/salt/cli/caller.py b/salt/cli/caller.py +index 795d32e4c9..10eb5e397a 100644 +--- a/salt/cli/caller.py ++++ b/salt/cli/caller.py +@@ -293,7 +293,7 @@ class BaseCaller: + pass + + # return the job infos back up to the respective minion's master +- if not is_local: ++ if not is_local and not self.opts.get("no_return_event", False): + try: + mret = ret.copy() + mret["jid"] = "req" +diff --git a/salt/modules/state.py b/salt/modules/state.py +index c78072131b..0c3dfc3317 100644 +--- a/salt/modules/state.py ++++ b/salt/modules/state.py +@@ -1053,9 +1053,15 @@ def highstate(test=None, queue=False, **kwargs): + } + return ret + +- conflict = _check_queue(queue, kwargs) +- if conflict is not None: +- return conflict ++ concurrent = kwargs.get("concurrent", False) ++ ++ if queue: ++ _wait(kwargs.get("__pub_jid")) ++ else: ++ conflict = running(concurrent) ++ if conflict: ++ __context__["retcode"] = salt.defaults.exitcodes.EX_STATE_COMPILER_ERROR ++ return conflict + + orig_test = __opts__.get("test", None) + opts = salt.utils.state.get_sls_opts(__opts__, **kwargs) +diff --git a/salt/modules/transactional_update.py b/salt/modules/transactional_update.py +index 799fe08e4d..28b02f8fec 100644 +--- a/salt/modules/transactional_update.py ++++ b/salt/modules/transactional_update.py +@@ -275,11 +275,7 @@ transaction. 
+ + """ + +-import copy + import logging +-import os +-import sys +-import tempfile + + # required by _check_queue invocation later + import time # pylint: disable=unused-import +@@ -312,11 +308,6 @@ def __virtual__(): + return (False, "Module transactional_update requires a transactional system") + + +-class TransactionalUpdateHighstate(salt.client.ssh.state.SSHHighState): +- def _master_tops(self): +- return self.client.master_tops() +- +- + def _global_params(self_update, snapshot=None, quiet=False): + """Utility function to prepare common global parameters.""" + params = ["--non-interactive", "--drop-if-no-change"] +@@ -950,65 +941,42 @@ def call(function, *args, **kwargs): + + activate_transaction = kwargs.pop("activate_transaction", False) + +- # Generate the salt-thin and create a temporary directory in a +- # place that the new transaction will have access to, and where we +- # can untar salt-thin +- thin_path = __utils__["thin.gen_thin"]( +- __opts__["cachedir"], +- extra_mods=__salt__["config.option"]("thin_extra_mods", ""), +- so_mods=__salt__["config.option"]("thin_so_mods", ""), +- ) +- thin_dest_path = tempfile.mkdtemp(dir=__opts__["cachedir"]) +- # Some bug in Salt is preventing us to use `archive.tar` here. A +- # AsyncZeroMQReqChannel is not closed at the end of the salt-call, +- # and makes the client never exit. 
+- # +- # stdout = __salt__['archive.tar']('xzf', thin_path, dest=thin_dest_path) +- # +- stdout = __salt__["cmd.run"](["tar", "xzf", thin_path, "-C", thin_dest_path]) +- if stdout: +- __utils__["files.rm_rf"](thin_dest_path) +- return {"result": False, "comment": stdout} +- + try: + safe_kwargs = salt.utils.args.clean_kwargs(**kwargs) + salt_argv = ( + [ +- "python{}".format(sys.version_info[0]), +- os.path.join(thin_dest_path, "salt-call"), +- "--metadata", +- "--local", +- "--log-file", +- os.path.join(thin_dest_path, "log"), +- "--cachedir", +- os.path.join(thin_dest_path, "cache"), ++ "salt-call", + "--out", + "json", + "-l", + "quiet", ++ "--no-return-event", + "--", + function, + ] + + list(args) + + ["{}={}".format(k, v) for (k, v) in safe_kwargs.items()] + ) ++ + try: + ret_stdout = run([str(x) for x in salt_argv], snapshot="continue") + except salt.exceptions.CommandExecutionError as e: ++ # This happens when there was an problem with salt-call execution + ret_stdout = e.message + + # Process "real" result in stdout + try: + data = __utils__["json.find_json"](ret_stdout) + local = data.get("local", data) +- if isinstance(local, dict) and "retcode" in local: +- __context__["retcode"] = local["retcode"] +- return local.get("return", data) ++ if isinstance(local, dict): ++ if "retcode" in local: ++ __context__["retcode"] = local["retcode"] ++ return local.get("return", local) ++ else: ++ return local + except ValueError: + return {"result": False, "retcode": 1, "comment": ret_stdout} + finally: +- __utils__["files.rm_rf"](thin_dest_path) +- + # Check if reboot is needed + if activate_transaction and pending_transaction(): + reboot() +@@ -1044,49 +1012,7 @@ def apply_(mods=None, **kwargs): + return highstate(**kwargs) + + +-def _create_and_execute_salt_state( +- chunks, file_refs, test, hash_type, activate_transaction +-): +- """Create the salt_state tarball, and execute it in a transaction""" +- +- # Create the tar containing the state pkg and relevant 
files. +- salt.client.ssh.wrapper.state._cleanup_slsmod_low_data(chunks) +- trans_tar = salt.client.ssh.state.prep_trans_tar( +- salt.fileclient.get_file_client(__opts__), chunks, file_refs, __pillar__.value() +- ) +- trans_tar_sum = salt.utils.hashutils.get_hash(trans_tar, hash_type) +- +- ret = None +- +- # Create a temporary directory accesible later by the transaction +- # where we can move the salt_state.tgz +- salt_state_path = tempfile.mkdtemp(dir=__opts__["cachedir"]) +- salt_state_path = os.path.join(salt_state_path, "salt_state.tgz") +- try: +- salt.utils.files.copyfile(trans_tar, salt_state_path) +- ret = call( +- "state.pkg", +- salt_state_path, +- test=test, +- pkg_sum=trans_tar_sum, +- hash_type=hash_type, +- activate_transaction=activate_transaction, +- ) +- finally: +- __utils__["files.rm_rf"](salt_state_path) +- +- return ret +- +- +-def sls( +- mods, +- saltenv="base", +- test=None, +- exclude=None, +- activate_transaction=False, +- queue=False, +- **kwargs +-): ++def sls(mods, activate_transaction=False, queue=False, **kwargs): + """Execute the states in one or more SLS files inside a transaction. + + saltenv +@@ -1132,55 +1058,14 @@ def sls( + if conflict is not None: + return conflict + +- # Get a copy of the pillar data, to avoid overwriting the current +- # pillar, instead the one delegated +- pillar = copy.deepcopy(__pillar__.value()) +- pillar.update(kwargs.get("pillar", {})) +- +- # Clone the options data and apply some default values. 
May not be +- # needed, as this module just delegate +- opts = salt.utils.state.get_sls_opts(__opts__, **kwargs) +- st_ = TransactionalUpdateHighstate( +- opts, pillar, __salt__, salt.fileclient.get_file_client(__opts__) +- ) +- +- if isinstance(mods, str): +- mods = mods.split(",") +- +- high_data, errors = st_.render_highstate({saltenv: mods}) +- if exclude: +- if isinstance(exclude, str): +- exclude = exclude.split(",") +- if "__exclude__" in high_data: +- high_data["__exclude__"].extend(exclude) +- else: +- high_data["__exclude__"] = exclude +- +- high_data, ext_errors = st_.state.reconcile_extend(high_data) +- errors += ext_errors +- errors += st_.state.verify_high(high_data) +- if errors: +- return errors +- +- high_data, req_in_errors = st_.state.requisite_in(high_data) +- errors += req_in_errors +- if errors: +- return errors +- +- high_data = st_.state.apply_exclude(high_data) +- +- # Compile and verify the raw chunks +- chunks = st_.state.compile_high_data(high_data) +- file_refs = salt.client.ssh.state.lowstate_file_refs( +- chunks, +- salt.client.ssh.wrapper.state._merge_extra_filerefs( +- kwargs.get("extra_filerefs", ""), opts.get("extra_filerefs", "") +- ), +- ) ++ concurrent = kwargs.pop("concurrent", True) + +- hash_type = opts["hash_type"] +- return _create_and_execute_salt_state( +- chunks, file_refs, test, hash_type, activate_transaction ++ return call( ++ "state.sls", ++ mods, ++ activate_transaction=activate_transaction, ++ concurrent=concurrent, ++ **kwargs + ) + + +@@ -1216,40 +1101,15 @@ def highstate(activate_transaction=False, queue=False, **kwargs): + if conflict is not None: + return conflict + +- # Get a copy of the pillar data, to avoid overwriting the current +- # pillar, instead the one delegated +- pillar = copy.deepcopy(__pillar__.value()) +- pillar.update(kwargs.get("pillar", {})) +- +- # Clone the options data and apply some default values. 
May not be +- # needed, as this module just delegate +- opts = salt.utils.state.get_sls_opts(__opts__, **kwargs) +- st_ = TransactionalUpdateHighstate( +- opts, pillar, __salt__, salt.fileclient.get_file_client(__opts__) +- ) +- +- # Compile and verify the raw chunks +- chunks = st_.compile_low_chunks() +- file_refs = salt.client.ssh.state.lowstate_file_refs( +- chunks, +- salt.client.ssh.wrapper.state._merge_extra_filerefs( +- kwargs.get("extra_filerefs", ""), opts.get("extra_filerefs", "") +- ), +- ) +- # Check for errors +- for chunk in chunks: +- if not isinstance(chunk, dict): +- __context__["retcode"] = 1 +- return chunks +- +- test = kwargs.pop("test", False) +- hash_type = opts["hash_type"] +- return _create_and_execute_salt_state( +- chunks, file_refs, test, hash_type, activate_transaction ++ return call( ++ "state.highstate", ++ activate_transaction=activate_transaction, ++ concurrent=True, ++ **kwargs + ) + + +-def single(fun, name, test=None, activate_transaction=False, queue=False, **kwargs): ++def single(fun, name, activate_transaction=False, queue=False, **kwargs): + """Execute a single state function with the named kwargs, returns + False if insufficient data is sent to the command + +@@ -1282,44 +1142,11 @@ def single(fun, name, test=None, activate_transaction=False, queue=False, **kwar + if conflict is not None: + return conflict + +- # Get a copy of the pillar data, to avoid overwriting the current +- # pillar, instead the one delegated +- pillar = copy.deepcopy(__pillar__.value()) +- pillar.update(kwargs.get("pillar", {})) +- +- # Clone the options data and apply some default values. 
May not be +- # needed, as this module just delegate +- opts = salt.utils.state.get_sls_opts(__opts__, **kwargs) +- st_ = salt.client.ssh.state.SSHState(opts, pillar) +- +- # state.fun -> [state, fun] +- comps = fun.split(".") +- if len(comps) < 2: +- __context__["retcode"] = 1 +- return "Invalid function passed" +- +- # Create the low chunk, using kwargs as a base +- kwargs.update({"state": comps[0], "fun": comps[1], "__id__": name, "name": name}) +- +- # Verify the low chunk +- err = st_.verify_data(kwargs) +- if err: +- __context__["retcode"] = 1 +- return err +- +- # Must be a list of low-chunks +- chunks = [kwargs] +- +- # Retrieve file refs for the state run, so we can copy relevant +- # files down to the minion before executing the state +- file_refs = salt.client.ssh.state.lowstate_file_refs( +- chunks, +- salt.client.ssh.wrapper.state._merge_extra_filerefs( +- kwargs.get("extra_filerefs", ""), opts.get("extra_filerefs", "") +- ), +- ) +- +- hash_type = opts["hash_type"] +- return _create_and_execute_salt_state( +- chunks, file_refs, test, hash_type, activate_transaction ++ return call( ++ "state.single", ++ fun=fun, ++ name=name, ++ activate_transaction=activate_transaction, ++ concurrent=True, ++ **kwargs + ) +diff --git a/salt/utils/parsers.py b/salt/utils/parsers.py +index c0820e5df0..5ff3c964be 100644 +--- a/salt/utils/parsers.py ++++ b/salt/utils/parsers.py +@@ -3108,6 +3108,12 @@ class SaltCallOptionParser( + action="store_true", + help="Force a refresh of the grains cache.", + ) ++ self.add_option( ++ "--no-return-event", ++ default=False, ++ action="store_true", ++ help=("Do not produce the return event back to master."), ++ ) + self.add_option( + "-t", + "--timeout", +diff --git a/tests/pytests/unit/modules/state/test_state.py b/tests/pytests/unit/modules/state/test_state.py +index 3fa663edeb..02fd2dd307 100644 +--- a/tests/pytests/unit/modules/state/test_state.py ++++ b/tests/pytests/unit/modules/state/test_state.py +@@ -777,7 +777,7 @@ def 
test_highstate(): + } + + mock = MagicMock(side_effect=["A", None, None]) +- with patch.object(state, "_check_queue", mock): ++ with patch.object(state, "running", mock): + assert state.highstate("whitelist=sls1.sls") == "A" + + with patch.dict(state.__opts__, {"test": "A"}): +diff --git a/tests/pytests/unit/modules/test_transactional_update.py b/tests/pytests/unit/modules/test_transactional_update.py +index 032ca0c9e8..40dab0e2f6 100644 +--- a/tests/pytests/unit/modules/test_transactional_update.py ++++ b/tests/pytests/unit/modules/test_transactional_update.py +@@ -1,5 +1,3 @@ +-import sys +- + import pytest + import salt.loader.context + import salt.modules.state as statemod +@@ -353,114 +351,23 @@ def test_call_fails_input_validation(): + tu.call("") + + +-@patch("tempfile.mkdtemp", MagicMock(return_value="/var/cache/salt/minion/tmp01")) +-def test_call_fails_untar(): +- """Test transactional_update.call when tar fails""" +- utils_mock = { +- "thin.gen_thin": MagicMock(return_value="/salt-thin.tgz"), +- "files.rm_rf": MagicMock(), +- } +- opts_mock = {"cachedir": "/var/cache/salt/minion"} +- salt_mock = { +- "cmd.run": MagicMock(return_value="Error"), +- "config.option": MagicMock(), +- } +- with patch.dict(tu.__utils__, utils_mock), patch.dict( +- tu.__opts__, opts_mock +- ), patch.dict(tu.__salt__, salt_mock): +- assert tu.call("/chroot", "test.ping") == { +- "result": False, +- "comment": "Error", +- } +- +- utils_mock["thin.gen_thin"].assert_called_once() +- salt_mock["config.option"].assert_called() +- salt_mock["cmd.run"].assert_called_once() +- utils_mock["files.rm_rf"].assert_called_once() +- +- +-@patch("tempfile.mkdtemp", MagicMock(return_value="/var/cache/salt/minion/tmp01")) +-def test_call_fails_salt_thin(): +- """Test transactional_update.chroot when fails salt_thin""" +- utils_mock = { +- "thin.gen_thin": MagicMock(return_value="/salt-thin.tgz"), +- "files.rm_rf": MagicMock(), +- "json.find_json": MagicMock(side_effect=ValueError()), +- } +- 
opts_mock = {"cachedir": "/var/cache/salt/minion"} +- salt_mock = { +- "cmd.run": MagicMock(return_value=""), +- "config.option": MagicMock(), +- "cmd.run_all": MagicMock(return_value={"retcode": 1, "stderr": "Error"}), +- } +- with patch.dict(tu.__utils__, utils_mock), patch.dict( +- tu.__opts__, opts_mock +- ), patch.dict(tu.__salt__, salt_mock): +- assert tu.call("test.ping") == { +- "result": False, +- "retcode": 1, +- "comment": "Error", +- } +- +- utils_mock["thin.gen_thin"].assert_called_once() +- salt_mock["config.option"].assert_called() +- salt_mock["cmd.run"].assert_called_once() +- salt_mock["cmd.run_all"].assert_called_with( +- [ +- "transactional-update", +- "--non-interactive", +- "--drop-if-no-change", +- "--no-selfupdate", +- "--continue", +- "--quiet", +- "run", +- "python{}".format(sys.version_info[0]), +- "/var/cache/salt/minion/tmp01/salt-call", +- "--metadata", +- "--local", +- "--log-file", +- "/var/cache/salt/minion/tmp01/log", +- "--cachedir", +- "/var/cache/salt/minion/tmp01/cache", +- "--out", +- "json", +- "-l", +- "quiet", +- "--", +- "test.ping", +- ] +- ) +- utils_mock["files.rm_rf"].assert_called_once() +- +- +-@patch("tempfile.mkdtemp", MagicMock(return_value="/var/cache/salt/minion/tmp01")) + def test_call_fails_function(): + """Test transactional_update.chroot when fails the function""" + utils_mock = { +- "thin.gen_thin": MagicMock(return_value="/salt-thin.tgz"), +- "files.rm_rf": MagicMock(), + "json.find_json": MagicMock(side_effect=ValueError()), + } +- opts_mock = {"cachedir": "/var/cache/salt/minion"} + salt_mock = { +- "cmd.run": MagicMock(return_value=""), +- "config.option": MagicMock(), + "cmd.run_all": MagicMock( + return_value={"retcode": 0, "stdout": "Not found", "stderr": ""} + ), + } +- with patch.dict(tu.__utils__, utils_mock), patch.dict( +- tu.__opts__, opts_mock +- ), patch.dict(tu.__salt__, salt_mock): ++ with patch.dict(tu.__utils__, utils_mock), patch.dict(tu.__salt__, salt_mock): + assert 
tu.call("test.ping") == { + "result": False, + "retcode": 1, + "comment": "Not found", + } + +- utils_mock["thin.gen_thin"].assert_called_once() +- salt_mock["config.option"].assert_called() +- salt_mock["cmd.run"].assert_called_once() + salt_mock["cmd.run_all"].assert_called_with( + [ + "transactional-update", +@@ -470,47 +377,29 @@ def test_call_fails_function(): + "--continue", + "--quiet", + "run", +- "python{}".format(sys.version_info[0]), +- "/var/cache/salt/minion/tmp01/salt-call", +- "--metadata", +- "--local", +- "--log-file", +- "/var/cache/salt/minion/tmp01/log", +- "--cachedir", +- "/var/cache/salt/minion/tmp01/cache", ++ "salt-call", + "--out", + "json", + "-l", + "quiet", ++ "--no-return-event", + "--", + "test.ping", + ] + ) +- utils_mock["files.rm_rf"].assert_called_once() + + +-@patch("tempfile.mkdtemp", MagicMock(return_value="/var/cache/salt/minion/tmp01")) + def test_call_success_no_reboot(): + """Test transactional_update.chroot when succeed""" + utils_mock = { +- "thin.gen_thin": MagicMock(return_value="/salt-thin.tgz"), +- "files.rm_rf": MagicMock(), + "json.find_json": MagicMock(return_value={"return": "result"}), + } +- opts_mock = {"cachedir": "/var/cache/salt/minion"} + salt_mock = { +- "cmd.run": MagicMock(return_value=""), +- "config.option": MagicMock(), + "cmd.run_all": MagicMock(return_value={"retcode": 0, "stdout": ""}), + } +- with patch.dict(tu.__utils__, utils_mock), patch.dict( +- tu.__opts__, opts_mock +- ), patch.dict(tu.__salt__, salt_mock): ++ with patch.dict(tu.__utils__, utils_mock), patch.dict(tu.__salt__, salt_mock): + assert tu.call("test.ping") == "result" + +- utils_mock["thin.gen_thin"].assert_called_once() +- salt_mock["config.option"].assert_called() +- salt_mock["cmd.run"].assert_called_once() + salt_mock["cmd.run_all"].assert_called_with( + [ + "transactional-update", +@@ -520,43 +409,30 @@ def test_call_success_no_reboot(): + "--continue", + "--quiet", + "run", +- "python{}".format(sys.version_info[0]), +- 
"/var/cache/salt/minion/tmp01/salt-call", +- "--metadata", +- "--local", +- "--log-file", +- "/var/cache/salt/minion/tmp01/log", +- "--cachedir", +- "/var/cache/salt/minion/tmp01/cache", ++ "salt-call", + "--out", + "json", + "-l", + "quiet", ++ "--no-return-event", + "--", + "test.ping", + ] + ) +- utils_mock["files.rm_rf"].assert_called_once() + + +-@patch("tempfile.mkdtemp", MagicMock(return_value="/var/cache/salt/minion/tmp01")) + def test_call_success_reboot(): + """Test transactional_update.chroot when succeed and reboot""" + pending_transaction_mock = MagicMock(return_value=True) + reboot_mock = MagicMock() + utils_mock = { +- "thin.gen_thin": MagicMock(return_value="/salt-thin.tgz"), +- "files.rm_rf": MagicMock(), + "json.find_json": MagicMock(return_value={"return": "result"}), + } +- opts_mock = {"cachedir": "/var/cache/salt/minion"} + salt_mock = { +- "cmd.run": MagicMock(return_value=""), +- "config.option": MagicMock(), + "cmd.run_all": MagicMock(return_value={"retcode": 0, "stdout": ""}), + } + with patch.dict(tu.__utils__, utils_mock), patch.dict( +- tu.__opts__, opts_mock ++ tu.__salt__, salt_mock + ), patch.dict(tu.__salt__, salt_mock), patch( + "salt.modules.transactional_update.pending_transaction", + pending_transaction_mock, +@@ -567,9 +443,6 @@ def test_call_success_reboot(): + tu.call("transactional_update.dup", activate_transaction=True) == "result" + ) + +- utils_mock["thin.gen_thin"].assert_called_once() +- salt_mock["config.option"].assert_called() +- salt_mock["cmd.run"].assert_called_once() + salt_mock["cmd.run_all"].assert_called_with( + [ + "transactional-update", +@@ -579,49 +452,31 @@ def test_call_success_reboot(): + "--continue", + "--quiet", + "run", +- "python{}".format(sys.version_info[0]), +- "/var/cache/salt/minion/tmp01/salt-call", +- "--metadata", +- "--local", +- "--log-file", +- "/var/cache/salt/minion/tmp01/log", +- "--cachedir", +- "/var/cache/salt/minion/tmp01/cache", ++ "salt-call", + "--out", + "json", + "-l", + 
"quiet", ++ "--no-return-event", + "--", + "transactional_update.dup", + ] + ) +- utils_mock["files.rm_rf"].assert_called_once() + pending_transaction_mock.assert_called_once() + reboot_mock.assert_called_once() + + +-@patch("tempfile.mkdtemp", MagicMock(return_value="/var/cache/salt/minion/tmp01")) + def test_call_success_parameters(): + """Test transactional_update.chroot when succeed with parameters""" + utils_mock = { +- "thin.gen_thin": MagicMock(return_value="/salt-thin.tgz"), +- "files.rm_rf": MagicMock(), + "json.find_json": MagicMock(return_value={"return": "result"}), + } +- opts_mock = {"cachedir": "/var/cache/salt/minion"} + salt_mock = { +- "cmd.run": MagicMock(return_value=""), +- "config.option": MagicMock(), + "cmd.run_all": MagicMock(return_value={"retcode": 0, "stdout": ""}), + } +- with patch.dict(tu.__utils__, utils_mock), patch.dict( +- tu.__opts__, opts_mock +- ), patch.dict(tu.__salt__, salt_mock): ++ with patch.dict(tu.__utils__, utils_mock), patch.dict(tu.__salt__, salt_mock): + assert tu.call("module.function", key="value") == "result" + +- utils_mock["thin.gen_thin"].assert_called_once() +- salt_mock["config.option"].assert_called() +- salt_mock["cmd.run"].assert_called_once() + salt_mock["cmd.run_all"].assert_called_with( + [ + "transactional-update", +@@ -631,75 +486,32 @@ def test_call_success_parameters(): + "--continue", + "--quiet", + "run", +- "python{}".format(sys.version_info[0]), +- "/var/cache/salt/minion/tmp01/salt-call", +- "--metadata", +- "--local", +- "--log-file", +- "/var/cache/salt/minion/tmp01/log", +- "--cachedir", +- "/var/cache/salt/minion/tmp01/cache", ++ "salt-call", + "--out", + "json", + "-l", + "quiet", ++ "--no-return-event", + "--", + "module.function", + "key=value", + ] + ) +- utils_mock["files.rm_rf"].assert_called_once() + + + def test_sls(): + """Test transactional_update.sls""" +- transactional_update_highstate_mock = MagicMock() +- transactional_update_highstate_mock.return_value = ( +- 
transactional_update_highstate_mock +- ) +- transactional_update_highstate_mock.render_highstate.return_value = (None, []) +- transactional_update_highstate_mock.state.reconcile_extend.return_value = (None, []) +- transactional_update_highstate_mock.state.requisite_in.return_value = (None, []) +- transactional_update_highstate_mock.state.verify_high.return_value = [] +- +- _create_and_execute_salt_state_mock = MagicMock(return_value="result") +- opts_mock = { +- "hash_type": "md5", +- } + salt_mock = { + "saltutil.is_running": MagicMock(return_value=[]), + } +- get_sls_opts_mock = MagicMock(return_value=opts_mock) +- with patch.dict(tu.__opts__, opts_mock), patch.dict( +- statemod.__salt__, salt_mock +- ), patch("salt.utils.state.get_sls_opts", get_sls_opts_mock), patch( +- "salt.fileclient.get_file_client", MagicMock() +- ), patch( +- "salt.modules.transactional_update.TransactionalUpdateHighstate", +- transactional_update_highstate_mock, +- ), patch( +- "salt.modules.transactional_update._create_and_execute_salt_state", +- _create_and_execute_salt_state_mock, ++ with patch.dict(statemod.__salt__, salt_mock), patch( ++ "salt.modules.transactional_update.call", MagicMock(return_value="result") + ): + assert tu.sls("module") == "result" +- _create_and_execute_salt_state_mock.assert_called_once() + + + def test_sls_queue_true(): + """Test transactional_update.sls""" +- transactional_update_highstate_mock = MagicMock() +- transactional_update_highstate_mock.return_value = ( +- transactional_update_highstate_mock +- ) +- transactional_update_highstate_mock.render_highstate.return_value = (None, []) +- transactional_update_highstate_mock.state.reconcile_extend.return_value = (None, []) +- transactional_update_highstate_mock.state.requisite_in.return_value = (None, []) +- transactional_update_highstate_mock.state.verify_high.return_value = [] +- +- _create_and_execute_salt_state_mock = MagicMock(return_value="result") +- opts_mock = { +- "hash_type": "md5", +- } + 
salt_mock = { + "saltutil.is_running": MagicMock( + side_effect=[ +@@ -714,37 +526,14 @@ def test_sls_queue_true(): + ] + ), + } +- get_sls_opts_mock = MagicMock(return_value=opts_mock) +- with patch.dict(tu.__opts__, opts_mock), patch.dict( +- statemod.__salt__, salt_mock +- ), patch("salt.utils.state.get_sls_opts", get_sls_opts_mock), patch( +- "salt.fileclient.get_file_client", MagicMock() +- ), patch( +- "salt.modules.transactional_update.TransactionalUpdateHighstate", +- transactional_update_highstate_mock, +- ), patch( +- "salt.modules.transactional_update._create_and_execute_salt_state", +- _create_and_execute_salt_state_mock, ++ with patch.dict(statemod.__salt__, salt_mock), patch( ++ "salt.modules.transactional_update.call", MagicMock(return_value="result") + ): + assert tu.sls("module", queue=True) == "result" +- _create_and_execute_salt_state_mock.assert_called_once() + + + def test_sls_queue_false_failing(): + """Test transactional_update.sls""" +- transactional_update_highstate_mock = MagicMock() +- transactional_update_highstate_mock.return_value = ( +- transactional_update_highstate_mock +- ) +- transactional_update_highstate_mock.render_highstate.return_value = (None, []) +- transactional_update_highstate_mock.state.reconcile_extend.return_value = (None, []) +- transactional_update_highstate_mock.state.requisite_in.return_value = (None, []) +- transactional_update_highstate_mock.state.verify_high.return_value = [] +- +- _create_and_execute_salt_state_mock = MagicMock(return_value="result") +- opts_mock = { +- "hash_type": "md5", +- } + salt_mock = { + "saltutil.is_running": MagicMock( + side_effect=[ +@@ -759,65 +548,27 @@ def test_sls_queue_false_failing(): + ] + ), + } +- get_sls_opts_mock = MagicMock(return_value=opts_mock) +- with patch.dict(tu.__opts__, opts_mock), patch.dict( +- statemod.__salt__, salt_mock +- ), patch("salt.utils.state.get_sls_opts", get_sls_opts_mock), patch( +- "salt.fileclient.get_file_client", MagicMock() +- ), patch( +- 
"salt.modules.transactional_update.TransactionalUpdateHighstate", +- transactional_update_highstate_mock, +- ), patch( +- "salt.modules.transactional_update._create_and_execute_salt_state", +- _create_and_execute_salt_state_mock, ++ with patch.dict(statemod.__salt__, salt_mock), patch( ++ "salt.modules.transactional_update.call", MagicMock(return_value="result") + ): + assert tu.sls("module", queue=False) == [ + 'The function "state.running" is running as PID 4126 and was started at 2015, Mar 25 12:34:07.204096 with jid 20150325123407204096' + ] +- _create_and_execute_salt_state_mock.assert_not_called() + + + def test_highstate(): + """Test transactional_update.highstage""" +- transactional_update_highstate_mock = MagicMock() +- transactional_update_highstate_mock.return_value = ( +- transactional_update_highstate_mock +- ) +- +- _create_and_execute_salt_state_mock = MagicMock(return_value="result") +- opts_mock = { +- "hash_type": "md5", +- } + salt_mock = { + "saltutil.is_running": MagicMock(return_value=[]), + } +- get_sls_opts_mock = MagicMock(return_value=opts_mock) +- with patch.dict(tu.__opts__, opts_mock), patch.dict( +- statemod.__salt__, salt_mock +- ), patch("salt.utils.state.get_sls_opts", get_sls_opts_mock), patch( +- "salt.fileclient.get_file_client", MagicMock() +- ), patch( +- "salt.modules.transactional_update.TransactionalUpdateHighstate", +- transactional_update_highstate_mock, +- ), patch( +- "salt.modules.transactional_update._create_and_execute_salt_state", +- _create_and_execute_salt_state_mock, ++ with patch.dict(statemod.__salt__, salt_mock), patch( ++ "salt.modules.transactional_update.call", MagicMock(return_value="result") + ): + assert tu.highstate() == "result" +- _create_and_execute_salt_state_mock.assert_called_once() + + + def test_highstate_queue_true(): + """Test transactional_update.highstage""" +- transactional_update_highstate_mock = MagicMock() +- transactional_update_highstate_mock.return_value = ( +- 
transactional_update_highstate_mock +- ) +- +- _create_and_execute_salt_state_mock = MagicMock(return_value="result") +- opts_mock = { +- "hash_type": "md5", +- } + salt_mock = { + "saltutil.is_running": MagicMock( + side_effect=[ +@@ -832,33 +583,14 @@ def test_highstate_queue_true(): + ] + ), + } +- get_sls_opts_mock = MagicMock(return_value=opts_mock) +- with patch.dict(tu.__opts__, opts_mock), patch.dict( +- statemod.__salt__, salt_mock +- ), patch("salt.utils.state.get_sls_opts", get_sls_opts_mock), patch( +- "salt.fileclient.get_file_client", MagicMock() +- ), patch( +- "salt.modules.transactional_update.TransactionalUpdateHighstate", +- transactional_update_highstate_mock, +- ), patch( +- "salt.modules.transactional_update._create_and_execute_salt_state", +- _create_and_execute_salt_state_mock, ++ with patch.dict(statemod.__salt__, salt_mock), patch( ++ "salt.modules.transactional_update.call", MagicMock(return_value="result") + ): + assert tu.highstate(queue=True) == "result" +- _create_and_execute_salt_state_mock.assert_called_once() + + + def test_highstate_queue_false_failing(): + """Test transactional_update.highstage""" +- transactional_update_highstate_mock = MagicMock() +- transactional_update_highstate_mock.return_value = ( +- transactional_update_highstate_mock +- ) +- +- _create_and_execute_salt_state_mock = MagicMock(return_value="result") +- opts_mock = { +- "hash_type": "md5", +- } + salt_mock = { + "saltutil.is_running": MagicMock( + side_effect=[ +@@ -873,62 +605,27 @@ def test_highstate_queue_false_failing(): + ] + ), + } +- get_sls_opts_mock = MagicMock(return_value=opts_mock) +- with patch.dict(tu.__opts__, opts_mock), patch.dict( +- statemod.__salt__, salt_mock +- ), patch("salt.utils.state.get_sls_opts", get_sls_opts_mock), patch( +- "salt.fileclient.get_file_client", MagicMock() +- ), patch( +- "salt.modules.transactional_update.TransactionalUpdateHighstate", +- transactional_update_highstate_mock, +- ), patch( +- 
"salt.modules.transactional_update._create_and_execute_salt_state", +- _create_and_execute_salt_state_mock, ++ with patch.dict(statemod.__salt__, salt_mock), patch( ++ "salt.modules.transactional_update.call", MagicMock(return_value="result") + ): + assert tu.highstate(queue=False) == [ + 'The function "state.running" is running as PID 4126 and was started at 2015, Mar 25 12:34:07.204096 with jid 20150325123407204096' + ] +- _create_and_execute_salt_state_mock.assert_not_called() + + + def test_single(): + """Test transactional_update.single""" +- ssh_state_mock = MagicMock() +- ssh_state_mock.return_value = ssh_state_mock +- ssh_state_mock.verify_data.return_value = None +- +- _create_and_execute_salt_state_mock = MagicMock(return_value="result") +- opts_mock = { +- "hash_type": "md5", +- } + salt_mock = { + "saltutil.is_running": MagicMock(return_value=[]), + } +- get_sls_opts_mock = MagicMock(return_value=opts_mock) +- with patch.dict(tu.__opts__, opts_mock), patch.dict( +- statemod.__salt__, salt_mock +- ), patch("salt.utils.state.get_sls_opts", get_sls_opts_mock), patch( +- "salt.fileclient.get_file_client", MagicMock() +- ), patch( +- "salt.client.ssh.state.SSHState", ssh_state_mock +- ), patch( +- "salt.modules.transactional_update._create_and_execute_salt_state", +- _create_and_execute_salt_state_mock, ++ with patch.dict(statemod.__salt__, salt_mock), patch( ++ "salt.modules.transactional_update.call", MagicMock(return_value="result") + ): + assert tu.single("pkg.installed", name="emacs") == "result" +- _create_and_execute_salt_state_mock.assert_called_once() + + + def test_single_queue_false_failing(): + """Test transactional_update.single""" +- ssh_state_mock = MagicMock() +- ssh_state_mock.return_value = ssh_state_mock +- ssh_state_mock.verify_data.return_value = None +- +- _create_and_execute_salt_state_mock = MagicMock(return_value="result") +- opts_mock = { +- "hash_type": "md5", +- } + salt_mock = { + "saltutil.is_running": MagicMock( + side_effect=[ 
+@@ -943,33 +640,16 @@ def test_single_queue_false_failing(): + ] + ), + } +- get_sls_opts_mock = MagicMock(return_value=opts_mock) +- with patch.dict(tu.__opts__, opts_mock), patch.dict( +- statemod.__salt__, salt_mock +- ), patch("salt.utils.state.get_sls_opts", get_sls_opts_mock), patch( +- "salt.fileclient.get_file_client", MagicMock() +- ), patch( +- "salt.client.ssh.state.SSHState", ssh_state_mock +- ), patch( +- "salt.modules.transactional_update._create_and_execute_salt_state", +- _create_and_execute_salt_state_mock, ++ with patch.dict(statemod.__salt__, salt_mock), patch( ++ "salt.modules.transactional_update.call", MagicMock(return_value="result") + ): + assert tu.single("pkg.installed", name="emacs", queue=False) == [ + 'The function "state.running" is running as PID 4126 and was started at 2015, Mar 25 12:34:07.204096 with jid 20150325123407204096' + ] +- _create_and_execute_salt_state_mock.assert_not_called() + + + def test_single_queue_true(): + """Test transactional_update.single""" +- ssh_state_mock = MagicMock() +- ssh_state_mock.return_value = ssh_state_mock +- ssh_state_mock.verify_data.return_value = None +- +- _create_and_execute_salt_state_mock = MagicMock(return_value="result") +- opts_mock = { +- "hash_type": "md5", +- } + salt_mock = { + "saltutil.is_running": MagicMock( + side_effect=[ +@@ -984,16 +664,7 @@ def test_single_queue_true(): + ] + ), + } +- get_sls_opts_mock = MagicMock(return_value=opts_mock) +- with patch.dict(tu.__opts__, opts_mock), patch.dict( +- statemod.__salt__, salt_mock +- ), patch("salt.utils.state.get_sls_opts", get_sls_opts_mock), patch( +- "salt.fileclient.get_file_client", MagicMock() +- ), patch( +- "salt.client.ssh.state.SSHState", ssh_state_mock +- ), patch( +- "salt.modules.transactional_update._create_and_execute_salt_state", +- _create_and_execute_salt_state_mock, ++ with patch.dict(statemod.__salt__, salt_mock), patch( ++ "salt.modules.transactional_update.call", MagicMock(return_value="result") + ): + 
assert tu.single("pkg.installed", name="emacs", queue=True) == "result" +- _create_and_execute_salt_state_mock.assert_called_once() +-- +2.34.1 + + diff --git a/restore-default-behaviour-of-pkg-list-return.patch b/restore-default-behaviour-of-pkg-list-return.patch index a598703..8fd3087 100644 --- a/restore-default-behaviour-of-pkg-list-return.patch +++ b/restore-default-behaviour-of-pkg-list-return.patch @@ -1,4 +1,4 @@ -From 2de635bd9c2f9571092d5904ac8fa971c0140235 Mon Sep 17 00:00:00 2001 +From 9885fa0fc9ec818515551b2a415e6f1b8e3680b9 Mon Sep 17 00:00:00 2001 From: Jochen Breuer Date: Fri, 30 Aug 2019 14:20:06 +0200 Subject: [PATCH] Restore default behaviour of pkg list return @@ -13,10 +13,10 @@ Co-authored-by: Mihai Dinca 1 file changed, 25 insertions(+), 9 deletions(-) diff --git a/salt/modules/zypperpkg.py b/salt/modules/zypperpkg.py -index e52535d428..dd836b7ad0 100644 +index 6af922337f..ac618bd385 100644 --- a/salt/modules/zypperpkg.py +++ b/salt/modules/zypperpkg.py -@@ -1410,8 +1410,10 @@ def refresh_db(force=None, root=None): +@@ -1401,8 +1401,10 @@ def refresh_db(force=None, root=None): return ret @@ -28,7 +28,7 @@ index e52535d428..dd836b7ad0 100644 return sorted({pkg.split(":", 1)[0] for pkg in pkgs if len(pkg.split(":", 1)) == 2}) -@@ -1427,6 +1429,7 @@ def install( +@@ -1418,6 +1420,7 @@ def install( ignore_repo_failure=False, no_recommends=False, root=None, @@ -36,7 +36,7 @@ index e52535d428..dd836b7ad0 100644 **kwargs ): """ -@@ -1542,6 +1545,9 @@ def install( +@@ -1533,6 +1536,9 @@ def install( .. 
versionadded:: 2018.3.0 @@ -46,7 +46,7 @@ index e52535d428..dd836b7ad0 100644 Returns a dict containing the new package names and versions:: -@@ -1617,7 +1623,8 @@ def install( +@@ -1608,7 +1614,8 @@ def install( diff_attr = kwargs.get("diff_attr") @@ -56,7 +56,7 @@ index e52535d428..dd836b7ad0 100644 old = ( list_pkgs(attr=diff_attr, root=root, includes=includes) if not downloadonly -@@ -1847,7 +1854,7 @@ def upgrade( +@@ -1838,7 +1845,7 @@ def upgrade( return ret @@ -65,7 +65,7 @@ index e52535d428..dd836b7ad0 100644 """ Remove and purge do identical things but with different Zypper commands, this function performs the common logic. -@@ -1857,7 +1864,7 @@ def _uninstall(name=None, pkgs=None, root=None): +@@ -1848,7 +1855,7 @@ def _uninstall(name=None, pkgs=None, root=None): except MinionError as exc: raise CommandExecutionError(exc) @@ -74,7 +74,7 @@ index e52535d428..dd836b7ad0 100644 old = list_pkgs(root=root, includes=includes) targets = [] for target in pkg_params: -@@ -1920,7 +1927,7 @@ def normalize_name(name): +@@ -1911,7 +1918,7 @@ def normalize_name(name): def remove( @@ -83,7 +83,7 @@ index e52535d428..dd836b7ad0 100644 ): # pylint: disable=unused-argument """ .. versionchanged:: 2015.8.12,2016.3.3,2016.11.0 -@@ -1952,8 +1959,11 @@ def remove( +@@ -1943,8 +1950,11 @@ def remove( root Operate on a different root directory. @@ -96,7 +96,7 @@ index e52535d428..dd836b7ad0 100644 Returns a dict containing the changes. -@@ -1965,10 +1975,12 @@ def remove( +@@ -1956,10 +1966,12 @@ def remove( salt '*' pkg.remove ,, salt '*' pkg.remove pkgs='["foo", "bar"]' """ @@ -111,7 +111,7 @@ index e52535d428..dd836b7ad0 100644 """ .. 
versionchanged:: 2015.8.12,2016.3.3,2016.11.0 On minions running systemd>=205, `systemd-run(1)`_ is now used to -@@ -2000,6 +2012,10 @@ def purge(name=None, pkgs=None, root=None, **kwargs): # pylint: disable=unused- +@@ -1991,6 +2003,10 @@ def purge(name=None, pkgs=None, root=None, **kwargs): # pylint: disable=unused- root Operate on a different root directory. @@ -122,7 +122,7 @@ index e52535d428..dd836b7ad0 100644 .. versionadded:: 0.16.0 -@@ -2013,7 +2029,7 @@ def purge(name=None, pkgs=None, root=None, **kwargs): # pylint: disable=unused- +@@ -2004,7 +2020,7 @@ def purge(name=None, pkgs=None, root=None, **kwargs): # pylint: disable=unused- salt '*' pkg.purge ,, salt '*' pkg.purge pkgs='["foo", "bar"]' """ @@ -130,8 +130,8 @@ index e52535d428..dd836b7ad0 100644 + return _uninstall(inclusion_detection, name=name, pkgs=pkgs, root=root) - def list_holds(pattern=None, full=True, root=None, **kwargs): + def list_locks(root=None): -- -2.37.3 +2.33.0 diff --git a/retry-if-rpm-lock-is-temporarily-unavailable-547.patch b/retry-if-rpm-lock-is-temporarily-unavailable-547.patch index 46cb60b..ceb3777 100644 --- a/retry-if-rpm-lock-is-temporarily-unavailable-547.patch +++ b/retry-if-rpm-lock-is-temporarily-unavailable-547.patch @@ -1,4 +1,4 @@ -From 4a9ec335e7da2f0e3314580e43075bb69fe90c38 Mon Sep 17 00:00:00 2001 +From cedde1082b3a11b941327ba8e213f44637fb8a6b Mon Sep 17 00:00:00 2001 From: Witek Bedyk Date: Mon, 29 Aug 2022 14:16:00 +0200 Subject: [PATCH] Retry if RPM lock is temporarily unavailable (#547) @@ -27,7 +27,7 @@ index 0000000000..59f1914593 @@ -0,0 +1 @@ +Fixed Zypper module failing on RPM lock file being temporarily unavailable. 
diff --git a/salt/modules/zypperpkg.py b/salt/modules/zypperpkg.py -index 2c36e2968a..c787d4009d 100644 +index b622105e15..7a249486fb 100644 --- a/salt/modules/zypperpkg.py +++ b/salt/modules/zypperpkg.py @@ -14,6 +14,7 @@ Package support for openSUSE via the zypper package manager @@ -292,6 +292,6 @@ index 3f1560a385..37d555844c 100644 + self.assertEqual(lockf_mock.call_count, 2) + zypper.__zypper__._reset() -- -2.37.3 +2.37.2 diff --git a/return-the-expected-powerpc-os-arch-bsc-1117995.patch b/return-the-expected-powerpc-os-arch-bsc-1117995.patch index d653643..780d80c 100644 --- a/return-the-expected-powerpc-os-arch-bsc-1117995.patch +++ b/return-the-expected-powerpc-os-arch-bsc-1117995.patch @@ -1,4 +1,4 @@ -From 3ce70f43376dbf62edf2ca2aa8c9f28aa733b3d8 Mon Sep 17 00:00:00 2001 +From 2883215dfb434d4056812ed96d968f7043501d70 Mon Sep 17 00:00:00 2001 From: Mihai Dinca Date: Thu, 13 Dec 2018 12:17:35 +0100 Subject: [PATCH] Return the expected powerpc os arch (bsc#1117995) @@ -8,7 +8,7 @@ Subject: [PATCH] Return the expected powerpc os arch (bsc#1117995) 1 file changed, 4 insertions(+), 3 deletions(-) diff --git a/salt/utils/pkg/rpm.py b/salt/utils/pkg/rpm.py -index e80a01f92f..8203d2f989 100644 +index ba600e106b..3e990cc05d 100644 --- a/salt/utils/pkg/rpm.py +++ b/salt/utils/pkg/rpm.py @@ -59,9 +59,10 @@ def get_osarch(): @@ -26,6 +26,6 @@ index e80a01f92f..8203d2f989 100644 def check_32(arch, osarch=None): -- -2.37.3 +2.33.0 diff --git a/revert-fixing-a-use-case-when-multiple-inotify-beaco.patch b/revert-fixing-a-use-case-when-multiple-inotify-beaco.patch index e8b6485..434424d 100644 --- a/revert-fixing-a-use-case-when-multiple-inotify-beaco.patch +++ b/revert-fixing-a-use-case-when-multiple-inotify-beaco.patch @@ -1,4 +1,4 @@ -From dc849a15ea214170d4de9f54615caa8b3136dc10 Mon Sep 17 00:00:00 2001 +From a82b6316d8a780a7a8cbfbabeb52fa50b3fb1032 Mon Sep 17 00:00:00 2001 From: Alexander Graul Date: Tue, 18 Jan 2022 19:07:34 +0100 Subject: [PATCH] Revert "Fixing a 
use case when multiple inotify beacons @@ -21,19 +21,19 @@ This reverts commit 66c58dedf8c364eaeb35c5adce8bcc8fe5c1219a. 7 files changed, 11 insertions(+), 48 deletions(-) diff --git a/salt/beacons/__init__.py b/salt/beacons/__init__.py -index b346c2a648..90918cba5b 100644 +index 414142c262..13e2ae78db 100644 --- a/salt/beacons/__init__.py +++ b/salt/beacons/__init__.py -@@ -94,7 +94,6 @@ class Beacon: - log.error("Configuration for beacon must be a list.") - continue - +@@ -71,7 +71,6 @@ class Beacon: + beacon_name = current_beacon_config["beacon_module"] + else: + beacon_name = mod - b_config[mod].append({"_beacon_name": mod}) fun_str = "{}.beacon".format(beacon_name) + validate_str = "{}.validate".format(beacon_name) if fun_str in self.beacons: - runonce = self._determine_beacon_config( diff --git a/salt/beacons/diskusage.py b/salt/beacons/diskusage.py -index 5be33ff975..0b8d7c53e1 100644 +index 1216abf79b..897bc90a6a 100644 --- a/salt/beacons/diskusage.py +++ b/salt/beacons/diskusage.py @@ -8,7 +8,6 @@ Beacon to monitor disk usage. @@ -44,7 +44,7 @@ index 5be33ff975..0b8d7c53e1 100644 import salt.utils.platform try: -@@ -83,8 +82,6 @@ def beacon(config): +@@ -81,8 +80,6 @@ def beacon(config): it will override the previously defined threshold. """ @@ -54,10 +54,10 @@ index 5be33ff975..0b8d7c53e1 100644 ret = [] for mounts in config: diff --git a/salt/beacons/inotify.py b/salt/beacons/inotify.py -index 283b84fdc7..0dc60662a6 100644 +index b6e7334eee..c44bd49fb0 100644 --- a/salt/beacons/inotify.py +++ b/salt/beacons/inotify.py -@@ -67,19 +67,17 @@ def _get_notifier(config): +@@ -65,19 +65,17 @@ def _get_notifier(config): """ Check the context for the notifier and construct it if not present """ @@ -81,7 +81,7 @@ index 283b84fdc7..0dc60662a6 100644 def validate(config): -@@ -239,9 +237,6 @@ def beacon(config): +@@ -237,9 +235,6 @@ def beacon(config): being at the Notifier level in pyinotify. 
""" @@ -91,7 +91,7 @@ index 283b84fdc7..0dc60662a6 100644 config = salt.utils.beacons.list_to_dict(config) ret = [] -@@ -264,7 +259,7 @@ def beacon(config): +@@ -262,7 +257,7 @@ def beacon(config): break path = os.path.dirname(path) @@ -100,7 +100,7 @@ index 283b84fdc7..0dc60662a6 100644 if excludes and isinstance(excludes, list): for exclude in excludes: -@@ -351,9 +346,6 @@ def beacon(config): +@@ -349,9 +344,6 @@ def beacon(config): def close(config): @@ -114,7 +114,7 @@ index 283b84fdc7..0dc60662a6 100644 + __context__["inotify.notifier"].stop() + del __context__["inotify.notifier"] diff --git a/salt/beacons/napalm_beacon.py b/salt/beacons/napalm_beacon.py -index 122d56edb7..692fbe07aa 100644 +index ec8cf63fca..164b29cdf8 100644 --- a/salt/beacons/napalm_beacon.py +++ b/salt/beacons/napalm_beacon.py @@ -168,9 +168,10 @@ with a NTP server at a stratum level greater than 5. @@ -129,7 +129,7 @@ index 122d56edb7..692fbe07aa 100644 import salt.utils.napalm log = logging.getLogger(__name__) -@@ -306,9 +307,6 @@ def beacon(config): +@@ -301,9 +302,6 @@ def beacon(config): """ Watch napalm function and fire events. 
""" @@ -186,10 +186,10 @@ index f5befb2756..dfaf1d499a 100644 - assert "httpd.inotify.notifier" in inotify.__context__ + assert "inotify.notifier" in inotify.__context__ diff --git a/tests/pytests/unit/test_beacons.py b/tests/pytests/unit/test_beacons.py -index 2ca0b30ea2..841a3b8140 100644 +index 27940c6f65..a347f3f27f 100644 --- a/tests/pytests/unit/test_beacons.py +++ b/tests/pytests/unit/test_beacons.py -@@ -108,19 +108,3 @@ def test_beacon_module(): +@@ -70,19 +70,3 @@ def test_beacon_module(): ] assert ret == _expected @@ -210,6 +210,6 @@ index 2ca0b30ea2..841a3b8140 100644 - beacon.process(mock_opts["beacons"], mock_opts["grains"]) - patched[name].assert_has_calls(calls) -- -2.37.3 +2.34.1 diff --git a/run-salt-api-as-user-salt-bsc-1064520.patch b/run-salt-api-as-user-salt-bsc-1064520.patch index c639975..ddc6df0 100644 --- a/run-salt-api-as-user-salt-bsc-1064520.patch +++ b/run-salt-api-as-user-salt-bsc-1064520.patch @@ -1,4 +1,4 @@ -From 0dc36f5d2e8ba94e2a527323b698fd49a98f5246 Mon Sep 17 00:00:00 2001 +From cdecbbdf5db3f1cb6b603916fecd80738f5fae9a Mon Sep 17 00:00:00 2001 From: Christian Lanig Date: Mon, 27 Nov 2017 13:10:26 +0100 Subject: [PATCH] Run salt-api as user salt (bsc#1064520) @@ -8,7 +8,7 @@ Subject: [PATCH] Run salt-api as user salt (bsc#1064520) 1 file changed, 1 insertion(+) diff --git a/pkg/salt-api.service b/pkg/salt-api.service -index d0b6d74120..9cdc9c582b 100644 +index 7ca582dfb4..bf513e4dbd 100644 --- a/pkg/salt-api.service +++ b/pkg/salt-api.service @@ -6,6 +6,7 @@ After=network.target @@ -20,6 +20,6 @@ index d0b6d74120..9cdc9c582b 100644 ExecStart=/usr/bin/salt-api TimeoutStopSec=3 -- -2.37.3 +2.29.2 diff --git a/run-salt-master-as-dedicated-salt-user.patch b/run-salt-master-as-dedicated-salt-user.patch index 0835259..2ea2968 100644 --- a/run-salt-master-as-dedicated-salt-user.patch +++ b/run-salt-master-as-dedicated-salt-user.patch @@ -1,4 +1,4 @@ -From 34d047fa0e2733359501e15ecc282159ddbd29f9 Mon Sep 17 00:00:00 2001 +From 
3d75826c24a6a1533623982cc4d92325c739d908 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Klaus=20K=C3=A4mpf?= Date: Wed, 20 Jan 2016 11:01:06 +0100 Subject: [PATCH] Run salt master as dedicated salt user @@ -10,7 +10,7 @@ Subject: [PATCH] Run salt master as dedicated salt user 2 files changed, 4 insertions(+), 1 deletion(-) diff --git a/conf/master b/conf/master -index 17b3768267..95aed0066f 100644 +index 07bf2e9591..6415a536e7 100644 --- a/conf/master +++ b/conf/master @@ -25,7 +25,8 @@ @@ -42,6 +42,6 @@ index a0306ff370..97d158db18 100644 missingok rotate 7 -- -2.37.3 +2.34.1 diff --git a/salt.changes b/salt.changes index dc8ecb6..3829959 100644 --- a/salt.changes +++ b/salt.changes @@ -1,167 +1,3 @@ -------------------------------------------------------------------- -Mon Jan 9 12:44:28 UTC 2023 - Pablo Suárez Hernández - -- Add missing patch after rebase to fix collections Mapping issues - -- Added: - * fixes-for-python-3.10-502.patch - -------------------------------------------------------------------- -Wed Jan 4 13:29:57 UTC 2023 - Pablo Suárez Hernández - -- Prevent deadlocks in salt-ssh executions - -- Added: - * use-rlock-to-avoid-deadlocks-in-salt-ssh.patch - -------------------------------------------------------------------- -Mon Jan 2 15:51:45 UTC 2023 - Pablo Suárez Hernández - -- Create new salt-tests subpackage containing Salt tests - -------------------------------------------------------------------- -Thu Dec 29 13:35:08 UTC 2022 - Pablo Suárez Hernández - -- Update to Salt release version 3005.1 - * See release notes: https://docs.saltstack.com/en/latest/topics/releases/3005.1.html - -- Modified: - * activate-all-beacons-sources-config-pillar-grains.patch - * add-amazon-ec2-detection-for-virtual-grains-bsc-1195.patch - * add-custom-suse-capabilities-as-grains.patch - * add-environment-variable-to-know-if-yum-is-invoked-f.patch - * add-migrated-state-and-gpg-key-management-functions-.patch - * add-publish_batch-to-clearfuncs-exposed-methods.patch - * 
add-salt-ssh-support-with-venv-salt-minion-3004-493.patch - * add-sleep-on-exception-handling-on-minion-connection.patch - * add-standalone-configuration-file-for-enabling-packa.patch - * add-support-for-gpgautoimport-539.patch - * add-support-for-name-pkgs-and-diff_attr-parameters-t.patch - * align-amazon-ec2-nitro-grains-with-upstream-pr-bsc-1.patch - * allow-vendor-change-option-with-zypper.patch - * async-batch-implementation.patch - * avoid-excessive-syslogging-by-watchdog-cronjob-58.patch - * bsc-1176024-fix-file-directory-user-and-group-owners.patch - * change-the-delimeters-to-prevent-possible-tracebacks.patch - * clarify-pkg.installed-pkg_verify-documentation.patch - * debian-info_installed-compatibility-50453.patch - * detect-module.run-syntax.patch - * dnfnotify-pkgset-plugin-implementation-3002.2-450.patch - * do-not-load-pip-state-if-there-is-no-3rd-party-depen.patch - * don-t-use-shell-sbin-nologin-in-requisites.patch - * drop-serial-from-event.unpack-in-cli.batch_async.patch - * early-feature-support-config.patch - * enable-passing-a-unix_socket-for-mysql-returners-bsc.patch - * enhance-openscap-module-add-xccdf_eval-call-386.patch - * fix-bsc-1065792.patch - * fix-for-suse-expanded-support-detection.patch - * fix-issue-2068-test.patch - * fix-missing-minion-returns-in-batch-mode-360.patch - * fix-ownership-of-salt-thin-directory-when-using-the-.patch - * fix-regression-with-depending-client.ssh-on-psutil-b.patch - * fix-salt-ssh-opts-poisoning-bsc-1197637-3004-501.patch - * fix-salt.states.file.managed-for-follow_symlinks-tru.patch - * fix-salt.utils.stringutils.to_str-calls-to-make-it-w.patch - * fix-state.apply-in-test-mode-with-file-state-module-.patch - * fix-test_ipc-unit-tests.patch - * fix-the-regression-for-yumnotify-plugin-456.patch - * fix-traceback.print_exc-calls-for-test_pip_state-432.patch - * fopen-workaround-bad-buffering-for-binary-mode-563.patch - * ignore-erros-on-reading-license-files-with-dpkg_lowp.patch - * 
ignore-extend-declarations-from-excluded-sls-files.patch - * ignore-non-utf8-characters-while-reading-files-with-.patch - * include-aliases-in-the-fqdns-grains.patch - * include-stdout-in-error-message-for-zypperpkg-559.patch - * info_installed-works-without-status-attr-now.patch - * let-salt-ssh-use-platform-python-binary-in-rhel8-191.patch - * make-aptpkg.list_repos-compatible-on-enabled-disable.patch - * make-pass-renderer-configurable-other-fixes-532.patch - * make-setup.py-script-to-not-require-setuptools-9.1.patch - * make-sure-saltcacheloader-use-correct-fileclient-519.patch - * normalize-package-names-once-with-pkg.installed-remo.patch - * pass-the-context-to-pillar-ext-modules.patch - * prevent-affection-of-ssh.opts-with-lazyloader-bsc-11.patch - * prevent-pkg-plugins-errors-on-missing-cookie-path-bs.patch - * prevent-shell-injection-via-pre_flight_script_args-4.patch - * read-repo-info-without-using-interpolation-bsc-11356.patch - * restore-default-behaviour-of-pkg-list-return.patch - * retry-if-rpm-lock-is-temporarily-unavailable-547.patch - * return-the-expected-powerpc-os-arch-bsc-1117995.patch - * revert-fixing-a-use-case-when-multiple-inotify-beaco.patch - * run-salt-api-as-user-salt-bsc-1064520.patch - * run-salt-master-as-dedicated-salt-user.patch - * save-log-to-logfile-with-docker.build.patch - * set-default-target-for-pip-from-venv_pip_target-envi.patch - * state.apply-don-t-check-for-cached-pillar-errors.patch - * state.orchestrate_single-does-not-pass-pillar-none-4.patch - * switch-firewalld-state-to-use-change_interface.patch - * temporary-fix-extend-the-whitelist-of-allowed-comman.patch - * update-target-fix-for-salt-ssh-to-process-targets-li.patch - * use-adler32-algorithm-to-compute-string-checksums.patch - * use-salt-bundle-in-dockermod.patch - * x509-fixes-111.patch - * zypperpkg-ignore-retcode-104-for-search-bsc-1176697-.patch - -- Removed: - * 3003.3-do-not-consider-skipped-targets-as-failed-for.patch - * 
3003.3-postgresql-json-support-in-pillar-423.patch - * add-missing-ansible-module-functions-to-whitelist-in.patch - * add-rpm_vercmp-python-library-for-version-comparison.patch - * adds-explicit-type-cast-for-port.patch - * backport-syndic-auth-fixes.patch - * batch.py-avoid-exception-when-minion-does-not-respon.patch - * check-if-dpkgnotify-is-executable-bsc-1186674-376.patch - * do-not-crash-when-unexpected-cmd-output-at-listing-p.patch - * enhance-logging-when-inotify-beacon-is-missing-pyino.patch - * fix-62092-catch-zmq.error.zmqerror-to-set-hwm-for-zm.patch - * fix-crash-when-calling-manage.not_alive-runners.patch - * fixes-56144-to-enable-hotadd-profile-support.patch - * fixes-for-python-3.10-502.patch - * fix-exception-in-yumpkg.remove-for-not-installed-pac.patch - * fix-for-cve-2022-22967-bsc-1200566.patch - * fix-inspector-module-export-function-bsc-1097531-481.patch - * fix-ip6_interface-grain-to-not-leak-secondary-ipv4-a.patch - * fix-issues-with-salt-ssh-s-extra-filerefs.patch - * fix-jinja2-contextfuntion-base-on-version-bsc-119874.patch - * fix-multiple-security-issues-bsc-1197417.patch - * fix-salt-call-event.send-call-with-grains-and-pillar.patch - * fix-the-regression-in-schedule-module-releasded-in-3.patch - * fix-wrong-test_mod_del_repo_multiline_values-test-af.patch - * force-zyppnotify-to-prefer-packages.db-than-packages.patch - * implementation-of-held-unheld-functions-for-state-pk.patch - * implementation-of-suse_ip-execution-module-bsc-10999.patch - * improvements-on-ansiblegate-module-354.patch - * mock-ip_addrs-in-utils-minions.py-unit-test-443.patch - * notify-beacon-for-debian-ubuntu-systems-347.patch - * refactor-and-improvements-for-transactional-updates-.patch - * support-transactional-systems-microos.patch - * wipe-notify_socket-from-env-in-cmdmod-bsc-1193357-30.patch - -------------------------------------------------------------------- -Fri Oct 28 14:43:03 UTC 2022 - Victor Zhestkov - -- Pass the context to pillar ext modules -- 
Align Amazon EC2 (Nitro) grains with upstream (bsc#1203685) -- Detect module run syntax version -- Implement automated patches alignment for the Salt Bundle - -- Added: - * detect-module.run-syntax.patch - * pass-the-context-to-pillar-ext-modules.patch - * align-amazon-ec2-nitro-grains-with-upstream-pr-bsc-1.patch - -------------------------------------------------------------------- -Fri Oct 21 13:30:08 UTC 2022 - Alexander Graul - -- Ignore extend declarations from excluded SLS files (bsc#1203886) -- Clarify pkg.installed pkg_verify documentation -- Enhance capture of error messages for Zypper calls in zypperpkg module - -- Added: - * ignore-extend-declarations-from-excluded-sls-files.patch - * include-stdout-in-error-message-for-zypperpkg-559.patch - * clarify-pkg.installed-pkg_verify-documentation.patch - ------------------------------------------------------------------- Thu Oct 6 10:10:16 UTC 2022 - Pablo Suárez Hernández diff --git a/salt.spec b/salt.spec index 1b9f112..14b9636 100644 --- a/salt.spec +++ b/salt.spec @@ -36,7 +36,7 @@ %bcond_with builddocs Name: salt -Version: 3005.1 +Version: 3004 Release: 0 Summary: A parallel remote execution system License: Apache-2.0 @@ -50,9 +50,6 @@ Source4: update-documentation.sh Source5: travis.yml Source6: transactional_update.conf -### SALT PATCHES LIST BEGIN -### IMPORTANT: The line above is used as a snippet marker. Do not touch it. 
- # PATCH-FIX_OPENSUSE https://github.com/openSUSE/salt/commit/88f40fff3b81edaa55f37949f56c67112ca2dcad Patch1: run-salt-master-as-dedicated-salt-user.patch # PATCH-FIX_OPENSUSE https://github.com/openSUSE/salt/commit/cdecbbdf5db3f1cb6b603916fecd80738f5fae9a @@ -155,153 +152,204 @@ Patch21: switch-firewalld-state-to-use-change_interface.patch Patch22: add-standalone-configuration-file-for-enabling-packa.patch ############# +# PATCH-FIX_UPSTREAM https://github.com/saltstack/salt/pull/53159 (missing PR to master) +Patch23: batch.py-avoid-exception-when-minion-does-not-respon.patch # PATCH-FIX_OPENSUSE https://github.com/openSUSE/salt/pull/177 # (deviation from upstream - we should probably port this) -Patch23: restore-default-behaviour-of-pkg-list-return.patch +Patch24: restore-default-behaviour-of-pkg-list-return.patch # PATCH_FIX_OPENSUSE https://github.com/openSUSE/salt/pull/186 (missing upstream PR to master) -Patch24: read-repo-info-without-using-interpolation-bsc-11356.patch +Patch25: read-repo-info-without-using-interpolation-bsc-11356.patch # PATCH_FIX_OPENSUSE https://github.com/openSUSE/salt/pull/191 (missing upstream PR to master) -Patch25: let-salt-ssh-use-platform-python-binary-in-rhel8-191.patch +Patch26: let-salt-ssh-use-platform-python-binary-in-rhel8-191.patch +# PATCH_FIX_OPENSUSE https://github.com/openSUSE/salt/commit/a8f0a15e4067ec278c8a2d690e3bf815523286ca (missing upstream PR) +Patch27: fix-wrong-test_mod_del_repo_multiline_values-test-af.patch # PATCH-FIX_OPENSUSE https://github.com/openSUSE/salt/commit/a18ac47b75550bd55f4ca91dc221ed408881984c -Patch26: make-setup.py-script-to-not-require-setuptools-9.1.patch +Patch28: make-setup.py-script-to-not-require-setuptools-9.1.patch # PATCH-FIX_OPENSUSE https://github.com/openSUSE/salt/pull/228 (missing upstream PR) +# PATCH-FIX_UPSTREAM https://github.com/saltstack/salt/pull/61017 +Patch29: adds-explicit-type-cast-for-port.patch # PATCH-FIX_OPENSUSE 
https://github.com/openSUSE/salt/commit/da936daeebd701e147707ad814c07bfc259d4be (not yet upstream PR) -Patch27: add-publish_batch-to-clearfuncs-exposed-methods.patch +Patch30: add-publish_batch-to-clearfuncs-exposed-methods.patch # PATCH-FIX_UPSTREAM https://github.com/saltstack/salt/pull/57779 -Patch28: info_installed-works-without-status-attr-now.patch +Patch31: info_installed-works-without-status-attr-now.patch # PATCH-FIX_UPSTREAM https://github.com/saltstack/salt/pull/58552 -Patch29: zypperpkg-ignore-retcode-104-for-search-bsc-1176697-.patch +Patch32: zypperpkg-ignore-retcode-104-for-search-bsc-1176697-.patch + +#### MICROOS - TRANSACTIONAL UPDATES #### +# PATCH-FIX_UPSTREAM https://github.com/saltstack/salt/pull/58520 (master PR merged but not included in 3003) +# PATCH-FIX_UPSTREAM https://github.com/saltstack/salt/pull/60903 +Patch33: support-transactional-systems-microos.patch +########### # PATCH-FIX_OPENSUSE https://github.com/openSUSE/salt/pull/275 (missing upstream PR) -Patch30: bsc-1176024-fix-file-directory-user-and-group-owners.patch +Patch34: bsc-1176024-fix-file-directory-user-and-group-owners.patch #### NO VENDOR CHANGE #### # PATCH-FIX_UPSTREAM https://github.com/saltstack/salt/pull/60421 -Patch31: allow-vendor-change-option-with-zypper.patch +Patch35: allow-vendor-change-option-with-zypper.patch ########### # PATCH-FIX_UPSTREAM https://github.com/saltstack/salt/pull/58784 -Patch32: add-migrated-state-and-gpg-key-management-functions-.patch +Patch36: add-migrated-state-and-gpg-key-management-functions-.patch ### BEACON CONFIG ### # PATCH-FIX_OPENSUSE https://github.com/openSUSE/salt/commit/5ea2f10b15684dd417bad858642faafc92cd382 # (revert https://github.com/saltstack/salt/pull/58655) -Patch33: revert-fixing-a-use-case-when-multiple-inotify-beaco.patch +Patch37: revert-fixing-a-use-case-when-multiple-inotify-beaco.patch ########### # PATCH-FIX_OPENSUSE https://github.com/openSUSE/salt/pull/298 (missing upstream PR) -Patch34: 
fix-salt.utils.stringutils.to_str-calls-to-make-it-w.patch +Patch38: fix-salt.utils.stringutils.to_str-calls-to-make-it-w.patch +# PATCH-FIX_OPENSUSE https://github.com/openSUSE/salt/pull/304 (missing uptstream PR) +Patch39: force-zyppnotify-to-prefer-packages.db-than-packages.patch +# PATCH-FIX_UPSTREAM https://github.com/saltstack/salt/pull/59354 (master PR merged but not included in 3003) +Patch40: do-not-crash-when-unexpected-cmd-output-at-listing-p.patch +# PATCH-FIX_UPSTREAM https://github.com/saltstack/salt/pull/56173 +Patch41: fixes-56144-to-enable-hotadd-profile-support.patch # PATCH-FIX_OPENSUSE https://github.com/openSUSE/salt/pull/307 (missing upstream PR) -Patch35: add-sleep-on-exception-handling-on-minion-connection.patch +Patch42: add-sleep-on-exception-handling-on-minion-connection.patch +# PATCH-FIX_OPENSUSE https://github.com/openSUSE/salt/pull/323 (missing upstream PR) +Patch43: implementation-of-suse_ip-execution-module-bsc-10999.patch +# PATCH-FIX_OPENSUSE https://github.com/openSUSE/salt/pull/347 (missing upstream PR) +Patch44: notify-beacon-for-debian-ubuntu-systems-347.patch ### SALT-SSH PROCESSING TARGETS ### # PATCH-FIX_OPENSUSE https://github.com/openSUSE/salt/pull/336 (missing upstream PR) # PATCH-FIX_OPENSUSE https://github.com/openSUSE/salt/pull/353 (missing upstream PR) -Patch36: update-target-fix-for-salt-ssh-to-process-targets-li.patch +Patch45: update-target-fix-for-salt-ssh-to-process-targets-li.patch ############ +#### ANSIBLE GATE IMPROVEMENTS #### +# PATCH-FIX_UPSTREAM https://github.com/saltstack/salt/pull/60056 +Patch46: improvements-on-ansiblegate-module-354.patch +########## + +# PATCH-FIX_OPENSUSE https://github.com/openSUSE/salt/pull/376 (missing upstream PR) +Patch47: check-if-dpkgnotify-is-executable-bsc-1186674-376.patch # PATCH-FIX_UPSTREAM https://github.com/saltstack/salt/pull/58503 -Patch37: fix-missing-minion-returns-in-batch-mode-360.patch +Patch48: fix-missing-minion-returns-in-batch-mode-360.patch +# 
PATCH-FIX_UPSTREAM https://github.com/saltstack/salt/pull/60402 +Patch49: enhance-logging-when-inotify-beacon-is-missing-pyino.patch +# PATCH-FIX_UPSTREAM https://github.com/saltstack/salt/pull/60356 +Patch50: fix-exception-in-yumpkg.remove-for-not-installed-pac.patch +# PATCH-FIX_UPSTREAM https://github.com/saltstack/salt/pull/60432 (merged on master but not included in 3003) +Patch51: implementation-of-held-unheld-functions-for-state-pk.patch #### OPENSCAP ENHANCE #### # PATCH-FIX_UPSTREAM https://github.com/saltstack/salt/pull/59756 -Patch38: enhance-openscap-module-add-xccdf_eval-call-386.patch +Patch52: enhance-openscap-module-add-xccdf_eval-call-386.patch ############### # PATCH-FIX_OPENSUSE https://github.com/openSUSE/salt/pull/413 (missing upstream PR) -Patch39: don-t-use-shell-sbin-nologin-in-requisites.patch +Patch53: don-t-use-shell-sbin-nologin-in-requisites.patch +# PATCH-FIX_UPSTREAM https://github.com/saltstack/salt/pull/59777 +Patch54: 3003.3-postgresql-json-support-in-pillar-423.patch +# PATCH-FIX_UPSTREAM https://github.com/saltstack/salt/pull/60983 +Patch55: 3003.3-do-not-consider-skipped-targets-as-failed-for.patch +# PATCH-FIX_UPSTREAM https://github.com/saltstack/salt/pull/61017 +Patch56: fix-crash-when-calling-manage.not_alive-runners.patch +# PATCH-FIX_UPSTREAM https://github.com/saltstack/salt/pull/61014 +Patch57: fix-issues-with-salt-ssh-s-extra-filerefs.patch +# PATCH-FIX_UPSTREAM: https://github.com/saltstack/salt/pull/61061 +Patch58: fix-ip6_interface-grain-to-not-leak-secondary-ipv4-a.patch # PATCH-FIX_OPENSUSE https://github.com/openSUSE/salt/pull/432 (missing upstream PR) -Patch40: fix-traceback.print_exc-calls-for-test_pip_state-432.patch +Patch59: fix-traceback.print_exc-calls-for-test_pip_state-432.patch # PATCH-FIX_OPENSUSE https://github.com/openSUSE/salt/pull/415 (missing upstream PR) -Patch41: prevent-pkg-plugins-errors-on-missing-cookie-path-bs.patch +Patch60: prevent-pkg-plugins-errors-on-missing-cookie-path-bs.patch +# 
PATCH-FIX_UPSTREAM: https://github.com/saltstack/salt/pull/60815 +Patch61: add-rpm_vercmp-python-library-for-version-comparison.patch # PATCH-FIX_UPSTREAM: https://github.com/saltstack/salt/pull/61180 -Patch42: dnfnotify-pkgset-plugin-implementation-3002.2-450.patch +Patch62: dnfnotify-pkgset-plugin-implementation-3002.2-450.patch +# PATCH-FIX_UPSTREAM: https://github.com/saltstack/salt/pull/60324 +Patch63: mock-ip_addrs-in-utils-minions.py-unit-test-443.patch # PATCH-FIX_OPENSUSE https://github.com/openSUSE/salt/pull/456 (missing upstream PR) -Patch43: fix-the-regression-for-yumnotify-plugin-456.patch +Patch64: fix-the-regression-for-yumnotify-plugin-456.patch +# PATCH-FIX_UPSTREAM: https://github.com/saltstack/salt/pull/61188 +Patch65: refactor-and-improvements-for-transactional-updates-.patch # PATCH-FIX_UPSTREAM: https://github.com/saltstack/salt/pull/61189 -Patch44: state.apply-don-t-check-for-cached-pillar-errors.patch +Patch66: state.apply-don-t-check-for-cached-pillar-errors.patch +# PATCH-FIX_UPSTREAM: https://github.com/saltstack/salt/pull/61393 +Patch67: wipe-notify_socket-from-env-in-cmdmod-bsc-1193357-30.patch +# PATCH-FIX_UPSTREAM: https://github.com/saltstack/salt/pull/61530 +Patch68: fix-inspector-module-export-function-bsc-1097531-481.patch # PATCH-FIX_OPENSUSE: https://github.com/openSUSE/salt/pull/482 -Patch45: drop-serial-from-event.unpack-in-cli.batch_async.patch +Patch69: drop-serial-from-event.unpack-in-cli.batch_async.patch +# PATCH-FIX_OPENSUSE: https://github.com/openSUSE/salt/pull/485 +Patch70: add-missing-ansible-module-functions-to-whitelist-in.patch +# PATCH-FIX_UPSTREAM https://github.com/saltstack/salt/pull/61256 +Patch71: fix-salt-call-event.send-call-with-grains-and-pillar.patch # PATCH-FIX_UPSTREAM https://github.com/saltstack/salt/pull/61093 -Patch46: state.orchestrate_single-does-not-pass-pillar-none-4.patch +Patch72: state.orchestrate_single-does-not-pass-pillar-none-4.patch ### SALT-SSH WITH SALT BUNDLE ### # 
PATCH-FIX_UPSTREAM: https://github.com/saltstack/salt/pull/61715 (ssh_pre_flight_args) # PATCH-FIX_OPENSUSE: https://github.com/openSUSE/salt/pull/493 # PATCH-FIX_OPENSUSE: https://github.com/openSUSE/salt/pull/497 -Patch47: add-salt-ssh-support-with-venv-salt-minion-3004-493.patch -Patch48: prevent-shell-injection-via-pre_flight_script_args-4.patch +Patch73: add-salt-ssh-support-with-venv-salt-minion-3004-493.patch +Patch74: prevent-shell-injection-via-pre_flight_script_args-4.patch ############### +# PATCH-FIX_UPSTREAM: implemented at 3004.1 release (no PR) +Patch75: fix-multiple-security-issues-bsc-1197417.patch + # PATCH-FIX_OPENSUSE: https://github.com/openSUSE/salt/pull/501 -Patch49: fix-salt-ssh-opts-poisoning-bsc-1197637-3004-501.patch +Patch76: fix-salt-ssh-opts-poisoning-bsc-1197637-3004-501.patch + +# PATCH-FIX_UPSTREAM: https://github.com/saltstack/salt/pull/61064 +Patch77: fixes-for-python-3.10-502.patch # PATCH-FIX_OPENSUSE: https://github.com/openSUSE/salt/pull/505 -Patch50: prevent-affection-of-ssh.opts-with-lazyloader-bsc-11.patch +Patch78: prevent-affection-of-ssh.opts-with-lazyloader-bsc-11.patch # PATCH-FIX_OPENSUSE: https://github.com/openSUSE/salt/pull/506 -Patch51: fix-regression-with-depending-client.ssh-on-psutil-b.patch +Patch79: fix-regression-with-depending-client.ssh-on-psutil-b.patch # PATCH-FIX_UPSTREAM: https://github.com/saltstack/salt/pull/61895 -Patch52: make-sure-saltcacheloader-use-correct-fileclient-519.patch +Patch80: make-sure-saltcacheloader-use-correct-fileclient-519.patch +# PATCH-FIX_UPSTREAM: https://github.com/saltstack/salt/commit/e068a34ccb2e17ae7224f8016a24b727f726d4c8 +Patch81: fix-for-cve-2022-22967-bsc-1200566.patch # PATCH-FIX_UPSTREAM: https://github.com/saltstack/salt/pull/61827 -Patch53: ignore-erros-on-reading-license-files-with-dpkg_lowp.patch +Patch82: ignore-erros-on-reading-license-files-with-dpkg_lowp.patch # PATCH-FIX_UPSTREAM: https://github.com/saltstack/salt/pull/62109 -Patch54: 
use-salt-bundle-in-dockermod.patch +Patch83: use-salt-bundle-in-dockermod.patch # PATCH-FIX_UPSTREAM: https://github.com/saltstack/salt/pull/61984 -Patch55: save-log-to-logfile-with-docker.build.patch +Patch84: save-log-to-logfile-with-docker.build.patch # PATCH-FIX_UPSTREAM: https://github.com/saltstack/salt/pull/62029 -Patch56: normalize-package-names-once-with-pkg.installed-remo.patch +Patch85: normalize-package-names-once-with-pkg.installed-remo.patch # PATCH-FIX_UPSTREAM: https://github.com/saltstack/salt/pull/62089 -Patch57: set-default-target-for-pip-from-venv_pip_target-envi.patch +Patch86: set-default-target-for-pip-from-venv_pip_target-envi.patch # PATCH-FIX_OPENSUSE: https://github.com/openSUSE/salt/pull/534 -Patch58: fix-ownership-of-salt-thin-directory-when-using-the-.patch +Patch87: fix-ownership-of-salt-thin-directory-when-using-the-.patch # PATCH-FIX_UPSTREAM: https://github.com/saltstack/salt/pull/62033 -Patch59: add-support-for-name-pkgs-and-diff_attr-parameters-t.patch +Patch88: add-support-for-name-pkgs-and-diff_attr-parameters-t.patch +# PATCH-FIX_UPSTREAM: https://github.com/saltstack/salt/pull/62119 +Patch89: fix-62092-catch-zmq.error.zmqerror-to-set-hwm-for-zm.patch # PATCH-FIX_UPSTREAM: https://github.com/saltstack/salt/pull/62067 -Patch60: fix-salt.states.file.managed-for-follow_symlinks-tru.patch +Patch90: fix-salt.states.file.managed-for-follow_symlinks-tru.patch +# PATCH-FIX_UPSTREAM: https://github.com/saltstack/salt/pull/61856 +Patch91: fix-jinja2-contextfuntion-base-on-version-bsc-119874.patch # PATCH-FIX_UPSTREAM: https://github.com/saltstack/salt/pull/62209 -Patch61: add-support-for-gpgautoimport-539.patch +Patch92: add-support-for-gpgautoimport-539.patch # PATCH-FIX_OPENSUSE: https://github.com/openSUSE/salt/commit/2b486d0484c51509e9972e581d97655f4f87852e -Patch62: fix-test_ipc-unit-tests.patch +Patch93: fix-test_ipc-unit-tests.patch # PATCH-FIX_UPSTREAM: https://github.com/saltstack/salt/pull/62204 -Patch63: 
retry-if-rpm-lock-is-temporarily-unavailable-547.patch +Patch94: retry-if-rpm-lock-is-temporarily-unavailable-547.patch # PATCH-FIX_UPSTREAM: https://github.com/saltstack/salt/pull/62519 -Patch64: change-the-delimeters-to-prevent-possible-tracebacks.patch +Patch95: change-the-delimeters-to-prevent-possible-tracebacks.patch # PATCH-FIX_UPSTREAM: https://github.com/saltstack/salt/pull/61847 -Patch65: fix-state.apply-in-test-mode-with-file-state-module-.patch +Patch96: fix-state.apply-in-test-mode-with-file-state-module-.patch +# PATCH-FIX_UPSTREAM: https://github.com/saltstack/salt/pull/61423 +Patch97: fix-the-regression-in-schedule-module-releasded-in-3.patch # PATCH-FIX_UPSTREAM: https://github.com/saltstack/salt/pull/62539 -Patch66: add-amazon-ec2-detection-for-virtual-grains-bsc-1195.patch +Patch98: add-amazon-ec2-detection-for-virtual-grains-bsc-1195.patch +# PATCH-FIX_UPSTREAM: https://github.com/saltstack/salt/commit/643bd4b572ca97466e085ecd1d84da45b1684332 +Patch99: backport-syndic-auth-fixes.patch # PATCH-FIX_UPSTREAM: https://github.com/saltstack/salt/pull/62633 -Patch67: ignore-non-utf8-characters-while-reading-files-with-.patch +Patch100: ignore-non-utf8-characters-while-reading-files-with-.patch # PATCH-FIX_UPSTREAM: https://github.com/saltstack/salt/pull/62817 -Patch68: fopen-workaround-bad-buffering-for-binary-mode-563.patch +Patch101: fopen-workaround-bad-buffering-for-binary-mode-563.patch # PATCH-FIX_UPSTREAM: https://github.com/saltstack/salt/pull/62120 -Patch69: make-pass-renderer-configurable-other-fixes-532.patch -### ENHANCE ZYPPERPKG ERROR MESSAGES ### -# PATCH-FIX_UPSTREAM: https://github.com/saltstack/salt/pull/62750 -# PATCH-FIX_UPSTREAM: https://github.com/saltstack/salt/pull/62346 -Patch70: include-stdout-in-error-message-for-zypperpkg-559.patch -############### -# PATCH-FIX_UPSTREAM: https://github.com/saltstack/salt/pull/57426 -Patch71: clarify-pkg.installed-pkg_verify-documentation.patch -# PATCH-FIX_UPSTREAM: 
https://github.com/saltstack/salt/pull/62862 -Patch72: ignore-extend-declarations-from-excluded-sls-files.patch -# PATCH-FIX_UPSTREAM: https://github.com/saltstack/salt/pull/61772 -Patch73: detect-module.run-syntax.patch -# PATCH-FIX_UPSTREAM: https://github.com/saltstack/salt/pull/62539 -Patch74: align-amazon-ec2-nitro-grains-with-upstream-pr-bsc-1.patch -# PATCH-FIX_UPSTREAM: https://github.com/saltstack/salt/pull/62898 -Patch75: pass-the-context-to-pillar-ext-modules.patch -# PATCH-FIX_OPENSUSE: https://github.com/openSUSE/salt/commit/c6be36eeea49ee0d0641da272087305f79c32c99 (not yet upstream) -# Fix problem caused by: https://github.com/openSUSE/salt/pull/493 (Patch47) affecting only 3005.1. -Patch76: use-rlock-to-avoid-deadlocks-in-salt-ssh.patch -# PATCH-FIX_UPSTREAM: https://github.com/saltstack/salt/pull/61064 -# PATCH-FIX_OPENSUSE: https://github.com/openSUSE/salt/commit/5e3ff4d662321c237ddd5b2c5c83f35a84af594c (not PR to master yet) -Patch77: fixes-for-python-3.10-502.patch - -### IMPORTANT: The line below is used as a snippet marker. Do not touch it. -### SALT PATCHES LIST END +Patch102: make-pass-renderer-configurable-other-fixes-532.patch BuildRoot: %{_tmppath}/%{name}-%{version}-build BuildRequires: logrotate @@ -649,13 +697,6 @@ Requires(pre): %fillup_prereq Salt ssh is a master running without zmq. it enables the management of minions over a ssh connection. 
-%package tests -Summary: Unit and integration tests for Salt -Requires: %{name} = %{version}-%{release} - -%description tests -Collections of unit and integration tests for Salt - %if %{with bash_completion} %package bash-completion Summary: Bash Completion for %{name} @@ -802,11 +843,6 @@ install -Dd -m 0755 %{buildroot}%{_sysconfdir}/logrotate.d/ # Install salt-support profiles install -Dpm 0644 salt/cli/support/profiles/* %{buildroot}%{python3_sitelib}/salt/cli/support/profiles -# Install Salt tests -install -Dd -m 0750 %{buildroot}%{_datadir}/salt -install -Dd -m 0750 %{buildroot}%{_datadir}/salt/tests -cp -a tests/* %{buildroot}%{_datadir}/salt/tests/ -sed -i '1s=^#!/usr/bin/\(python\|env python\)[0-9.]*=#!/usr/bin/python3=' %{buildroot}%{_datadir}/salt/tests/runtests.py ## Install Zypper plugins only on SUSE machines %if 0%{?suse_version} @@ -1364,6 +1400,8 @@ rm -f %{_localstatedir}/cache/salt/minion/thin/version %{_bindir}/spm %{_bindir}/salt-call %{_bindir}/salt-support +%{_bindir}/salt-unity +%{_mandir}/man1/salt-unity.1.gz %{_mandir}/man1/salt-call.1.gz %{_mandir}/man1/spm.1.gz %config(noreplace) %{_sysconfdir}/logrotate.d/salt @@ -1392,11 +1430,6 @@ rm -f %{_localstatedir}/cache/salt/minion/thin/version %doc doc/_build/html %endif -%files tests -%dir %{_datadir}/salt/ -%dir %{_datadir}/salt/tests/ -%{_datadir}/salt/tests/* - %if %{with bash_completion} %files bash-completion %defattr(-,root,root) diff --git a/save-log-to-logfile-with-docker.build.patch b/save-log-to-logfile-with-docker.build.patch index 41fd9d8..9061fc3 100644 --- a/save-log-to-logfile-with-docker.build.patch +++ b/save-log-to-logfile-with-docker.build.patch @@ -1,4 +1,4 @@ -From af5eb3f436fa405b76851c6ba0d491559b020974 Mon Sep 17 00:00:00 2001 +From c70db2e50599339118c9bf00c69f5cd38ef220bb Mon Sep 17 00:00:00 2001 From: Vladimir Nadvornik Date: Mon, 27 Jun 2022 17:00:58 +0200 Subject: [PATCH] Save log to logfile with docker.build @@ -8,10 +8,10 @@ Subject: [PATCH] Save log to logfile 
with docker.build 1 file changed, 18 insertions(+) diff --git a/salt/modules/dockermod.py b/salt/modules/dockermod.py -index ab5c2ac609..461c89431f 100644 +index e6b81e7f09..1f871b40cf 100644 --- a/salt/modules/dockermod.py +++ b/salt/modules/dockermod.py -@@ -4001,6 +4001,7 @@ def build( +@@ -3990,6 +3990,7 @@ def build( fileobj=None, dockerfile=None, buildargs=None, @@ -19,7 +19,7 @@ index ab5c2ac609..461c89431f 100644 ): """ .. versionchanged:: 2018.3.0 -@@ -4054,6 +4055,9 @@ def build( +@@ -4043,6 +4044,9 @@ def build( buildargs A dictionary of build arguments provided to the docker build process. @@ -29,7 +29,7 @@ index ab5c2ac609..461c89431f 100644 **RETURN DATA** -@@ -4128,6 +4132,20 @@ def build( +@@ -4117,6 +4121,20 @@ def build( stream_data = [] for line in response: stream_data.extend(salt.utils.json.loads(line, cls=DockerJSONDecoder)) @@ -51,6 +51,6 @@ index ab5c2ac609..461c89431f 100644 # Iterate through API response and collect information for item in stream_data: -- -2.37.3 +2.36.1 diff --git a/set-default-target-for-pip-from-venv_pip_target-envi.patch b/set-default-target-for-pip-from-venv_pip_target-envi.patch index c3a6934..a82374f 100644 --- a/set-default-target-for-pip-from-venv_pip_target-envi.patch +++ b/set-default-target-for-pip-from-venv_pip_target-envi.patch @@ -1,4 +1,4 @@ -From d561491c48ee30472e0d4699ba389648ef0d863a Mon Sep 17 00:00:00 2001 +From 003266fc86c1364a41ac4bd35207290c036151a0 Mon Sep 17 00:00:00 2001 From: Victor Zhestkov Date: Mon, 27 Jun 2022 18:02:31 +0300 Subject: [PATCH] Set default target for pip from VENV_PIP_TARGET @@ -12,11 +12,12 @@ if set and no target specified on the call * Changelog entry --- - changelog/62089.changed | 1 + - salt/modules/pip.py | 6 +++++ - tests/pytests/unit/modules/test_pip.py | 31 ++++++++++++++++++++++++++ - 3 files changed, 38 insertions(+) + changelog/62089.changed | 1 + + salt/modules/pip.py | 6 + + tests/pytests/unit/modules/test_pip.py | 1806 ++++++++++++++++++++++++ + 3 files changed, 
1813 insertions(+) create mode 100644 changelog/62089.changed + create mode 100644 tests/pytests/unit/modules/test_pip.py diff --git a/changelog/62089.changed b/changelog/62089.changed new file mode 100644 @@ -43,13 +44,1786 @@ index da26416662..9410024fd5 100644 cmd.extend(["--target", target]) diff --git a/tests/pytests/unit/modules/test_pip.py b/tests/pytests/unit/modules/test_pip.py -index 405ec6c82e..ae9005d806 100644 ---- a/tests/pytests/unit/modules/test_pip.py +new file mode 100644 +index 0000000000..ae9005d806 +--- /dev/null +++ b/tests/pytests/unit/modules/test_pip.py -@@ -1773,3 +1773,34 @@ def test_when_version_is_called_with_a_user_it_should_be_passed_to_undelying_run - cwd=None, - python_shell=False, - ) +@@ -0,0 +1,1806 @@ ++import os ++import sys ++ ++import pytest ++import salt.modules.pip as pip ++import salt.utils.files ++import salt.utils.platform ++from salt.exceptions import CommandExecutionError ++from tests.support.mock import MagicMock, patch ++ ++ ++class FakeFopen: ++ def __init__(self, filename): ++ d = { ++ "requirements-0.txt": ( ++ b"--index-url http://fake.com/simple\n\n" ++ b"one # -r wrong.txt, other\n" ++ b"two # --requirement wrong.exe;some\n" ++ b"three\n" ++ b"-r requirements-1.txt\n" ++ b"# nothing\n" ++ ), ++ "requirements-1.txt": ( ++ "four\n" ++ "five\n" ++ "--requirement=requirements-2.txt\t# --requirements-2.txt\n\n" ++ ), ++ "requirements-2.txt": b"""six""", ++ "requirements-3.txt": ( ++ b"# some comment\n" ++ b"-e git+ssh://git.example.com/MyProject#egg=MyProject # the project\n" ++ b"seven\n" ++ b"-e git+ssh://git.example.com/Example#egg=example\n" ++ b"eight # -e something#or other\n" ++ b"--requirement requirements-4.txt\n\n" ++ ), ++ "requirements-4.txt": "", ++ } ++ self.val = d[filename] ++ ++ def __enter__(self): ++ return self ++ ++ def __exit__(self, *args, **kwargs): ++ pass ++ ++ def read(self): ++ return self.val ++ ++ ++@pytest.fixture ++def expected_user(): ++ return "fnord" ++ ++ ++@pytest.fixture ++def 
configure_loader_modules(): ++ return {pip: {"__salt__": {"cmd.which_bin": lambda _: "pip"}}} ++ ++ ++def test__pip_bin_env(): ++ ret = pip._pip_bin_env(None, "C:/Users/ch44d/Documents/salt/tests/pip.exe") ++ if salt.utils.platform.is_windows(): ++ assert ret == "C:/Users/ch44d/Documents/salt/tests" ++ else: ++ assert ret is None ++ ++ ++def test__pip_bin_env_no_change(): ++ cwd = "C:/Users/ch44d/Desktop" ++ ret = pip._pip_bin_env(cwd, "C:/Users/ch44d/Documents/salt/tests/pip.exe") ++ assert ret == cwd ++ ++ ++def test__pip_bin_env_no_bin_env(): ++ ret = pip._pip_bin_env(None, None) ++ assert ret is None ++ ++ ++def test_install_frozen_app(): ++ pkg = "pep8" ++ mock = MagicMock(return_value={"retcode": 0, "stdout": ""}) ++ with patch("sys.frozen", True, create=True): ++ with patch("sys._MEIPASS", True, create=True): ++ with patch.dict(pip.__salt__, {"cmd.run_all": mock}): ++ pip.install(pkg) ++ expected = [ ++ sys.executable, ++ "pip", ++ "install", ++ pkg, ++ ] ++ mock.assert_called_with( ++ expected, ++ python_shell=False, ++ saltenv="base", ++ use_vt=False, ++ runas=None, ++ ) ++ ++ ++def test_install_source_app(): ++ pkg = "pep8" ++ mock = MagicMock(return_value={"retcode": 0, "stdout": ""}) ++ with patch("sys.frozen", False, create=True): ++ with patch("sys._MEIPASS", False, create=True): ++ with patch.dict(pip.__salt__, {"cmd.run_all": mock}): ++ pip.install(pkg) ++ expected = [ ++ sys.executable, ++ "-m", ++ "pip", ++ "install", ++ pkg, ++ ] ++ mock.assert_called_with( ++ expected, ++ python_shell=False, ++ saltenv="base", ++ use_vt=False, ++ runas=None, ++ ) ++ ++ ++def test_fix4361(): ++ mock = MagicMock(return_value={"retcode": 0, "stdout": ""}) ++ with patch.dict(pip.__salt__, {"cmd.run_all": mock}): ++ pip.install(requirements="requirements.txt") ++ expected_cmd = [ ++ sys.executable, ++ "-m", ++ "pip", ++ "install", ++ "--requirement", ++ "requirements.txt", ++ ] ++ mock.assert_called_with( ++ expected_cmd, ++ saltenv="base", ++ runas=None, ++ 
use_vt=False, ++ python_shell=False, ++ ) ++ ++ ++def test_install_editable_without_egg_fails(): ++ mock = MagicMock(return_value={"retcode": 0, "stdout": ""}) ++ with patch.dict(pip.__salt__, {"cmd.run_all": mock}): ++ pytest.raises( ++ CommandExecutionError, ++ pip.install, ++ editable="git+https://github.com/saltstack/salt-testing.git", ++ ) ++ ++ ++def test_install_multiple_editable(): ++ editables = [ ++ "git+https://github.com/jek/blinker.git#egg=Blinker", ++ "git+https://github.com/saltstack/salt-testing.git#egg=SaltTesting", ++ ] ++ ++ expected = [sys.executable, "-m", "pip", "install"] ++ for item in editables: ++ expected.extend(["--editable", item]) ++ ++ # Passing editables as a list ++ mock = MagicMock(return_value={"retcode": 0, "stdout": ""}) ++ with patch.dict(pip.__salt__, {"cmd.run_all": mock}): ++ pip.install(editable=editables) ++ mock.assert_called_with( ++ expected, ++ saltenv="base", ++ runas=None, ++ use_vt=False, ++ python_shell=False, ++ ) ++ ++ # Passing editables as a comma separated list ++ mock = MagicMock(return_value={"retcode": 0, "stdout": ""}) ++ with patch.dict(pip.__salt__, {"cmd.run_all": mock}): ++ pip.install(editable=",".join(editables)) ++ mock.assert_called_with( ++ expected, ++ saltenv="base", ++ runas=None, ++ use_vt=False, ++ python_shell=False, ++ ) ++ ++ ++def test_install_multiple_pkgs_and_editables(): ++ pkgs = ["pep8", "salt"] ++ editables = [ ++ "git+https://github.com/jek/blinker.git#egg=Blinker", ++ "git+https://github.com/saltstack/salt-testing.git#egg=SaltTesting", ++ ] ++ ++ expected = [sys.executable, "-m", "pip", "install"] ++ expected.extend(pkgs) ++ for item in editables: ++ expected.extend(["--editable", item]) ++ ++ # Passing editables as a list ++ mock = MagicMock(return_value={"retcode": 0, "stdout": ""}) ++ with patch.dict(pip.__salt__, {"cmd.run_all": mock}): ++ pip.install(pkgs=pkgs, editable=editables) ++ mock.assert_called_with( ++ expected, ++ saltenv="base", ++ runas=None, ++ use_vt=False, ++ 
python_shell=False, ++ ) ++ ++ # Passing editables as a comma separated list ++ mock = MagicMock(return_value={"retcode": 0, "stdout": ""}) ++ with patch.dict(pip.__salt__, {"cmd.run_all": mock}): ++ pip.install(pkgs=",".join(pkgs), editable=",".join(editables)) ++ mock.assert_called_with( ++ expected, ++ saltenv="base", ++ runas=None, ++ use_vt=False, ++ python_shell=False, ++ ) ++ ++ # As single string (just use the first element from pkgs and editables) ++ mock = MagicMock(return_value={"retcode": 0, "stdout": ""}) ++ with patch.dict(pip.__salt__, {"cmd.run_all": mock}): ++ pip.install(pkgs=pkgs[0], editable=editables[0]) ++ expected = [ ++ sys.executable, ++ "-m", ++ "pip", ++ "install", ++ pkgs[0], ++ "--editable", ++ editables[0], ++ ] ++ mock.assert_called_with( ++ expected, ++ saltenv="base", ++ runas=None, ++ use_vt=False, ++ python_shell=False, ++ ) ++ ++ ++def test_issue5940_install_multiple_pip_mirrors(): ++ """ ++ test multiple pip mirrors. This test only works with pip < 7.0.0 ++ """ ++ with patch.object(pip, "version", MagicMock(return_value="1.4")): ++ mirrors = [ ++ "http://g.pypi.python.org", ++ "http://c.pypi.python.org", ++ "http://pypi.crate.io", ++ ] ++ ++ expected = [sys.executable, "-m", "pip", "install", "--use-mirrors"] ++ for item in mirrors: ++ expected.extend(["--mirrors", item]) ++ expected.append("pep8") ++ ++ # Passing mirrors as a list ++ mock = MagicMock(return_value={"retcode": 0, "stdout": ""}) ++ with patch.dict(pip.__salt__, {"cmd.run_all": mock}): ++ pip.install(pkgs=["pep8"], mirrors=mirrors) ++ mock.assert_called_with( ++ expected, ++ saltenv="base", ++ runas=None, ++ use_vt=False, ++ python_shell=False, ++ ) ++ ++ # Passing mirrors as a comma separated list ++ mock = MagicMock(return_value={"retcode": 0, "stdout": ""}) ++ with patch.dict(pip.__salt__, {"cmd.run_all": mock}): ++ pip.install(pkgs=["pep8"], mirrors=",".join(mirrors)) ++ mock.assert_called_with( ++ expected, ++ saltenv="base", ++ runas=None, ++ use_vt=False, ++ 
python_shell=False, ++ ) ++ ++ expected = [ ++ sys.executable, ++ "-m", ++ "pip", ++ "install", ++ "--use-mirrors", ++ "--mirrors", ++ mirrors[0], ++ "pep8", ++ ] ++ ++ # As single string (just use the first element from mirrors) ++ mock = MagicMock(return_value={"retcode": 0, "stdout": ""}) ++ with patch.dict(pip.__salt__, {"cmd.run_all": mock}): ++ pip.install(pkgs=["pep8"], mirrors=mirrors[0]) ++ mock.assert_called_with( ++ expected, ++ saltenv="base", ++ runas=None, ++ use_vt=False, ++ python_shell=False, ++ ) ++ ++ ++def test_install_with_multiple_find_links(): ++ find_links = [ ++ "http://g.pypi.python.org", ++ "http://c.pypi.python.org", ++ "http://pypi.crate.io", ++ ] ++ pkg = "pep8" ++ ++ expected = [sys.executable, "-m", "pip", "install"] ++ for item in find_links: ++ expected.extend(["--find-links", item]) ++ expected.append(pkg) ++ ++ # Passing mirrors as a list ++ mock = MagicMock(return_value={"retcode": 0, "stdout": ""}) ++ with patch.dict(pip.__salt__, {"cmd.run_all": mock}): ++ pip.install(pkg, find_links=find_links) ++ mock.assert_called_with( ++ expected, ++ saltenv="base", ++ runas=None, ++ use_vt=False, ++ python_shell=False, ++ ) ++ ++ # Passing mirrors as a comma separated list ++ mock = MagicMock(return_value={"retcode": 0, "stdout": ""}) ++ with patch.dict(pip.__salt__, {"cmd.run_all": mock}): ++ pip.install(pkg, find_links=",".join(find_links)) ++ mock.assert_called_with( ++ expected, ++ saltenv="base", ++ runas=None, ++ use_vt=False, ++ python_shell=False, ++ ) ++ ++ # Valid protos work? 
++ mock = MagicMock(return_value={"retcode": 0, "stdout": ""}) ++ with patch.dict(pip.__salt__, {"cmd.run_all": mock}): ++ pip.install(pkg, find_links=find_links) ++ mock.assert_called_with( ++ expected, ++ saltenv="base", ++ runas=None, ++ use_vt=False, ++ python_shell=False, ++ ) ++ ++ expected = [ ++ sys.executable, ++ "-m", ++ "pip", ++ "install", ++ "--find-links", ++ find_links[0], ++ pkg, ++ ] ++ ++ # As single string (just use the first element from find_links) ++ mock = MagicMock(return_value={"retcode": 0, "stdout": ""}) ++ with patch.dict(pip.__salt__, {"cmd.run_all": mock}): ++ pip.install(pkg, find_links=find_links[0]) ++ mock.assert_called_with( ++ expected, ++ saltenv="base", ++ runas=None, ++ use_vt=False, ++ python_shell=False, ++ ) ++ ++ # Invalid proto raises exception ++ mock = MagicMock(return_value={"retcode": 0, "stdout": ""}) ++ with patch.dict(pip.__salt__, {"cmd.run_all": mock}): ++ pytest.raises( ++ CommandExecutionError, ++ pip.install, ++ "'" + pkg + "'", ++ find_links="sftp://pypi.crate.io", ++ ) ++ ++ ++def test_install_no_index_with_index_url_or_extra_index_url_raises(): ++ mock = MagicMock(return_value={"retcode": 0, "stdout": ""}) ++ with patch.dict(pip.__salt__, {"cmd.run_all": mock}): ++ pytest.raises( ++ CommandExecutionError, ++ pip.install, ++ no_index=True, ++ index_url="http://foo.tld", ++ ) ++ ++ mock = MagicMock(return_value={"retcode": 0, "stdout": ""}) ++ with patch.dict(pip.__salt__, {"cmd.run_all": mock}): ++ pytest.raises( ++ CommandExecutionError, ++ pip.install, ++ no_index=True, ++ extra_index_url="http://foo.tld", ++ ) ++ ++ ++def test_install_failed_cached_requirements(): ++ with patch("salt.modules.pip._get_cached_requirements") as get_cached_requirements: ++ get_cached_requirements.return_value = False ++ ret = pip.install(requirements="salt://my_test_reqs") ++ assert False is ret["result"] ++ assert "my_test_reqs" in ret["comment"] ++ ++ ++def test_install_cached_requirements_used(): ++ with 
patch("salt.modules.pip._get_cached_requirements") as get_cached_requirements: ++ get_cached_requirements.return_value = "my_cached_reqs" ++ mock = MagicMock(return_value={"retcode": 0, "stdout": ""}) ++ with patch.dict(pip.__salt__, {"cmd.run_all": mock}): ++ pip.install(requirements="salt://requirements.txt") ++ expected = [ ++ sys.executable, ++ "-m", ++ "pip", ++ "install", ++ "--requirement", ++ "my_cached_reqs", ++ ] ++ mock.assert_called_with( ++ expected, ++ saltenv="base", ++ runas=None, ++ use_vt=False, ++ python_shell=False, ++ ) ++ ++ ++def test_install_venv(): ++ with patch("os.path") as mock_path: ++ ++ def join(*args): ++ return os.path.normpath(os.sep.join(args)) ++ ++ mock_path.is_file.return_value = True ++ mock_path.isdir.return_value = True ++ mock_path.join = join ++ ++ if salt.utils.platform.is_windows(): ++ venv_path = "C:\\test_env" ++ bin_path = os.path.join(venv_path, "python.exe") ++ else: ++ venv_path = "/test_env" ++ bin_path = os.path.join(venv_path, "python") ++ ++ mock = MagicMock(return_value={"retcode": 0, "stdout": ""}) ++ pip_bin = MagicMock(return_value=[bin_path, "-m", "pip"]) ++ ++ with patch.dict(pip.__salt__, {"cmd.run_all": mock}), patch.object( ++ pip, "_get_pip_bin", pip_bin ++ ): ++ pip.install("mock", bin_env=venv_path) ++ mock.assert_called_with( ++ [bin_path, "-m", "pip", "install", "mock"], ++ env={"VIRTUAL_ENV": venv_path}, ++ saltenv="base", ++ runas=None, ++ use_vt=False, ++ python_shell=False, ++ ) ++ ++ ++def test_install_log_argument_in_resulting_command(): ++ with patch("os.access") as mock_path: ++ pkg = "pep8" ++ log_path = "/tmp/pip-install.log" ++ mock = MagicMock(return_value={"retcode": 0, "stdout": ""}) ++ with patch.dict(pip.__salt__, {"cmd.run_all": mock}): ++ pip.install(pkg, log=log_path) ++ expected = [ ++ sys.executable, ++ "-m", ++ "pip", ++ "install", ++ "--log", ++ log_path, ++ pkg, ++ ] ++ mock.assert_called_with( ++ expected, ++ saltenv="base", ++ runas=None, ++ use_vt=False, ++ 
python_shell=False, ++ ) ++ ++ ++def test_non_writeable_log(): ++ with patch("os.path") as mock_path: ++ # Let's fake a non-writable log file ++ pkg = "pep8" ++ log_path = "/tmp/pip-install.log" ++ mock_path.exists.side_effect = IOError("Fooo!") ++ mock = MagicMock(return_value={"retcode": 0, "stdout": ""}) ++ with patch.dict(pip.__salt__, {"cmd.run_all": mock}): ++ pytest.raises(IOError, pip.install, pkg, log=log_path) ++ ++ ++def test_install_timeout_argument_in_resulting_command(): ++ # Passing an int ++ pkg = "pep8" ++ expected = [sys.executable, "-m", "pip", "install", "--timeout"] ++ mock = MagicMock(return_value={"retcode": 0, "stdout": ""}) ++ with patch.dict(pip.__salt__, {"cmd.run_all": mock}): ++ pip.install(pkg, timeout=10) ++ mock.assert_called_with( ++ expected + [10, pkg], ++ saltenv="base", ++ runas=None, ++ use_vt=False, ++ python_shell=False, ++ ) ++ ++ # Passing an int as a string ++ mock = MagicMock(return_value={"retcode": 0, "stdout": ""}) ++ with patch.dict(pip.__salt__, {"cmd.run_all": mock}): ++ pip.install(pkg, timeout="10") ++ mock.assert_called_with( ++ expected + ["10", pkg], ++ saltenv="base", ++ runas=None, ++ use_vt=False, ++ python_shell=False, ++ ) ++ ++ # Passing a non-int to timeout ++ mock = MagicMock(return_value={"retcode": 0, "stdout": ""}) ++ with patch.dict(pip.__salt__, {"cmd.run_all": mock}): ++ pytest.raises(ValueError, pip.install, pkg, timeout="a") ++ ++ ++def test_install_index_url_argument_in_resulting_command(): ++ pkg = "pep8" ++ index_url = "http://foo.tld" ++ mock = MagicMock(return_value={"retcode": 0, "stdout": ""}) ++ with patch.dict(pip.__salt__, {"cmd.run_all": mock}): ++ pip.install(pkg, index_url=index_url) ++ expected = [ ++ sys.executable, ++ "-m", ++ "pip", ++ "install", ++ "--index-url", ++ index_url, ++ pkg, ++ ] ++ mock.assert_called_with( ++ expected, ++ saltenv="base", ++ runas=None, ++ use_vt=False, ++ python_shell=False, ++ ) ++ ++ ++def 
test_install_extra_index_url_argument_in_resulting_command(): ++ pkg = "pep8" ++ extra_index_url = "http://foo.tld" ++ mock = MagicMock(return_value={"retcode": 0, "stdout": ""}) ++ with patch.dict(pip.__salt__, {"cmd.run_all": mock}): ++ pip.install(pkg, extra_index_url=extra_index_url) ++ expected = [ ++ sys.executable, ++ "-m", ++ "pip", ++ "install", ++ "--extra-index-url", ++ extra_index_url, ++ pkg, ++ ] ++ mock.assert_called_with( ++ expected, ++ saltenv="base", ++ runas=None, ++ use_vt=False, ++ python_shell=False, ++ ) ++ ++ ++def test_install_no_index_argument_in_resulting_command(): ++ pkg = "pep8" ++ mock = MagicMock(return_value={"retcode": 0, "stdout": ""}) ++ with patch.dict(pip.__salt__, {"cmd.run_all": mock}): ++ pip.install(pkg, no_index=True) ++ expected = [sys.executable, "-m", "pip", "install", "--no-index", pkg] ++ mock.assert_called_with( ++ expected, ++ saltenv="base", ++ runas=None, ++ use_vt=False, ++ python_shell=False, ++ ) ++ ++ ++def test_install_build_argument_in_resulting_command(): ++ pkg = "pep8" ++ build = "/tmp/foo" ++ mock = MagicMock(return_value={"retcode": 0, "stdout": ""}) ++ with patch.dict(pip.__salt__, {"cmd.run_all": mock}): ++ pip.install(pkg, build=build) ++ expected = [sys.executable, "-m", "pip", "install", "--build", build, pkg] ++ mock.assert_called_with( ++ expected, ++ saltenv="base", ++ runas=None, ++ use_vt=False, ++ python_shell=False, ++ ) ++ ++ ++def test_install_target_argument_in_resulting_command(): ++ pkg = "pep8" ++ target = "/tmp/foo" ++ mock = MagicMock(return_value={"retcode": 0, "stdout": ""}) ++ with patch.dict(pip.__salt__, {"cmd.run_all": mock}): ++ pip.install(pkg, target=target) ++ expected = [sys.executable, "-m", "pip", "install", "--target", target, pkg] ++ mock.assert_called_with( ++ expected, ++ saltenv="base", ++ runas=None, ++ use_vt=False, ++ python_shell=False, ++ ) ++ ++ ++def test_install_download_argument_in_resulting_command(): ++ pkg = "pep8" ++ download = "/tmp/foo" ++ mock = 
MagicMock(return_value={"retcode": 0, "stdout": ""}) ++ with patch.dict(pip.__salt__, {"cmd.run_all": mock}): ++ pip.install(pkg, download=download) ++ expected = [ ++ sys.executable, ++ "-m", ++ "pip", ++ "install", ++ "--download", ++ download, ++ pkg, ++ ] ++ mock.assert_called_with( ++ expected, ++ saltenv="base", ++ runas=None, ++ use_vt=False, ++ python_shell=False, ++ ) ++ ++ ++def test_install_no_download_argument_in_resulting_command(): ++ pkg = "pep8" ++ mock = MagicMock(return_value={"retcode": 0, "stdout": ""}) ++ with patch.dict(pip.__salt__, {"cmd.run_all": mock}): ++ pip.install(pkg, no_download=True) ++ expected = [sys.executable, "-m", "pip", "install", "--no-download", pkg] ++ mock.assert_called_with( ++ expected, ++ saltenv="base", ++ runas=None, ++ use_vt=False, ++ python_shell=False, ++ ) ++ ++ ++def test_install_download_cache_dir_arguments_in_resulting_command(): ++ pkg = "pep8" ++ cache_dir_arg_mapping = { ++ "1.5.6": "--download-cache", ++ "6.0": "--cache-dir", ++ } ++ download_cache = "/tmp/foo" ++ mock = MagicMock(return_value={"retcode": 0, "stdout": ""}) ++ ++ with patch.dict(pip.__salt__, {"cmd.run_all": mock}): ++ for pip_version, cmd_arg in cache_dir_arg_mapping.items(): ++ with patch("salt.modules.pip.version", MagicMock(return_value=pip_version)): ++ # test `download_cache` kwarg ++ pip.install(pkg, download_cache="/tmp/foo") ++ expected = [ ++ sys.executable, ++ "-m", ++ "pip", ++ "install", ++ cmd_arg, ++ download_cache, ++ pkg, ++ ] ++ mock.assert_called_with( ++ expected, ++ saltenv="base", ++ runas=None, ++ use_vt=False, ++ python_shell=False, ++ ) ++ ++ # test `cache_dir` kwarg ++ pip.install(pkg, cache_dir="/tmp/foo") ++ mock.assert_called_with( ++ expected, ++ saltenv="base", ++ runas=None, ++ use_vt=False, ++ python_shell=False, ++ ) ++ ++ ++def test_install_source_argument_in_resulting_command(): ++ pkg = "pep8" ++ source = "/tmp/foo" ++ mock = MagicMock(return_value={"retcode": 0, "stdout": ""}) ++ with 
patch.dict(pip.__salt__, {"cmd.run_all": mock}): ++ pip.install(pkg, source=source) ++ expected = [sys.executable, "-m", "pip", "install", "--source", source, pkg] ++ mock.assert_called_with( ++ expected, ++ saltenv="base", ++ runas=None, ++ use_vt=False, ++ python_shell=False, ++ ) ++ ++ ++def test_install_exists_action_argument_in_resulting_command(): ++ pkg = "pep8" ++ for action in ("s", "i", "w", "b"): ++ mock = MagicMock(return_value={"retcode": 0, "stdout": ""}) ++ with patch.dict(pip.__salt__, {"cmd.run_all": mock}): ++ pip.install(pkg, exists_action=action) ++ expected = [ ++ sys.executable, ++ "-m", ++ "pip", ++ "install", ++ "--exists-action", ++ action, ++ pkg, ++ ] ++ mock.assert_called_with( ++ expected, ++ saltenv="base", ++ runas=None, ++ use_vt=False, ++ python_shell=False, ++ ) ++ ++ # Test for invalid action ++ mock = MagicMock(return_value={"retcode": 0, "stdout": ""}) ++ with patch.dict(pip.__salt__, {"cmd.run_all": mock}): ++ pytest.raises(CommandExecutionError, pip.install, pkg, exists_action="d") ++ ++ ++def test_install_install_options_argument_in_resulting_command(): ++ install_options = ["--exec-prefix=/foo/bar", "--install-scripts=/foo/bar/bin"] ++ pkg = "pep8" ++ ++ expected = [sys.executable, "-m", "pip", "install"] ++ for item in install_options: ++ expected.extend(["--install-option", item]) ++ expected.append(pkg) ++ ++ # Passing options as a list ++ mock = MagicMock(return_value={"retcode": 0, "stdout": ""}) ++ with patch.dict(pip.__salt__, {"cmd.run_all": mock}): ++ pip.install(pkg, install_options=install_options) ++ mock.assert_called_with( ++ expected, ++ saltenv="base", ++ runas=None, ++ use_vt=False, ++ python_shell=False, ++ ) ++ ++ # Passing mirrors as a comma separated list ++ mock = MagicMock(return_value={"retcode": 0, "stdout": ""}) ++ with patch.dict(pip.__salt__, {"cmd.run_all": mock}): ++ pip.install(pkg, install_options=",".join(install_options)) ++ mock.assert_called_with( ++ expected, ++ saltenv="base", ++ 
runas=None, ++ use_vt=False, ++ python_shell=False, ++ ) ++ ++ # Passing mirrors as a single string entry ++ mock = MagicMock(return_value={"retcode": 0, "stdout": ""}) ++ with patch.dict(pip.__salt__, {"cmd.run_all": mock}): ++ pip.install(pkg, install_options=install_options[0]) ++ expected = [ ++ sys.executable, ++ "-m", ++ "pip", ++ "install", ++ "--install-option", ++ install_options[0], ++ pkg, ++ ] ++ mock.assert_called_with( ++ expected, ++ saltenv="base", ++ runas=None, ++ use_vt=False, ++ python_shell=False, ++ ) ++ ++ ++def test_install_global_options_argument_in_resulting_command(): ++ global_options = ["--quiet", "--no-user-cfg"] ++ pkg = "pep8" ++ ++ expected = [sys.executable, "-m", "pip", "install"] ++ for item in global_options: ++ expected.extend(["--global-option", item]) ++ expected.append(pkg) ++ ++ # Passing options as a list ++ mock = MagicMock(return_value={"retcode": 0, "stdout": ""}) ++ with patch.dict(pip.__salt__, {"cmd.run_all": mock}): ++ pip.install(pkg, global_options=global_options) ++ mock.assert_called_with( ++ expected, ++ saltenv="base", ++ runas=None, ++ use_vt=False, ++ python_shell=False, ++ ) ++ ++ # Passing mirrors as a comma separated list ++ mock = MagicMock(return_value={"retcode": 0, "stdout": ""}) ++ with patch.dict(pip.__salt__, {"cmd.run_all": mock}): ++ pip.install(pkg, global_options=",".join(global_options)) ++ mock.assert_called_with( ++ expected, ++ saltenv="base", ++ runas=None, ++ use_vt=False, ++ python_shell=False, ++ ) ++ ++ # Passing mirrors as a single string entry ++ mock = MagicMock(return_value={"retcode": 0, "stdout": ""}) ++ with patch.dict(pip.__salt__, {"cmd.run_all": mock}): ++ pip.install(pkg, global_options=global_options[0]) ++ expected = [ ++ sys.executable, ++ "-m", ++ "pip", ++ "install", ++ "--global-option", ++ global_options[0], ++ pkg, ++ ] ++ mock.assert_called_with( ++ expected, ++ saltenv="base", ++ runas=None, ++ use_vt=False, ++ python_shell=False, ++ ) ++ ++ ++def 
test_install_upgrade_argument_in_resulting_command(): ++ pkg = "pep8" ++ mock = MagicMock(return_value={"retcode": 0, "stdout": ""}) ++ with patch.dict(pip.__salt__, {"cmd.run_all": mock}): ++ pip.install(pkg, upgrade=True) ++ expected = [sys.executable, "-m", "pip", "install", "--upgrade", pkg] ++ mock.assert_called_with( ++ expected, ++ saltenv="base", ++ runas=None, ++ use_vt=False, ++ python_shell=False, ++ ) ++ ++ ++def test_install_force_reinstall_argument_in_resulting_command(): ++ pkg = "pep8" ++ mock = MagicMock(return_value={"retcode": 0, "stdout": ""}) ++ with patch.dict(pip.__salt__, {"cmd.run_all": mock}): ++ pip.install(pkg, force_reinstall=True) ++ expected = [ ++ sys.executable, ++ "-m", ++ "pip", ++ "install", ++ "--force-reinstall", ++ pkg, ++ ] ++ mock.assert_called_with( ++ expected, ++ saltenv="base", ++ runas=None, ++ use_vt=False, ++ python_shell=False, ++ ) ++ ++ ++def test_install_ignore_installed_argument_in_resulting_command(): ++ pkg = "pep8" ++ mock = MagicMock(return_value={"retcode": 0, "stdout": ""}) ++ with patch.dict(pip.__salt__, {"cmd.run_all": mock}): ++ pip.install(pkg, ignore_installed=True) ++ expected = [ ++ sys.executable, ++ "-m", ++ "pip", ++ "install", ++ "--ignore-installed", ++ pkg, ++ ] ++ mock.assert_called_with( ++ expected, ++ saltenv="base", ++ runas=None, ++ use_vt=False, ++ python_shell=False, ++ ) ++ ++ ++def test_install_no_deps_argument_in_resulting_command(): ++ pkg = "pep8" ++ mock = MagicMock(return_value={"retcode": 0, "stdout": ""}) ++ with patch.dict(pip.__salt__, {"cmd.run_all": mock}): ++ pip.install(pkg, no_deps=True) ++ expected = [sys.executable, "-m", "pip", "install", "--no-deps", pkg] ++ mock.assert_called_with( ++ expected, ++ saltenv="base", ++ runas=None, ++ use_vt=False, ++ python_shell=False, ++ ) ++ ++ ++def test_install_no_install_argument_in_resulting_command(): ++ pkg = "pep8" ++ mock = MagicMock(return_value={"retcode": 0, "stdout": ""}) ++ with patch.dict(pip.__salt__, {"cmd.run_all": 
mock}): ++ pip.install(pkg, no_install=True) ++ expected = [sys.executable, "-m", "pip", "install", "--no-install", pkg] ++ mock.assert_called_with( ++ expected, ++ saltenv="base", ++ runas=None, ++ use_vt=False, ++ python_shell=False, ++ ) ++ ++ ++def test_install_proxy_argument_in_resulting_command(): ++ pkg = "pep8" ++ proxy = "salt-user:salt-passwd@salt-proxy:3128" ++ mock = MagicMock(return_value={"retcode": 0, "stdout": ""}) ++ with patch.dict(pip.__salt__, {"cmd.run_all": mock}): ++ pip.install(pkg, proxy=proxy) ++ expected = [sys.executable, "-m", "pip", "install", "--proxy", proxy, pkg] ++ mock.assert_called_with( ++ expected, ++ saltenv="base", ++ runas=None, ++ use_vt=False, ++ python_shell=False, ++ ) ++ ++ ++def test_install_proxy_false_argument_in_resulting_command(): ++ """ ++ Checking that there is no proxy set if proxy arg is set to False ++ even if the global proxy is set. ++ """ ++ pkg = "pep8" ++ proxy = False ++ mock = MagicMock(return_value={"retcode": 0, "stdout": ""}) ++ config_mock = { ++ "proxy_host": "salt-proxy", ++ "proxy_port": "3128", ++ "proxy_username": "salt-user", ++ "proxy_password": "salt-passwd", ++ } ++ with patch.dict(pip.__salt__, {"cmd.run_all": mock}): ++ with patch.dict(pip.__opts__, config_mock): ++ pip.install(pkg, proxy=proxy) ++ expected = [sys.executable, "-m", "pip", "install", pkg] ++ mock.assert_called_with( ++ expected, ++ saltenv="base", ++ runas=None, ++ use_vt=False, ++ python_shell=False, ++ ) ++ ++ ++def test_install_global_proxy_in_resulting_command(): ++ """ ++ Checking that there is proxy set if global proxy is set. 
++ """ ++ pkg = "pep8" ++ proxy = "http://salt-user:salt-passwd@salt-proxy:3128" ++ mock = MagicMock(return_value={"retcode": 0, "stdout": ""}) ++ config_mock = { ++ "proxy_host": "salt-proxy", ++ "proxy_port": "3128", ++ "proxy_username": "salt-user", ++ "proxy_password": "salt-passwd", ++ } ++ with patch.dict(pip.__salt__, {"cmd.run_all": mock}): ++ with patch.dict(pip.__opts__, config_mock): ++ pip.install(pkg) ++ expected = [ ++ sys.executable, ++ "-m", ++ "pip", ++ "install", ++ "--proxy", ++ proxy, ++ pkg, ++ ] ++ mock.assert_called_with( ++ expected, ++ saltenv="base", ++ runas=None, ++ use_vt=False, ++ python_shell=False, ++ ) ++ ++ ++def test_install_multiple_requirements_arguments_in_resulting_command(): ++ with patch("salt.modules.pip._get_cached_requirements") as get_cached_requirements: ++ cached_reqs = ["my_cached_reqs-1", "my_cached_reqs-2"] ++ get_cached_requirements.side_effect = cached_reqs ++ requirements = ["salt://requirements-1.txt", "salt://requirements-2.txt"] ++ ++ expected = [sys.executable, "-m", "pip", "install"] ++ for item in cached_reqs: ++ expected.extend(["--requirement", item]) ++ ++ # Passing option as a list ++ mock = MagicMock(return_value={"retcode": 0, "stdout": ""}) ++ with patch.dict(pip.__salt__, {"cmd.run_all": mock}): ++ pip.install(requirements=requirements) ++ mock.assert_called_with( ++ expected, ++ saltenv="base", ++ runas=None, ++ use_vt=False, ++ python_shell=False, ++ ) ++ ++ # Passing option as a comma separated list ++ get_cached_requirements.side_effect = cached_reqs ++ mock = MagicMock(return_value={"retcode": 0, "stdout": ""}) ++ with patch.dict(pip.__salt__, {"cmd.run_all": mock}): ++ pip.install(requirements=",".join(requirements)) ++ mock.assert_called_with( ++ expected, ++ saltenv="base", ++ runas=None, ++ use_vt=False, ++ python_shell=False, ++ ) ++ ++ # Passing option as a single string entry ++ get_cached_requirements.side_effect = [cached_reqs[0]] ++ mock = MagicMock(return_value={"retcode": 0, 
"stdout": ""}) ++ with patch.dict(pip.__salt__, {"cmd.run_all": mock}): ++ pip.install(requirements=requirements[0]) ++ expected = [ ++ sys.executable, ++ "-m", ++ "pip", ++ "install", ++ "--requirement", ++ cached_reqs[0], ++ ] ++ mock.assert_called_with( ++ expected, ++ saltenv="base", ++ runas=None, ++ use_vt=False, ++ python_shell=False, ++ ) ++ ++ ++def test_install_extra_args_arguments_in_resulting_command(): ++ pkg = "pep8" ++ mock = MagicMock(return_value={"retcode": 0, "stdout": ""}) ++ with patch.dict(pip.__salt__, {"cmd.run_all": mock}): ++ pip.install( ++ pkg, extra_args=[{"--latest-pip-kwarg": "param"}, "--latest-pip-arg"] ++ ) ++ expected = [ ++ sys.executable, ++ "-m", ++ "pip", ++ "install", ++ pkg, ++ "--latest-pip-kwarg", ++ "param", ++ "--latest-pip-arg", ++ ] ++ mock.assert_called_with( ++ expected, ++ saltenv="base", ++ runas=None, ++ use_vt=False, ++ python_shell=False, ++ ) ++ ++ ++def test_install_extra_args_arguments_recursion_error(): ++ pkg = "pep8" ++ mock = MagicMock(return_value={"retcode": 0, "stdout": ""}) ++ with patch.dict(pip.__salt__, {"cmd.run_all": mock}): ++ ++ pytest.raises( ++ TypeError, ++ lambda: pip.install( ++ pkg, extra_args=[{"--latest-pip-kwarg": ["param1", "param2"]}] ++ ), ++ ) ++ ++ pytest.raises( ++ TypeError, ++ lambda: pip.install( ++ pkg, extra_args=[{"--latest-pip-kwarg": [{"--too-deep": dict()}]}] ++ ), ++ ) ++ ++ ++def test_uninstall_multiple_requirements_arguments_in_resulting_command(): ++ with patch("salt.modules.pip._get_cached_requirements") as get_cached_requirements: ++ cached_reqs = ["my_cached_reqs-1", "my_cached_reqs-2"] ++ get_cached_requirements.side_effect = cached_reqs ++ requirements = ["salt://requirements-1.txt", "salt://requirements-2.txt"] ++ ++ expected = [sys.executable, "-m", "pip", "uninstall", "-y"] ++ for item in cached_reqs: ++ expected.extend(["--requirement", item]) ++ ++ # Passing option as a list ++ mock = MagicMock(return_value={"retcode": 0, "stdout": ""}) ++ with 
patch.dict(pip.__salt__, {"cmd.run_all": mock}): ++ pip.uninstall(requirements=requirements) ++ mock.assert_called_with( ++ expected, ++ cwd=None, ++ saltenv="base", ++ runas=None, ++ use_vt=False, ++ python_shell=False, ++ ) ++ ++ # Passing option as a comma separated list ++ get_cached_requirements.side_effect = cached_reqs ++ mock = MagicMock(return_value={"retcode": 0, "stdout": ""}) ++ with patch.dict(pip.__salt__, {"cmd.run_all": mock}): ++ pip.uninstall(requirements=",".join(requirements)) ++ mock.assert_called_with( ++ expected, ++ cwd=None, ++ saltenv="base", ++ runas=None, ++ use_vt=False, ++ python_shell=False, ++ ) ++ ++ # Passing option as a single string entry ++ get_cached_requirements.side_effect = [cached_reqs[0]] ++ mock = MagicMock(return_value={"retcode": 0, "stdout": ""}) ++ with patch.dict(pip.__salt__, {"cmd.run_all": mock}): ++ pip.uninstall(requirements=requirements[0]) ++ expected = [ ++ sys.executable, ++ "-m", ++ "pip", ++ "uninstall", ++ "-y", ++ "--requirement", ++ cached_reqs[0], ++ ] ++ mock.assert_called_with( ++ expected, ++ cwd=None, ++ saltenv="base", ++ runas=None, ++ use_vt=False, ++ python_shell=False, ++ ) ++ ++ ++def test_uninstall_global_proxy_in_resulting_command(): ++ """ ++ Checking that there is proxy set if global proxy is set. 
++ """ ++ pkg = "pep8" ++ proxy = "http://salt-user:salt-passwd@salt-proxy:3128" ++ mock = MagicMock(return_value={"retcode": 0, "stdout": ""}) ++ config_mock = { ++ "proxy_host": "salt-proxy", ++ "proxy_port": "3128", ++ "proxy_username": "salt-user", ++ "proxy_password": "salt-passwd", ++ } ++ with patch.dict(pip.__salt__, {"cmd.run_all": mock}): ++ with patch.dict(pip.__opts__, config_mock): ++ pip.uninstall(pkg) ++ expected = [ ++ sys.executable, ++ "-m", ++ "pip", ++ "uninstall", ++ "-y", ++ "--proxy", ++ proxy, ++ pkg, ++ ] ++ mock.assert_called_with( ++ expected, ++ saltenv="base", ++ cwd=None, ++ runas=None, ++ use_vt=False, ++ python_shell=False, ++ ) ++ ++ ++def test_uninstall_proxy_false_argument_in_resulting_command(): ++ """ ++ Checking that there is no proxy set if proxy arg is set to False ++ even if the global proxy is set. ++ """ ++ pkg = "pep8" ++ proxy = False ++ mock = MagicMock(return_value={"retcode": 0, "stdout": ""}) ++ config_mock = { ++ "proxy_host": "salt-proxy", ++ "proxy_port": "3128", ++ "proxy_username": "salt-user", ++ "proxy_password": "salt-passwd", ++ } ++ with patch.dict(pip.__salt__, {"cmd.run_all": mock}): ++ with patch.dict(pip.__opts__, config_mock): ++ pip.uninstall(pkg, proxy=proxy) ++ expected = [sys.executable, "-m", "pip", "uninstall", "-y", pkg] ++ mock.assert_called_with( ++ expected, ++ saltenv="base", ++ cwd=None, ++ runas=None, ++ use_vt=False, ++ python_shell=False, ++ ) ++ ++ ++def test_uninstall_log_argument_in_resulting_command(): ++ pkg = "pep8" ++ log_path = "/tmp/pip-install.log" ++ ++ mock = MagicMock(return_value={"retcode": 0, "stdout": ""}) ++ with patch.dict(pip.__salt__, {"cmd.run_all": mock}): ++ pip.uninstall(pkg, log=log_path) ++ expected = [ ++ sys.executable, ++ "-m", ++ "pip", ++ "uninstall", ++ "-y", ++ "--log", ++ log_path, ++ pkg, ++ ] ++ mock.assert_called_with( ++ expected, ++ saltenv="base", ++ cwd=None, ++ runas=None, ++ use_vt=False, ++ python_shell=False, ++ ) ++ ++ # Let's fake a 
non-writable log file ++ with patch("os.path") as mock_path: ++ mock_path.exists.side_effect = IOError("Fooo!") ++ mock = MagicMock(return_value={"retcode": 0, "stdout": ""}) ++ with patch.dict(pip.__salt__, {"cmd.run_all": mock}): ++ pytest.raises(IOError, pip.uninstall, pkg, log=log_path) ++ ++ ++def test_uninstall_timeout_argument_in_resulting_command(): ++ pkg = "pep8" ++ expected = [sys.executable, "-m", "pip", "uninstall", "-y", "--timeout"] ++ # Passing an int ++ mock = MagicMock(return_value={"retcode": 0, "stdout": ""}) ++ with patch.dict(pip.__salt__, {"cmd.run_all": mock}): ++ pip.uninstall(pkg, timeout=10) ++ mock.assert_called_with( ++ expected + [10, pkg], ++ cwd=None, ++ saltenv="base", ++ runas=None, ++ use_vt=False, ++ python_shell=False, ++ ) ++ ++ # Passing an int as a string ++ mock = MagicMock(return_value={"retcode": 0, "stdout": ""}) ++ with patch.dict(pip.__salt__, {"cmd.run_all": mock}): ++ pip.uninstall(pkg, timeout="10") ++ mock.assert_called_with( ++ expected + ["10", pkg], ++ cwd=None, ++ saltenv="base", ++ runas=None, ++ use_vt=False, ++ python_shell=False, ++ ) ++ ++ # Passing a non-int to timeout ++ mock = MagicMock(return_value={"retcode": 0, "stdout": ""}) ++ with patch.dict(pip.__salt__, {"cmd.run_all": mock}): ++ pytest.raises(ValueError, pip.uninstall, pkg, timeout="a") ++ ++ ++def test_freeze_command(): ++ expected = [sys.executable, "-m", "pip", "freeze"] ++ eggs = [ ++ "M2Crypto==0.21.1", ++ "-e git+git@github.com:s0undt3ch/salt-testing.git@9ed81aa2f918d59d3706e56b18f0782d1ea43bf8#egg=SaltTesting-dev", ++ "bbfreeze==1.1.0", ++ "bbfreeze-loader==1.1.0", ++ "pycrypto==2.6", ++ ] ++ mock = MagicMock(return_value={"retcode": 0, "stdout": "\n".join(eggs)}) ++ with patch.dict(pip.__salt__, {"cmd.run_all": mock}): ++ with patch("salt.modules.pip.version", MagicMock(return_value="6.1.1")): ++ ret = pip.freeze() ++ mock.assert_called_with( ++ expected, ++ cwd=None, ++ runas=None, ++ use_vt=False, ++ python_shell=False, ++ ) ++ assert 
ret == eggs ++ ++ mock = MagicMock(return_value={"retcode": 0, "stdout": "\n".join(eggs)}) ++ # Passing env_vars passes them to underlying command? ++ with patch.dict(pip.__salt__, {"cmd.run_all": mock}): ++ with patch("salt.modules.pip.version", MagicMock(return_value="6.1.1")): ++ ret = pip.freeze(env_vars={"foo": "bar"}) ++ mock.assert_called_with( ++ expected, ++ cwd=None, ++ runas=None, ++ use_vt=False, ++ python_shell=False, ++ env={"foo": "bar"}, ++ ) ++ assert ret == eggs ++ ++ # Non zero returncode raises exception? ++ mock = MagicMock(return_value={"retcode": 1, "stderr": "CABOOOOMMM!"}) ++ with patch.dict(pip.__salt__, {"cmd.run_all": mock}): ++ with patch("salt.modules.pip.version", MagicMock(return_value="6.1.1")): ++ pytest.raises( ++ CommandExecutionError, ++ pip.freeze, ++ ) ++ ++ ++def test_freeze_command_with_all(): ++ eggs = [ ++ "M2Crypto==0.21.1", ++ "-e git+git@github.com:s0undt3ch/salt-testing.git@9ed81aa2f918d59d3706e56b18f0782d1ea43bf8#egg=SaltTesting-dev", ++ "bbfreeze==1.1.0", ++ "bbfreeze-loader==1.1.0", ++ "pip==0.9.1", ++ "pycrypto==2.6", ++ "setuptools==20.10.1", ++ ] ++ mock = MagicMock(return_value={"retcode": 0, "stdout": "\n".join(eggs)}) ++ with patch.dict(pip.__salt__, {"cmd.run_all": mock}): ++ with patch("salt.modules.pip.version", MagicMock(return_value="9.0.1")): ++ ret = pip.freeze() ++ expected = [sys.executable, "-m", "pip", "freeze", "--all"] ++ mock.assert_called_with( ++ expected, ++ cwd=None, ++ runas=None, ++ use_vt=False, ++ python_shell=False, ++ ) ++ assert ret == eggs ++ ++ # Non zero returncode raises exception? 
++ mock = MagicMock(return_value={"retcode": 1, "stderr": "CABOOOOMMM!"}) ++ with patch.dict(pip.__salt__, {"cmd.run_all": mock}): ++ with patch("salt.modules.pip.version", MagicMock(return_value="9.0.1")): ++ pytest.raises( ++ CommandExecutionError, ++ pip.freeze, ++ ) ++ ++ ++def test_list_command(): ++ eggs = [ ++ "M2Crypto==0.21.1", ++ "-e git+git@github.com:s0undt3ch/salt-testing.git@9ed81aa2f918d59d3706e56b18f0782d1ea43bf8#egg=SaltTesting-dev", ++ "bbfreeze==1.1.0", ++ "bbfreeze-loader==1.1.0", ++ "pycrypto==2.6", ++ ] ++ mock_version = "6.1.1" ++ mock = MagicMock(return_value={"retcode": 0, "stdout": "\n".join(eggs)}) ++ with patch.dict(pip.__salt__, {"cmd.run_all": mock}): ++ with patch("salt.modules.pip.version", MagicMock(return_value=mock_version)): ++ ret = pip.list_() ++ expected = [sys.executable, "-m", "pip", "freeze"] ++ mock.assert_called_with( ++ expected, ++ cwd=None, ++ runas=None, ++ python_shell=False, ++ use_vt=False, ++ ) ++ assert ret == { ++ "SaltTesting-dev": "git+git@github.com:s0undt3ch/salt-testing.git@9ed81aa2f918d59d3706e56b18f0782d1ea43bf8", ++ "M2Crypto": "0.21.1", ++ "bbfreeze-loader": "1.1.0", ++ "bbfreeze": "1.1.0", ++ "pip": mock_version, ++ "pycrypto": "2.6", ++ } ++ ++ # Non zero returncode raises exception? ++ mock = MagicMock(return_value={"retcode": 1, "stderr": "CABOOOOMMM!"}) ++ with patch.dict(pip.__salt__, {"cmd.run_all": mock}): ++ with patch("salt.modules.pip.version", MagicMock(return_value="6.1.1")): ++ pytest.raises( ++ CommandExecutionError, ++ pip.list_, ++ ) ++ ++ ++def test_list_command_with_all(): ++ eggs = [ ++ "M2Crypto==0.21.1", ++ "-e git+git@github.com:s0undt3ch/salt-testing.git@9ed81aa2f918d59d3706e56b18f0782d1ea43bf8#egg=SaltTesting-dev", ++ "bbfreeze==1.1.0", ++ "bbfreeze-loader==1.1.0", ++ "pip==9.0.1", ++ "pycrypto==2.6", ++ "setuptools==20.10.1", ++ ] ++ # N.B.: this is deliberately different from the "output" of pip freeze. 
++ # This is to demonstrate that the version reported comes from freeze ++ # instead of from the pip.version function. ++ mock_version = "9.0.0" ++ mock = MagicMock(return_value={"retcode": 0, "stdout": "\n".join(eggs)}) ++ with patch.dict(pip.__salt__, {"cmd.run_all": mock}): ++ with patch("salt.modules.pip.version", MagicMock(return_value=mock_version)): ++ ret = pip.list_() ++ expected = [sys.executable, "-m", "pip", "freeze", "--all"] ++ mock.assert_called_with( ++ expected, ++ cwd=None, ++ runas=None, ++ python_shell=False, ++ use_vt=False, ++ ) ++ assert ret == { ++ "SaltTesting-dev": "git+git@github.com:s0undt3ch/salt-testing.git@9ed81aa2f918d59d3706e56b18f0782d1ea43bf8", ++ "M2Crypto": "0.21.1", ++ "bbfreeze-loader": "1.1.0", ++ "bbfreeze": "1.1.0", ++ "pip": "9.0.1", ++ "pycrypto": "2.6", ++ "setuptools": "20.10.1", ++ } ++ ++ # Non zero returncode raises exception? ++ mock = MagicMock(return_value={"retcode": 1, "stderr": "CABOOOOMMM!"}) ++ with patch.dict(pip.__salt__, {"cmd.run_all": mock}): ++ with patch("salt.modules.pip.version", MagicMock(return_value="6.1.1")): ++ pytest.raises( ++ CommandExecutionError, ++ pip.list_, ++ ) ++ ++ ++def test_list_command_with_prefix(): ++ eggs = [ ++ "M2Crypto==0.21.1", ++ "-e git+git@github.com:s0undt3ch/salt-testing.git@9ed81aa2f918d59d3706e56b18f0782d1ea43bf8#egg=SaltTesting-dev", ++ "bbfreeze==1.1.0", ++ "bbfreeze-loader==1.1.0", ++ "pycrypto==2.6", ++ ] ++ mock = MagicMock(return_value={"retcode": 0, "stdout": "\n".join(eggs)}) ++ with patch.dict(pip.__salt__, {"cmd.run_all": mock}): ++ with patch("salt.modules.pip.version", MagicMock(return_value="6.1.1")): ++ ret = pip.list_(prefix="bb") ++ expected = [sys.executable, "-m", "pip", "freeze"] ++ mock.assert_called_with( ++ expected, ++ cwd=None, ++ runas=None, ++ python_shell=False, ++ use_vt=False, ++ ) ++ assert ret == {"bbfreeze-loader": "1.1.0", "bbfreeze": "1.1.0"} ++ ++ ++def test_list_upgrades_legacy(): ++ eggs = [ ++ "apache-libcloud (Current: 1.1.0 
Latest: 2.2.1 [wheel])", ++ "appdirs (Current: 1.4.1 Latest: 1.4.3 [wheel])", ++ "awscli (Current: 1.11.63 Latest: 1.12.1 [sdist])", ++ ] ++ mock = MagicMock(return_value={"retcode": 0, "stdout": "\n".join(eggs)}) ++ with patch.dict(pip.__salt__, {"cmd.run_all": mock}): ++ with patch("salt.modules.pip.version", MagicMock(return_value="6.1.1")): ++ ret = pip.list_upgrades() ++ mock.assert_called_with( ++ [sys.executable, "-m", "pip", "list", "--outdated"], ++ cwd=None, ++ runas=None, ++ ) ++ assert ret == { ++ "apache-libcloud": "2.2.1 [wheel]", ++ "appdirs": "1.4.3 [wheel]", ++ "awscli": "1.12.1 [sdist]", ++ } ++ ++ ++def test_list_upgrades_gt9(): ++ eggs = """[{"latest_filetype": "wheel", "version": "1.1.0", "name": "apache-libcloud", "latest_version": "2.2.1"}, ++ {"latest_filetype": "wheel", "version": "1.4.1", "name": "appdirs", "latest_version": "1.4.3"}, ++ {"latest_filetype": "sdist", "version": "1.11.63", "name": "awscli", "latest_version": "1.12.1"} ++ ]""" ++ mock = MagicMock(return_value={"retcode": 0, "stdout": "{}".format(eggs)}) ++ with patch.dict(pip.__salt__, {"cmd.run_all": mock}): ++ with patch("salt.modules.pip.version", MagicMock(return_value="9.1.1")): ++ ret = pip.list_upgrades() ++ mock.assert_called_with( ++ [ ++ sys.executable, ++ "-m", ++ "pip", ++ "list", ++ "--outdated", ++ "--format=json", ++ ], ++ cwd=None, ++ runas=None, ++ ) ++ assert ret == { ++ "apache-libcloud": "2.2.1 [wheel]", ++ "appdirs": "1.4.3 [wheel]", ++ "awscli": "1.12.1 [sdist]", ++ } ++ ++ ++def test_is_installed_true(): ++ eggs = [ ++ "M2Crypto==0.21.1", ++ "-e git+git@github.com:s0undt3ch/salt-testing.git@9ed81aa2f918d59d3706e56b18f0782d1ea43bf8#egg=SaltTesting-dev", ++ "bbfreeze==1.1.0", ++ "bbfreeze-loader==1.1.0", ++ "pycrypto==2.6", ++ ] ++ mock = MagicMock(return_value={"retcode": 0, "stdout": "\n".join(eggs)}) ++ with patch.dict(pip.__salt__, {"cmd.run_all": mock}): ++ with patch("salt.modules.pip.version", MagicMock(return_value="6.1.1")): ++ ret = 
pip.is_installed(pkgname="bbfreeze") ++ mock.assert_called_with( ++ [sys.executable, "-m", "pip", "freeze"], ++ cwd=None, ++ runas=None, ++ python_shell=False, ++ use_vt=False, ++ ) ++ assert ret ++ ++ ++def test_is_installed_false(): ++ eggs = [ ++ "M2Crypto==0.21.1", ++ "-e git+git@github.com:s0undt3ch/salt-testing.git@9ed81aa2f918d59d3706e56b18f0782d1ea43bf8#egg=SaltTesting-dev", ++ "bbfreeze==1.1.0", ++ "bbfreeze-loader==1.1.0", ++ "pycrypto==2.6", ++ ] ++ mock = MagicMock(return_value={"retcode": 0, "stdout": "\n".join(eggs)}) ++ with patch.dict(pip.__salt__, {"cmd.run_all": mock}): ++ with patch("salt.modules.pip.version", MagicMock(return_value="6.1.1")): ++ ret = pip.is_installed(pkgname="notexist") ++ mock.assert_called_with( ++ [sys.executable, "-m", "pip", "freeze"], ++ cwd=None, ++ runas=None, ++ python_shell=False, ++ use_vt=False, ++ ) ++ assert not ret ++ ++ ++def test_install_pre_argument_in_resulting_command(): ++ pkg = "pep8" ++ # Lower than 1.4 versions don't end up with `--pre` in the resulting output ++ mock = MagicMock( ++ side_effect=[ ++ {"retcode": 0, "stdout": "pip 1.2.0 /path/to/site-packages/pip"}, ++ {"retcode": 0, "stdout": ""}, ++ ] ++ ) ++ with patch.dict(pip.__salt__, {"cmd.run_all": mock}): ++ with patch("salt.modules.pip.version", MagicMock(return_value="1.3")): ++ pip.install(pkg, pre_releases=True) ++ expected = [sys.executable, "-m", "pip", "install", pkg] ++ mock.assert_called_with( ++ expected, ++ saltenv="base", ++ runas=None, ++ use_vt=False, ++ python_shell=False, ++ ) ++ ++ mock_run = MagicMock(return_value="pip 1.4.1 /path/to/site-packages/pip") ++ mock_run_all = MagicMock(return_value={"retcode": 0, "stdout": ""}) ++ with patch.dict( ++ pip.__salt__, {"cmd.run_stdout": mock_run, "cmd.run_all": mock_run_all} ++ ): ++ with patch("salt.modules.pip._get_pip_bin", MagicMock(return_value=["pip"])): ++ pip.install(pkg, pre_releases=True) ++ expected = ["pip", "install", "--pre", pkg] ++ mock_run_all.assert_called_with( ++ 
expected, ++ saltenv="base", ++ runas=None, ++ use_vt=False, ++ python_shell=False, ++ ) ++ ++ ++def test_resolve_requirements_chain_function(): ++ with patch("salt.utils.files.fopen", FakeFopen): ++ chain = pip._resolve_requirements_chain( ++ ["requirements-0.txt", "requirements-3.txt"] ++ ) ++ assert chain == [ ++ "requirements-0.txt", ++ "requirements-1.txt", ++ "requirements-2.txt", ++ "requirements-3.txt", ++ "requirements-4.txt", ++ ] ++ ++ ++def test_when_upgrade_is_called_and_there_are_available_upgrades_it_should_call_correct_command( ++ expected_user, ++): ++ fake_run_all = MagicMock(return_value={"retcode": 0, "stdout": ""}) ++ pip_user = expected_user ++ with patch.dict(pip.__salt__, {"cmd.run_all": fake_run_all}), patch( ++ "salt.modules.pip.list_upgrades", autospec=True, return_value=[pip_user] ++ ), patch( ++ "salt.modules.pip._get_pip_bin", ++ autospec=True, ++ return_value=["some-other-pip"], ++ ): ++ pip.upgrade(user=pip_user) ++ ++ fake_run_all.assert_any_call( ++ ["some-other-pip", "install", "-U", "freeze", "--all", pip_user], ++ runas=pip_user, ++ cwd=None, ++ use_vt=False, ++ ) ++ ++ ++def test_when_list_upgrades_is_provided_a_user_it_should_be_passed_to_the_version_command( ++ expected_user, ++): ++ fake_run_all = MagicMock(return_value={"retcode": 0, "stdout": "{}"}) ++ pip_user = expected_user ++ ++ def all_new_commands(*args, **kwargs): ++ """ ++ Without this, mutating the return value mutates the return value ++ for EVERYTHING. 
++ """ ++ return ["some-other-pip"] ++ ++ with patch.dict(pip.__salt__, {"cmd.run_all": fake_run_all}), patch( ++ "salt.modules.pip._get_pip_bin", ++ autospec=True, ++ side_effect=all_new_commands, ++ ): ++ pip._clear_context() ++ pip.list_upgrades(user=pip_user) ++ fake_run_all.assert_any_call( ++ ["some-other-pip", "--version"], ++ runas=expected_user, ++ cwd=None, ++ python_shell=False, ++ ) ++ ++ ++def test_when_install_is_provided_a_user_it_should_be_passed_to_the_version_command( ++ expected_user, ++): ++ fake_run_all = MagicMock(return_value={"retcode": 0, "stdout": "{}"}) ++ pip_user = expected_user ++ ++ def all_new_commands(*args, **kwargs): ++ """ ++ Without this, mutating the return value mutates the return value ++ for EVERYTHING. ++ """ ++ return ["some-other-pip"] ++ ++ with patch.dict(pip.__salt__, {"cmd.run_all": fake_run_all}), patch( ++ "salt.modules.pip._get_pip_bin", ++ autospec=True, ++ side_effect=all_new_commands, ++ ): ++ pip._clear_context() ++ pip.install(user=pip_user) ++ fake_run_all.assert_any_call( ++ ["some-other-pip", "--version"], ++ runas=pip_user, ++ cwd=None, ++ python_shell=False, ++ ) ++ ++ ++def test_when_version_is_called_with_a_user_it_should_be_passed_to_undelying_runas( ++ expected_user, ++): ++ fake_run_all = MagicMock(return_value={"retcode": 0, "stdout": ""}) ++ pip_user = expected_user ++ with patch.dict(pip.__salt__, {"cmd.run_all": fake_run_all}), patch( ++ "salt.modules.pip.list_upgrades", autospec=True, return_value=[pip_user] ++ ), patch( ++ "salt.modules.pip._get_pip_bin", ++ autospec=True, ++ return_value=["some-new-pip"], ++ ): ++ pip.version(user=pip_user) ++ fake_run_all.assert_called_with( ++ ["some-new-pip", "--version"], ++ runas=pip_user, ++ cwd=None, ++ python_shell=False, ++ ) + + +def test_install_target_from_VENV_PIP_TARGET_in_resulting_command(): @@ -82,6 +1856,6 @@ index 405ec6c82e..ae9005d806 100644 + python_shell=False, + ) -- -2.37.3 +2.36.1 diff --git 
a/state.apply-don-t-check-for-cached-pillar-errors.patch b/state.apply-don-t-check-for-cached-pillar-errors.patch index a724ea5..05200a1 100644 --- a/state.apply-don-t-check-for-cached-pillar-errors.patch +++ b/state.apply-don-t-check-for-cached-pillar-errors.patch @@ -1,4 +1,4 @@ -From cba6455bd0480bfb80c466a2b34a702a9afb5bd5 Mon Sep 17 00:00:00 2001 +From 5880703551d82a68a0e2f3108878124d8ae98bf0 Mon Sep 17 00:00:00 2001 From: Alexander Graul Date: Tue, 25 Jan 2022 17:20:55 +0100 Subject: [PATCH] state.apply: don't check for cached pillar errors @@ -45,7 +45,7 @@ index 0000000000..af885d77fa @@ -0,0 +1 @@ +Don't check for cached pillar errors on state.apply diff --git a/salt/modules/state.py b/salt/modules/state.py -index f214291328..c0feabe842 100644 +index 0c3dfc3317..0027744229 100644 --- a/salt/modules/state.py +++ b/salt/modules/state.py @@ -106,18 +106,17 @@ def _set_retcode(ret, highstate=None): @@ -357,6 +357,6 @@ index 02fd2dd307..30cda303cc 100644 + state._get_pillar_errors(kwargs={}, pillar=pillar.fresh) == expected_errors + ) -- -2.37.3 +2.34.1 diff --git a/state.orchestrate_single-does-not-pass-pillar-none-4.patch b/state.orchestrate_single-does-not-pass-pillar-none-4.patch index 511ee81..59d30ff 100644 --- a/state.orchestrate_single-does-not-pass-pillar-none-4.patch +++ b/state.orchestrate_single-does-not-pass-pillar-none-4.patch @@ -1,4 +1,4 @@ -From 634e82874b17c38bd4d27c0c07a53c9e39e49968 Mon Sep 17 00:00:00 2001 +From d44207dc209894b36f2a2c8af4c81afcd86b4625 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Pablo=20Su=C3=A1rez=20Hern=C3=A1ndez?= Date: Wed, 9 Feb 2022 09:01:08 +0000 @@ -100,6 +100,6 @@ index 0000000000..df0a718a41 + ) + assert "pillar" not in mock_state_single.call_args.kwargs -- -2.37.3 +2.35.1 diff --git a/support-transactional-systems-microos.patch b/support-transactional-systems-microos.patch new file mode 100644 index 0000000..831dbc2 --- /dev/null +++ b/support-transactional-systems-microos.patch @@ -0,0 +1,226 @@ +From 
5d7b9be571b765faae5cefc5a0810c61c2a25814 Mon Sep 17 00:00:00 2001 +From: Alexander Graul +Date: Tue, 18 Jan 2022 17:36:12 +0100 +Subject: [PATCH] Support transactional systems (MicroOS) + +Add rebootmgr module + +Add transactional_update module + +chroot: add chroot detector + +systemd: add offline mode detector + +transactional_update: add pending_transaction detector + +extra: add EFI and transactional grains + +transactional_update: add call, apply_, sls & highstate + +transactional_update: add documentation + +transactional_update: add executor + +Add changelog entry 58519.added + +Closes #58519 + +transactional_update: update the cleanups family + +transactional_update: add activate_transaction param + +transactional_update: skip tests on Windows + +transactional_update: unify with chroot.call + +Return for both .call() "retcode" when fail + +Add MicroOS information in release note + +systemd: support NamedLoaderContext + +transactional_update: detect recursion in the executor + +Handle master tops data when states are applied by transactional_update + +Fix unit tests for transactional_update module + +Do noop for services states when running systemd in offline mode + +transactional_updates: do not execute states in parallel but use a queue + +Add changes suggested by pre-commit + +Fix unit tests for transactional_updates module + +Add unit tests to cover queue cases on transaction_update states + +Refactor offline checkers and add unit tests + +Fix regression that always consider offline mode + +Add proper mocking and skip tests when running in offline mode + +Fix failing unit tests for systemd + +test_rebootmgr: convert to pytest + +test_transactional_update: convert to pytest + +Update release documentation to 3004 +--- + changelog/58519.added | 1 + + salt/modules/chroot.py | 10 +++++++--- + salt/modules/transactional_update.py | 8 ++++---- + .../unit/modules/test_transactional_update.py | 10 +++++++++- + tests/unit/modules/test_chroot.py | 13 ++++++++++++- 
+ 5 files changed, 33 insertions(+), 9 deletions(-) + create mode 100644 changelog/58519.added + +diff --git a/changelog/58519.added b/changelog/58519.added +new file mode 100644 +index 0000000000..1cc8d7dc74 +--- /dev/null ++++ b/changelog/58519.added +@@ -0,0 +1 @@ ++Add support for transactional systems, like openSUSE MicroOS +\ No newline at end of file +diff --git a/salt/modules/chroot.py b/salt/modules/chroot.py +index 39dfff6b86..91f139455b 100644 +--- a/salt/modules/chroot.py ++++ b/salt/modules/chroot.py +@@ -242,7 +242,11 @@ def _create_and_execute_salt_state(root, chunks, file_refs, test, hash_type): + # Create the tar containing the state pkg and relevant files. + salt.client.ssh.wrapper.state._cleanup_slsmod_low_data(chunks) + trans_tar = salt.client.ssh.state.prep_trans_tar( +- salt.fileclient.get_file_client(__opts__), chunks, file_refs, __pillar__, root ++ salt.fileclient.get_file_client(__opts__), ++ chunks, ++ file_refs, ++ __pillar__.value(), ++ root, + ) + trans_tar_sum = salt.utils.hashutils.get_hash(trans_tar, hash_type) + +@@ -303,7 +307,7 @@ def sls(root, mods, saltenv="base", test=None, exclude=None, **kwargs): + """ + # Get a copy of the pillar data, to avoid overwriting the current + # pillar, instead the one delegated +- pillar = copy.deepcopy(__pillar__) ++ pillar = copy.deepcopy(__pillar__.value()) + pillar.update(kwargs.get("pillar", {})) + + # Clone the options data and apply some default values. May not be +@@ -372,7 +376,7 @@ def highstate(root, **kwargs): + """ + # Get a copy of the pillar data, to avoid overwriting the current + # pillar, instead the one delegated +- pillar = copy.deepcopy(__pillar__) ++ pillar = copy.deepcopy(__pillar__.value()) + pillar.update(kwargs.get("pillar", {})) + + # Clone the options data and apply some default values. 
May not be +diff --git a/salt/modules/transactional_update.py b/salt/modules/transactional_update.py +index 6fcad40b35..799fe08e4d 100644 +--- a/salt/modules/transactional_update.py ++++ b/salt/modules/transactional_update.py +@@ -1052,7 +1052,7 @@ def _create_and_execute_salt_state( + # Create the tar containing the state pkg and relevant files. + salt.client.ssh.wrapper.state._cleanup_slsmod_low_data(chunks) + trans_tar = salt.client.ssh.state.prep_trans_tar( +- salt.fileclient.get_file_client(__opts__), chunks, file_refs, __pillar__ ++ salt.fileclient.get_file_client(__opts__), chunks, file_refs, __pillar__.value() + ) + trans_tar_sum = salt.utils.hashutils.get_hash(trans_tar, hash_type) + +@@ -1134,7 +1134,7 @@ def sls( + + # Get a copy of the pillar data, to avoid overwriting the current + # pillar, instead the one delegated +- pillar = copy.deepcopy(__pillar__) ++ pillar = copy.deepcopy(__pillar__.value()) + pillar.update(kwargs.get("pillar", {})) + + # Clone the options data and apply some default values. May not be +@@ -1218,7 +1218,7 @@ def highstate(activate_transaction=False, queue=False, **kwargs): + + # Get a copy of the pillar data, to avoid overwriting the current + # pillar, instead the one delegated +- pillar = copy.deepcopy(__pillar__) ++ pillar = copy.deepcopy(__pillar__.value()) + pillar.update(kwargs.get("pillar", {})) + + # Clone the options data and apply some default values. May not be +@@ -1284,7 +1284,7 @@ def single(fun, name, test=None, activate_transaction=False, queue=False, **kwar + + # Get a copy of the pillar data, to avoid overwriting the current + # pillar, instead the one delegated +- pillar = copy.deepcopy(__pillar__) ++ pillar = copy.deepcopy(__pillar__.value()) + pillar.update(kwargs.get("pillar", {})) + + # Clone the options data and apply some default values. 
May not be +diff --git a/tests/pytests/unit/modules/test_transactional_update.py b/tests/pytests/unit/modules/test_transactional_update.py +index f9eb1fd595..032ca0c9e8 100644 +--- a/tests/pytests/unit/modules/test_transactional_update.py ++++ b/tests/pytests/unit/modules/test_transactional_update.py +@@ -1,6 +1,7 @@ + import sys + + import pytest ++import salt.loader.context + import salt.modules.state as statemod + import salt.modules.transactional_update as tu + from salt.exceptions import CommandExecutionError +@@ -13,8 +14,15 @@ pytestmark = [ + + @pytest.fixture + def configure_loader_modules(): ++ loader_context = salt.loader.context.LoaderContext() + return { +- tu: {"__salt__": {}, "__utils__": {}}, ++ tu: { ++ "__salt__": {}, ++ "__utils__": {}, ++ "__pillar__": salt.loader.context.NamedLoaderContext( ++ "__pillar__", loader_context, {} ++ ), ++ }, + statemod: {"__salt__": {}, "__context__": {}}, + } + +diff --git a/tests/unit/modules/test_chroot.py b/tests/unit/modules/test_chroot.py +index cdbfcb0fab..9cdfeaf066 100644 +--- a/tests/unit/modules/test_chroot.py ++++ b/tests/unit/modules/test_chroot.py +@@ -27,6 +27,7 @@ + + import sys + ++import salt.loader.context + import salt.modules.chroot as chroot + import salt.utils.platform + from salt.exceptions import CommandExecutionError +@@ -42,7 +43,17 @@ class ChrootTestCase(TestCase, LoaderModuleMockMixin): + """ + + def setup_loader_modules(self): +- return {chroot: {"__salt__": {}, "__utils__": {}, "__opts__": {"cachedir": ""}}} ++ loader_context = salt.loader.context.LoaderContext() ++ return { ++ chroot: { ++ "__salt__": {}, ++ "__utils__": {}, ++ "__opts__": {"cachedir": ""}, ++ "__pillar__": salt.loader.context.NamedLoaderContext( ++ "__pillar__", loader_context, {} ++ ), ++ } ++ } + + @patch("os.path.isdir") + def test_exist(self, isdir): +-- +2.34.1 + + diff --git a/switch-firewalld-state-to-use-change_interface.patch b/switch-firewalld-state-to-use-change_interface.patch index d76ff18..6e7a03b 
100644 --- a/switch-firewalld-state-to-use-change_interface.patch +++ b/switch-firewalld-state-to-use-change_interface.patch @@ -1,4 +1,4 @@ -From 995f67741e591b60fcecff87d4d97099ca82bafc Mon Sep 17 00:00:00 2001 +From c3e73e4a9e0c81b5dca198fac5c49a1ae91c7111 Mon Sep 17 00:00:00 2001 From: Alexander Graul Date: Tue, 18 Jan 2022 17:12:04 +0100 Subject: [PATCH] Switch firewalld state to use change_interface @@ -67,6 +67,6 @@ index cc6eaba5c3..534b9dd62d 100644 ret["comment"] = "Error: {}".format(err) return ret -- -2.37.3 +2.34.1 diff --git a/temporary-fix-extend-the-whitelist-of-allowed-comman.patch b/temporary-fix-extend-the-whitelist-of-allowed-comman.patch index f817e6b..73c85d5 100644 --- a/temporary-fix-extend-the-whitelist-of-allowed-comman.patch +++ b/temporary-fix-extend-the-whitelist-of-allowed-comman.patch @@ -1,4 +1,4 @@ -From e4374250234ab74bce0ef86d95cb30e873f0af3c Mon Sep 17 00:00:00 2001 +From 2bfea35790926e3d4cbecd5f78f695deb8b086ec Mon Sep 17 00:00:00 2001 From: Bo Maryniuk Date: Thu, 24 Jan 2019 18:12:35 +0100 Subject: [PATCH] temporary fix: extend the whitelist of allowed commands @@ -8,7 +8,7 @@ Subject: [PATCH] temporary fix: extend the whitelist of allowed commands 1 file changed, 3 insertions(+) diff --git a/salt/auth/__init__.py b/salt/auth/__init__.py -index afd7ad65fa..0c64755235 100644 +index 6e970173b4..3b73c2ec08 100644 --- a/salt/auth/__init__.py +++ b/salt/auth/__init__.py @@ -12,6 +12,7 @@ so that any external authentication system can be used inside of Salt @@ -29,6 +29,6 @@ index afd7ad65fa..0c64755235 100644 "print_event", "raw", -- -2.37.3 +2.33.0 diff --git a/update-target-fix-for-salt-ssh-to-process-targets-li.patch b/update-target-fix-for-salt-ssh-to-process-targets-li.patch index aa57dd8..d5bfd41 100644 --- a/update-target-fix-for-salt-ssh-to-process-targets-li.patch +++ b/update-target-fix-for-salt-ssh-to-process-targets-li.patch @@ -1,4 +1,4 @@ -From 9e527322ee0f5c7e1f9b7c16f14be9c349cf8e55 Mon Sep 17 00:00:00 2001 +From 
74d3d43d09c692ba41138278f34d2a2e2ef83dd8 Mon Sep 17 00:00:00 2001 From: Victor Zhestkov <35733135+vzhestkov@users.noreply.github.com> Date: Fri, 9 Apr 2021 16:01:32 +0300 Subject: [PATCH] Update target fix for salt-ssh to process targets list @@ -14,10 +14,10 @@ Regression fix of salt-ssh on processing targets (#353) 1 file changed, 29 insertions(+), 17 deletions(-) diff --git a/salt/client/ssh/__init__.py b/salt/client/ssh/__init__.py -index ef162d2270..ad2796bc87 100644 +index 564d622ab5..37faa869bc 100644 --- a/salt/client/ssh/__init__.py +++ b/salt/client/ssh/__init__.py -@@ -338,7 +338,7 @@ class SSH(MultiprocessingStateMixin): +@@ -327,7 +327,7 @@ class SSH: if not self.opts.get("ssh_cli_tgt"): self.opts["ssh_cli_tgt"] = self.opts.get("tgt", "") hostname = self.opts.get("ssh_cli_tgt", "") @@ -26,7 +26,7 @@ index ef162d2270..ad2796bc87 100644 user, hostname = hostname.split("@", 1) else: user = self.opts.get("ssh_user") -@@ -393,7 +393,7 @@ class SSH(MultiprocessingStateMixin): +@@ -378,7 +378,7 @@ class SSH: self.__parsed_rosters[self.ROSTER_UPDATE_FLAG] = False return @@ -35,7 +35,7 @@ index ef162d2270..ad2796bc87 100644 """ Update default flat roster with the passed in information. :return: -@@ -407,8 +407,8 @@ class SSH(MultiprocessingStateMixin): +@@ -392,8 +392,8 @@ class SSH: " host: {hostname}\n user: {user}\n passwd: {passwd}\n".format( s_user=getpass.getuser(), s_time=datetime.datetime.utcnow().isoformat(), @@ -46,7 +46,7 @@ index ef162d2270..ad2796bc87 100644 passwd=self.opts.get("ssh_passwd", ""), ) ) -@@ -425,20 +425,32 @@ class SSH(MultiprocessingStateMixin): +@@ -410,20 +410,32 @@ class SSH: Uptade targets in case hostname was directly passed without the roster. 
:return: """ @@ -93,6 +93,6 @@ index ef162d2270..ad2796bc87 100644 def get_pubkey(self): """ -- -2.37.3 +2.34.1 diff --git a/use-adler32-algorithm-to-compute-string-checksums.patch b/use-adler32-algorithm-to-compute-string-checksums.patch index a0e57a5..546e45f 100644 --- a/use-adler32-algorithm-to-compute-string-checksums.patch +++ b/use-adler32-algorithm-to-compute-string-checksums.patch @@ -1,4 +1,4 @@ -From a2f8b54cd56eb83552eccf87afd9b67f6a4a6f4c Mon Sep 17 00:00:00 2001 +From 466b188b52b064cbdda6cf3efa73da3861be8307 Mon Sep 17 00:00:00 2001 From: Alexander Graul Date: Tue, 18 Jan 2022 16:36:57 +0100 Subject: [PATCH] Use Adler32 algorithm to compute string checksums @@ -24,10 +24,10 @@ Remove deprecated warning that breaks miniion execution when "server_id_use_crc" 2 files changed, 48 insertions(+), 4 deletions(-) diff --git a/salt/config/__init__.py b/salt/config/__init__.py -index a37f7eebcc..7cdee12c4d 100644 +index 97a7fce2f0..2c42290598 100644 --- a/salt/config/__init__.py +++ b/salt/config/__init__.py -@@ -964,6 +964,9 @@ VALID_OPTS = immutabletypes.freeze( +@@ -957,6 +957,9 @@ VALID_OPTS = immutabletypes.freeze( # The port to be used when checking if a master is connected to a # minion "remote_minions_port": int, @@ -37,7 +37,7 @@ index a37f7eebcc..7cdee12c4d 100644 } ) -@@ -1267,6 +1270,7 @@ DEFAULT_MINION_OPTS = immutabletypes.freeze( +@@ -1260,6 +1263,7 @@ DEFAULT_MINION_OPTS = immutabletypes.freeze( "disabled_requisites": [], "reactor_niceness": None, "fips_mode": False, @@ -46,7 +46,7 @@ index a37f7eebcc..7cdee12c4d 100644 ) diff --git a/salt/grains/core.py b/salt/grains/core.py -index b55ab4e472..23d8b8ea42 100644 +index 1077e64a11..dd0bd346b2 100644 --- a/salt/grains/core.py +++ b/salt/grains/core.py @@ -21,6 +21,7 @@ import subprocess @@ -57,7 +57,7 @@ index b55ab4e472..23d8b8ea42 100644 from errno import EACCES, EPERM import distro -@@ -3015,6 +3016,36 @@ def _hw_data(osdata): +@@ -3017,6 +3018,36 @@ def _hw_data(osdata): return grains @@ -94,7 
+94,7 @@ index b55ab4e472..23d8b8ea42 100644 def get_server_id(): """ Provides an integer based on the FQDN of a machine. -@@ -3025,10 +3056,19 @@ def get_server_id(): +@@ -3027,10 +3058,19 @@ def get_server_id(): # server_id if salt.utils.platform.is_proxy(): @@ -119,6 +119,6 @@ index b55ab4e472..23d8b8ea42 100644 def get_master(): -- -2.37.3 +2.34.1 diff --git a/use-rlock-to-avoid-deadlocks-in-salt-ssh.patch b/use-rlock-to-avoid-deadlocks-in-salt-ssh.patch deleted file mode 100644 index fe104ae..0000000 --- a/use-rlock-to-avoid-deadlocks-in-salt-ssh.patch +++ /dev/null @@ -1,27 +0,0 @@ -From c6be36eeea49ee0d0641da272087305f79c32c99 Mon Sep 17 00:00:00 2001 -From: =?UTF-8?q?Pablo=20Su=C3=A1rez=20Hern=C3=A1ndez?= - -Date: Wed, 4 Jan 2023 13:11:50 +0000 -Subject: [PATCH] Use RLock to avoid deadlocks in salt-ssh - ---- - salt/loader/__init__.py | 2 +- - 1 file changed, 1 insertion(+), 1 deletion(-) - -diff --git a/salt/loader/__init__.py b/salt/loader/__init__.py -index bbe4269839..b41cc64b8e 100644 ---- a/salt/loader/__init__.py -+++ b/salt/loader/__init__.py -@@ -82,7 +82,7 @@ SALT_INTERNAL_LOADERS_PATHS = ( - str(SALT_BASE_PATH / "wheel"), - ) - --LOAD_LOCK = threading.Lock() -+LOAD_LOCK = threading.RLock() - - - def LazyLoader(*args, **kwargs): --- -2.37.3 - - diff --git a/use-salt-bundle-in-dockermod.patch b/use-salt-bundle-in-dockermod.patch index a5547e2..ad73b16 100644 --- a/use-salt-bundle-in-dockermod.patch +++ b/use-salt-bundle-in-dockermod.patch @@ -1,4 +1,4 @@ -From cd03f33b3ba1ebf267825d29b68d4e88e6a0021a Mon Sep 17 00:00:00 2001 +From ed53e3cbd62352b8d2af4d4b36c03e40981263bb Mon Sep 17 00:00:00 2001 From: Victor Zhestkov Date: Mon, 27 Jun 2022 17:59:24 +0300 Subject: [PATCH] Use Salt Bundle in dockermod @@ -12,7 +12,7 @@ Subject: [PATCH] Use Salt Bundle in dockermod 2 files changed, 241 insertions(+), 34 deletions(-) diff --git a/salt/modules/dockermod.py b/salt/modules/dockermod.py -index d8581586ca..ab5c2ac609 100644 +index fdded88dbb..e6b81e7f09 
100644 --- a/salt/modules/dockermod.py +++ b/salt/modules/dockermod.py @@ -201,14 +201,19 @@ import copy @@ -35,7 +35,7 @@ index d8581586ca..ab5c2ac609 100644 import time import uuid -@@ -6693,6 +6698,111 @@ def _compile_state(sls_opts, mods=None): +@@ -6682,6 +6687,111 @@ def _compile_state(sls_opts, mods=None): return st_.state.compile_high_data(high_data) @@ -147,7 +147,7 @@ index d8581586ca..ab5c2ac609 100644 def call(name, function, *args, **kwargs): """ Executes a Salt function inside a running container -@@ -6728,47 +6838,68 @@ def call(name, function, *args, **kwargs): +@@ -6717,47 +6827,68 @@ def call(name, function, *args, **kwargs): if function is None: raise CommandExecutionError("Missing function parameter") @@ -370,6 +370,6 @@ index 47fe5d55e6..19c7f450d7 100644 + + assert {"retcode": 0, "comment": "container cmd"} == ret -- -2.37.3 +2.36.1 diff --git a/v3004.tar.gz b/v3004.tar.gz new file mode 100644 index 0000000..2129d86 --- /dev/null +++ b/v3004.tar.gz @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:fad2322cfef1a28351ef2cb645fcf7441502864326b63bb20d54d9bde97cf565 +size 17761294 diff --git a/v3005.1.tar.gz b/v3005.1.tar.gz deleted file mode 100644 index 3242028..0000000 --- a/v3005.1.tar.gz +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:344032370bda5edd6242732587111b0a2fee378fcda2d9a1cfff7e191f9ac89e -size 18016427 diff --git a/wipe-notify_socket-from-env-in-cmdmod-bsc-1193357-30.patch b/wipe-notify_socket-from-env-in-cmdmod-bsc-1193357-30.patch new file mode 100644 index 0000000..d730978 --- /dev/null +++ b/wipe-notify_socket-from-env-in-cmdmod-bsc-1193357-30.patch @@ -0,0 +1,84 @@ +From 4da285c7b898645f8ffd0d0797df60ba107747e3 Mon Sep 17 00:00:00 2001 +From: Victor Zhestkov +Date: Fri, 28 Jan 2022 16:40:09 +0300 +Subject: [PATCH] Wipe NOTIFY_SOCKET from env in cmdmod (bsc#1193357) - + 3004 (#473) + +* Remove NOTIFY_SOCKET env variable from cmd.run calls + +* Add test for NOTIFY_SOCKET 
env variable wiping +--- + salt/modules/cmdmod.py | 3 ++ + tests/pytests/unit/modules/test_cmdmod.py | 41 +++++++++++++++++++++++ + 2 files changed, 44 insertions(+) + +diff --git a/salt/modules/cmdmod.py b/salt/modules/cmdmod.py +index 70889da07c..61b328b13b 100644 +--- a/salt/modules/cmdmod.py ++++ b/salt/modules/cmdmod.py +@@ -612,6 +612,9 @@ def _run( + if prepend_path: + run_env["PATH"] = ":".join((prepend_path, run_env["PATH"])) + ++ if "NOTIFY_SOCKET" not in env: ++ run_env.pop("NOTIFY_SOCKET", None) ++ + if python_shell is None: + python_shell = False + +diff --git a/tests/pytests/unit/modules/test_cmdmod.py b/tests/pytests/unit/modules/test_cmdmod.py +index bc1d2818aa..691b89271e 100644 +--- a/tests/pytests/unit/modules/test_cmdmod.py ++++ b/tests/pytests/unit/modules/test_cmdmod.py +@@ -368,6 +368,47 @@ def test_os_environment_remains_intact(): + getpwnam_mock.assert_called_with("foobar") + + ++@pytest.mark.skip_on_windows ++def test_os_environment_do_not_pass_notify_socket(): ++ """ ++ Make sure NOTIFY_SOCKET environment variable is not passed ++ to the command if not explicitly set with env parameter. 
++ """ ++ with patch("pwd.getpwnam") as getpwnam_mock: ++ new_env = os.environ.copy() ++ new_env.update({"NOTIFY_SOCKET": "/run/systemd/notify"}) ++ with patch("subprocess.Popen") as popen_mock, patch( ++ "os.environ.copy", return_value=new_env ++ ): ++ popen_mock.return_value = Mock( ++ communicate=lambda *args, **kwags: [b"", None], ++ pid=lambda: 1, ++ retcode=0, ++ ) ++ ++ with patch.dict(cmdmod.__grains__, {"os": "SUSE", "os_family": "Suse"}): ++ if sys.platform.startswith(("freebsd", "openbsd")): ++ shell = "/bin/sh" ++ else: ++ shell = "/bin/bash" ++ ++ cmdmod._run("ls", cwd=tempfile.gettempdir(), shell=shell) ++ ++ assert "NOTIFY_SOCKET" not in popen_mock.call_args_list[0][1]["env"] ++ ++ cmdmod._run( ++ "ls", ++ cwd=tempfile.gettempdir(), ++ shell=shell, ++ env={"NOTIFY_SOCKET": "/run/systemd/notify.new"}, ++ ) ++ ++ assert ( ++ popen_mock.call_args_list[1][1]["env"]["NOTIFY_SOCKET"] ++ == "/run/systemd/notify.new" ++ ) ++ ++ + @pytest.mark.skip_unless_on_darwin + def test_shell_properly_handled_on_macOS(): + """ +-- +2.34.1 + + diff --git a/x509-fixes-111.patch b/x509-fixes-111.patch index 0a6a0c9..56eeac8 100644 --- a/x509-fixes-111.patch +++ b/x509-fixes-111.patch @@ -1,4 +1,4 @@ -From 1b78ceabaf3885e7d90d13a041685e7dda960ac9 Mon Sep 17 00:00:00 2001 +From da47028898edb69290f989d9f99973969d6a8652 Mon Sep 17 00:00:00 2001 From: Alexander Graul Date: Tue, 18 Jan 2022 16:38:17 +0100 Subject: [PATCH] X509 fixes (#111) @@ -44,10 +44,10 @@ We are logging in debug and not in trace mode here. 
4 files changed, 121 insertions(+), 54 deletions(-) diff --git a/salt/modules/publish.py b/salt/modules/publish.py -index c2dd62b913..45ee41ef19 100644 +index f9b7e8b168..651119906e 100644 --- a/salt/modules/publish.py +++ b/salt/modules/publish.py -@@ -199,6 +199,8 @@ def _publish( +@@ -196,6 +196,8 @@ def _publish( else: return ret @@ -57,7 +57,7 @@ index c2dd62b913..45ee41ef19 100644 def publish( tgt, fun, arg=None, tgt_type="glob", returner="", timeout=5, via_master=None diff --git a/salt/modules/x509.py b/salt/modules/x509.py -index 2f9cc5cc44..194116a85c 100644 +index 0909bace48..261b794295 100644 --- a/salt/modules/x509.py +++ b/salt/modules/x509.py @@ -30,16 +30,13 @@ from salt.utils.odict import OrderedDict @@ -198,7 +198,7 @@ index 2f9cc5cc44..194116a85c 100644 return signing_policy -@@ -1762,7 +1761,8 @@ def create_csr(path=None, text=False, **kwargs): +@@ -1761,7 +1760,8 @@ def create_csr(path=None, text=False, **kwargs): ) ) @@ -208,7 +208,7 @@ index 2f9cc5cc44..194116a85c 100644 if entry in kwargs: setattr(subject, entry, kwargs[entry]) -@@ -1798,7 +1798,6 @@ def create_csr(path=None, text=False, **kwargs): +@@ -1797,7 +1797,6 @@ def create_csr(path=None, text=False, **kwargs): extstack.push(ext) csr.add_extensions(extstack) @@ -216,7 +216,7 @@ index 2f9cc5cc44..194116a85c 100644 csr.sign( _get_private_key_obj( kwargs["private_key"], passphrase=kwargs["private_key_passphrase"] -@@ -1806,10 +1805,11 @@ def create_csr(path=None, text=False, **kwargs): +@@ -1805,10 +1804,11 @@ def create_csr(path=None, text=False, **kwargs): kwargs["algorithm"], ) @@ -232,7 +232,7 @@ index 2f9cc5cc44..194116a85c 100644 def verify_private_key(private_key, public_key, passphrase=None): -@@ -1834,7 +1834,7 @@ def verify_private_key(private_key, public_key, passphrase=None): +@@ -1833,7 +1833,7 @@ def verify_private_key(private_key, public_key, passphrase=None): salt '*' x509.verify_private_key private_key=/etc/pki/myca.key \\ public_key=/etc/pki/myca.crt """ @@ -241,7 
+241,7 @@ index 2f9cc5cc44..194116a85c 100644 def verify_signature( -@@ -1890,7 +1890,10 @@ def verify_crl(crl, cert): +@@ -1889,7 +1889,10 @@ def verify_crl(crl, cert): salt '*' x509.verify_crl crl=/etc/pki/myca.crl cert=/etc/pki/myca.crt """ if not salt.utils.path.which("openssl"): @@ -253,7 +253,7 @@ index 2f9cc5cc44..194116a85c 100644 crltext = _text_or_file(crl) crltext = get_pem_entry(crltext, pem_type="X509 CRL") crltempfile = tempfile.NamedTemporaryFile(delete=True) -@@ -1912,10 +1915,7 @@ def verify_crl(crl, cert): +@@ -1911,10 +1914,7 @@ def verify_crl(crl, cert): crltempfile.close() certtempfile.close() @@ -265,7 +265,7 @@ index 2f9cc5cc44..194116a85c 100644 def expired(certificate): -@@ -1953,8 +1953,9 @@ def expired(certificate): +@@ -1952,8 +1952,9 @@ def expired(certificate): ret["expired"] = True else: ret["expired"] = False @@ -277,7 +277,7 @@ index 2f9cc5cc44..194116a85c 100644 return ret -@@ -1977,6 +1978,7 @@ def will_expire(certificate, days): +@@ -1976,6 +1977,7 @@ def will_expire(certificate, days): salt '*' x509.will_expire "/etc/pki/mycert.crt" days=30 """ @@ -285,7 +285,7 @@ index 2f9cc5cc44..194116a85c 100644 ret = {} if os.path.isfile(certificate): -@@ -1990,14 +1992,11 @@ def will_expire(certificate, days): +@@ -1989,14 +1991,11 @@ def will_expire(certificate, days): _expiration_date = cert.get_not_after().get_datetime() ret["cn"] = _parse_subject(cert.get_subject())["CN"] @@ -433,6 +433,6 @@ index 8f4c433b1a..3105290a2c 100644 @skipIf(not HAS_M2CRYPTO, "Skipping, M2Crypto is unavailable") def test_get_pem_entry(self): -- -2.37.3 +2.34.1 diff --git a/zypperpkg-ignore-retcode-104-for-search-bsc-1176697-.patch b/zypperpkg-ignore-retcode-104-for-search-bsc-1176697-.patch index a8c344a..6984769 100644 --- a/zypperpkg-ignore-retcode-104-for-search-bsc-1176697-.patch +++ b/zypperpkg-ignore-retcode-104-for-search-bsc-1176697-.patch @@ -1,4 +1,4 @@ -From 41e53ad0b03511d65c5d7e6a12e7c460d83b6737 Mon Sep 17 00:00:00 2001 +From 
48306a830d37e64b5275f48e25c315e658ee37e6 Mon Sep 17 00:00:00 2001 From: Alberto Planas Date: Mon, 5 Oct 2020 16:24:16 +0200 Subject: [PATCH] zypperpkg: ignore retcode 104 for search() @@ -10,7 +10,7 @@ Subject: [PATCH] zypperpkg: ignore retcode 104 for search() 2 files changed, 90 insertions(+), 27 deletions(-) diff --git a/salt/modules/zypperpkg.py b/salt/modules/zypperpkg.py -index 32e22ce9a8..6b19c65db3 100644 +index c7228bf712..4af29652d9 100644 --- a/salt/modules/zypperpkg.py +++ b/salt/modules/zypperpkg.py @@ -99,6 +99,8 @@ class _Zypper: @@ -51,7 +51,7 @@ index 32e22ce9a8..6b19c65db3 100644 self.__cmd.extend(args) kwargs["output_loglevel"] = "trace" kwargs["python_shell"] = False -@@ -445,9 +455,11 @@ class Wildcard: +@@ -447,9 +457,11 @@ class Wildcard: Get available versions of the package. :return: """ @@ -66,7 +66,7 @@ index 32e22ce9a8..6b19c65db3 100644 if not solvables: raise CommandExecutionError( "No packages found matching '{}'".format(self.name) -@@ -1052,7 +1064,7 @@ def list_repo_pkgs(*args, **kwargs): +@@ -1054,7 +1066,7 @@ def list_repo_pkgs(*args, **kwargs): root = kwargs.get("root") or None for node in ( @@ -75,7 +75,7 @@ index 32e22ce9a8..6b19c65db3 100644 .xml.call("se", "-s", *targets) .getElementsByTagName("solvable") ): -@@ -2439,7 +2451,9 @@ def owner(*paths, **kwargs): +@@ -2431,7 +2443,9 @@ def owner(*paths, **kwargs): def _get_visible_patterns(root=None): """Get all available patterns in the repo that are visible.""" patterns = {} @@ -86,7 +86,7 @@ index 32e22ce9a8..6b19c65db3 100644 for element in search_patterns.getElementsByTagName("solvable"): installed = element.getAttribute("status") == "installed" patterns[element.getAttribute("name")] = { -@@ -2636,7 +2650,7 @@ def search(criteria, refresh=False, **kwargs): +@@ -2628,7 +2642,7 @@ def search(criteria, refresh=False, **kwargs): cmd.append(criteria) solvables = ( @@ -95,7 +95,7 @@ index 32e22ce9a8..6b19c65db3 100644 .nolock.noraise.xml.call(*cmd) 
.getElementsByTagName("solvable") ) -@@ -2888,7 +2902,7 @@ def _get_patches(installed_only=False, root=None): +@@ -2880,7 +2894,7 @@ def _get_patches(installed_only=False, root=None): """ patches = {} for element in ( @@ -279,6 +279,6 @@ index 47fca906a7..671adc2779 100644 + env={"ZYPP_READONLY_HACK": "1"}, + ) -- -2.37.3 +2.34.1