3 Commits

SHA256 Message Date
a0f24b2e44 remove saltbundlepy-deb from include-deb 2026-01-14 15:54:55 +01:00
a52c5a84f5 Drop saltbundlepy-apt dependency 2026-01-14 15:51:16 +01:00
aa70523380 Synchronize latest patches 2026-01-14 15:27:12 +01:00
8 changed files with 3886 additions and 6 deletions

@@ -1 +1 @@
955e9f940864b24137e666102b0046941abf7013
720580e5796703426c70d97b2db1487a3d504349

@@ -0,0 +1,232 @@
From 324c7740438fd0bbcde1e0b6be70c92007c022ac Mon Sep 17 00:00:00 2001
From: Victor Zhestkov <vzhestkov@suse.com>
Date: Wed, 14 Jan 2026 14:08:48 +0100
Subject: [PATCH] Fixes for security issues (CVE-2025-13836,
CVE-2025-67725, CVE-2025-67726) (#744)
* Fixes for security issues (CVE-2025-67725)
httputil: Fix quadratic performance of repeated header lines
Previously, when many header lines with the same name were found
in an HTTP request or response, repeated string concatenation would
result in quadratic performance. This change does the concatenation
lazily (with a cache) so that repeated headers can be processed
efficiently.
Security: The previous behavior allowed a denial of service attack
via a maliciously crafted HTTP message, but only if the
max_header_size was increased from its default of 64kB.
* Patch tornado for (BDSA-2025-60811, CVE-2025-67726)
httputil: Fix quadratic behavior in _parseparam
Prior to this change, _parseparam had O(n^2) behavior when parsing
certain inputs, which could be a DoS vector. This change adapts
logic from the equivalent function in the python standard library
in https://github.com/python/cpython/pull/136072/files
* Set a safe limit to http.client response read (CVE-2025-13836)
https://github.com/saltstack/salt/pull/68611
* Remove duplicated test
---------
Co-authored-by: Twangboy <shane.d.lee@gmail.com>
Co-authored-by: Marek Czernek <marek.czernek@suse.com>
---
salt/ext/tornado/httputil.py | 56 ++++++++++++++++++--------
salt/ext/tornado/test/httputil_test.py | 38 +++++++++++++++++
salt/utils/nxos.py | 3 +-
3 files changed, 79 insertions(+), 18 deletions(-)
diff --git a/salt/ext/tornado/httputil.py b/salt/ext/tornado/httputil.py
index 4866b0c991..78953c5f6b 100644
--- a/salt/ext/tornado/httputil.py
+++ b/salt/ext/tornado/httputil.py
@@ -139,8 +139,8 @@ class HTTPHeaders(MutableMapping):
"""
def __init__(self, *args, **kwargs):
- self._dict = {} # type: typing.Dict[str, str]
self._as_list = {} # type: typing.Dict[str, typing.List[str]]
+ self._combined_cache = {} # type: typing.Dict[str, str]
self._last_key = None
if len(args) == 1 and len(kwargs) == 0 and isinstance(args[0], HTTPHeaders):
# Copy constructor
@@ -158,9 +158,7 @@ class HTTPHeaders(MutableMapping):
norm_name = _normalized_headers[name]
self._last_key = norm_name
if norm_name in self:
- self._dict[norm_name] = (
- native_str(self[norm_name]) + "," + native_str(value)
- )
+ self._combined_cache.pop(norm_name, None)
self._as_list[norm_name].append(value)
else:
self[norm_name] = value
@@ -193,7 +191,7 @@ class HTTPHeaders(MutableMapping):
# continuation of a multi-line header
new_part = " " + line.lstrip(HTTP_WHITESPACE)
self._as_list[self._last_key][-1] += new_part
- self._dict[self._last_key] += new_part
+ self._combined_cache.pop(self._last_key, None)
else:
name, value = line.split(":", 1)
self.add(name, value.strip(HTTP_WHITESPACE))
@@ -216,23 +214,33 @@ class HTTPHeaders(MutableMapping):
def __setitem__(self, name, value):
norm_name = _normalized_headers[name]
- self._dict[norm_name] = value
+ self._combined_cache[norm_name] = value
self._as_list[norm_name] = [value]
+ def __contains__(self, name):
+ # This is an important optimization to avoid the expensive concatenation
+ # in __getitem__ when it's not needed.
+ if not isinstance(name, str):
+ return False
+ return name in self._as_list
+
def __getitem__(self, name):
# type: (str) -> str
- return self._dict[_normalized_headers[name]]
+ header = _normalized_headers[name]
+ if header not in self._combined_cache:
+ self._combined_cache[header] = ",".join(self._as_list[header])
+ return self._combined_cache[header]
def __delitem__(self, name):
norm_name = _normalized_headers[name]
- del self._dict[norm_name]
+ del self._combined_cache[norm_name]
del self._as_list[norm_name]
def __len__(self):
- return len(self._dict)
+ return len(self._as_list)
def __iter__(self):
- return iter(self._dict)
+ return iter(self._as_list)
def copy(self):
# defined in dict but not in MutableMapping.
@@ -894,19 +902,33 @@ def parse_response_start_line(line):
# combinations of semicolons and double quotes.
# It has also been modified to support valueless parameters as seen in
# websocket extension negotiations.
+#
+# _parseparam has been further modified with the logic from
+# https://github.com/python/cpython/pull/136072/files
+# to avoid quadratic behavior when parsing semicolons in quoted strings.
+#
+# TODO: See if we can switch to email.message.Message for this functionality.
+# This is the suggested replacement for the cgi.py module now that cgi has
+# been removed from recent versions of Python. We need to verify that
+# the email module is consistent with our existing behavior (and all relevant
def _parseparam(s):
- while s[:1] == ";":
- s = s[1:]
- end = s.find(";")
- while end > 0 and (s.count('"', 0, end) - s.count('\\"', 0, end)) % 2:
- end = s.find(";", end + 1)
+ start = 0
+ while s.find(";", start) == start:
+ start += 1
+ end = s.find(";", start)
+ ind, diff = start, 0
+ while end > 0:
+ diff += s.count('"', ind, end) - s.count('\\"', ind, end)
+ if diff % 2 == 0:
+ break
+ end, ind = ind, s.find(";", end + 1)
if end < 0:
end = len(s)
- f = s[:end]
+ f = s[start:end]
yield f.strip()
- s = s[end:]
+ start = end
def _parse_header(line):
diff --git a/salt/ext/tornado/test/httputil_test.py b/salt/ext/tornado/test/httputil_test.py
index c613b4e41a..bdbfaa2b6a 100644
--- a/salt/ext/tornado/test/httputil_test.py
+++ b/salt/ext/tornado/test/httputil_test.py
@@ -245,6 +245,30 @@ Foo
self.assertEqual(file["body"], b"Foo")
+ def test_disposition_param_linear_performance(self):
+ # This is a regression test for performance of parsing parameters
+ # to the content-disposition header, specifically for semicolons within
+ # quoted strings.
+ def f(n):
+ start = time.time()
+ message = (
+ b"--1234\r\nContent-Disposition: form-data; "
+ + b'x="'
+ + b";" * n
+ + b'"; '
+ + b'name="files"; filename="a.txt"\r\n\r\nFoo\r\n--1234--\r\n'
+ )
+ args: dict[str, list[bytes]] = {}
+ files: dict[str, list[HTTPFile]] = {}
+ parse_multipart_form_data(b"1234", message, args, files)
+ return time.time() - start
+
+ d1 = f(1_000)
+ d2 = f(10_000)
+ if d2 / d1 > 20:
+ self.fail(f"Disposition param parsing is not linear: {d1=} vs {d2=}")
+
+
class HTTPHeadersTest(unittest.TestCase):
def test_multi_line(self):
# Lines beginning with whitespace are appended to the previous line
@@ -367,6 +391,20 @@ Foo: even
headers2 = HTTPHeaders.parse(str(headers))
self.assertEquals(headers, headers2)
+ def test_linear_performance(self):
+ def f(n):
+ start = time.time()
+ headers = HTTPHeaders()
+ for i in range(n):
+ headers.add("X-Foo", "bar")
+ return time.time() - start
+
+ # This runs under 50ms on my laptop as of 2025-12-09.
+ d1 = f(10000)
+ d2 = f(100000)
+ if d2 / d1 > 20:
+ # d2 should be about 10x d1 but allow a wide margin for variability.
+ self.fail("HTTPHeaders.add() does not scale linearly: %s vs %s" % (d1, d2))
class FormatTimestampTest(unittest.TestCase):
# Make sure that all the input types are supported.
diff --git a/salt/utils/nxos.py b/salt/utils/nxos.py
index 2572a76267..654290155e 100644
--- a/salt/utils/nxos.py
+++ b/salt/utils/nxos.py
@@ -212,7 +212,8 @@ class NxapiClient:
body = response
if self.nxargs["connect_over_uds"]:
- body = json.loads(response.read().decode("utf-8"))
+ max_safe_read = 10 * 1024 * 1024
+ body = json.loads(response.read(max_safe_read).decode("utf-8"))
# Proceed with caution. The JSON may not be complete.
# Don't just return body['ins_api']['outputs']['output'] directly.
--
2.52.0

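The first patch above replaces eager string concatenation in HTTPHeaders with a per-name list plus a lazily filled join cache. The standalone sketch below only illustrates that technique under simplified assumptions; the class name LazyJoinHeaders and the timing harness are invented for this example and are not part of the patch or of Tornado.

import time


class LazyJoinHeaders:
    """Toy stand-in (not the patched Tornado class): keep every raw value in a
    list and only join them, once, when the combined header is requested."""

    def __init__(self):
        self._as_list = {}         # name -> list of raw values
        self._combined_cache = {}  # name -> cached ",".join(...) result

    def add(self, name, value):
        # Appending to a list is O(1); drop any stale cached join.
        self._as_list.setdefault(name, []).append(value)
        self._combined_cache.pop(name, None)

    def get(self, name):
        # Join lazily and reuse the result until the next add().
        if name not in self._combined_cache:
            self._combined_cache[name] = ",".join(self._as_list[name])
        return self._combined_cache[name]


def eager_concat(n):
    # The pre-patch pattern: rebuild the combined string on every add,
    # copying it each time, which is O(n^2) overall.
    combined = ""
    for _ in range(n):
        combined = combined + ",bar" if combined else "bar"
    return combined


if __name__ == "__main__":
    headers = LazyJoinHeaders()
    start = time.time()
    for _ in range(100_000):
        headers.add("X-Foo", "bar")
    headers.get("X-Foo")
    print("lazy join of 100k values:", round(time.time() - start, 3), "s")
    print("eager concat of 1k values:", len(eager_concat(1_000)), "chars")
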
@@ -40,7 +40,6 @@ saltbundlepy-requests.deb$
saltbundlepy-setuptools.deb$
saltbundlepy-six.deb$
saltbundlepy-urllib3.deb$
saltbundlepy-apt.deb$
saltbundlepy-docker.deb$
saltbundlepy-docker-pycreds.deb$

@@ -0,0 +1,173 @@
From 7cbb68f36824161743f4cc60d8920e2cea039e5e Mon Sep 17 00:00:00 2001
From: Marek Czernek <marek.czernek@suse.com>
Date: Fri, 9 Jan 2026 16:49:19 +0100
Subject: [PATCH] Simplify utils.json.find_json function
The previous implementation computed all combinations of potential JSON
documents and tried to `json.loads()` them. That resulted in num({) *
num(}) tries, which could take hours on large inputs.
The approach implemented with this change simplifies the work we do: we
only look for opening '{' and '[' characters, and try to parse the rest
of input string with JSONDecoder.raw_decode. This method ignores
extraneous data at the end and is faster than doing it ourselves in
Python.
Co-authored-by: Alexander Graul <agraul@suse.com>
---
changelog/68258.fixed.md | 1 +
salt/utils/json.py | 80 ++++++---------------------
tests/pytests/unit/utils/test_json.py | 5 --
tests/unit/utils/test_json.py | 12 ++++
4 files changed, 31 insertions(+), 67 deletions(-)
create mode 100644 changelog/68258.fixed.md
diff --git a/changelog/68258.fixed.md b/changelog/68258.fixed.md
new file mode 100644
index 0000000000..a9afeccef7
--- /dev/null
+++ b/changelog/68258.fixed.md
@@ -0,0 +1 @@
+Simplified and sped up `utils.json.find_json` function
diff --git a/salt/utils/json.py b/salt/utils/json.py
index 26cb38cdbe..1605e75f9f 100644
--- a/salt/utils/json.py
+++ b/salt/utils/json.py
@@ -2,7 +2,7 @@
Functions to work with JSON
"""
-
+import contextlib
import json
import logging
@@ -25,69 +25,25 @@ def __split(raw):
return raw.splitlines()
-def find_json(raw):
- """
- Pass in a raw string and load the json when it starts. This allows for a
- string to start with garbage and end with json but be cleanly loaded
- """
- ret = {}
- lines = __split(raw)
- lengths = list(map(len, lines))
- starts = []
- ends = []
-
- # Search for possible starts end ends of the json fragments
- for ind, _ in enumerate(lines):
- line = lines[ind].lstrip()
- line = line[0] if line else line
- if line == "{" or line == "[":
- starts.append((ind, line))
- if line == "}" or line == "]":
- ends.append((ind, line))
-
- # List all the possible pairs of starts and ends,
- # and fill the length of each block to sort by size after
- starts_ends = []
- for start, start_br in starts:
- for end, end_br in reversed(ends):
- if end > start and (
- (start_br == "{" and end_br == "}")
- or (start_br == "[" and end_br == "]")
- ):
- starts_ends.append((start, end, sum(lengths[start : end + 1])))
-
- # Iterate through all the possible pairs starting from the largest
- starts_ends.sort(key=lambda x: (x[2], x[1] - x[0], x[0]), reverse=True)
- for start, end, _ in starts_ends:
- working = "\n".join(lines[start : end + 1])
- try:
- ret = json.loads(working)
- return ret
- except ValueError:
- pass
- # Try filtering non-JSON text right after the last closing curly brace
- end_str = lines[end].lstrip()[0]
- working = "\n".join(lines[start : end]) + end_str
- try:
- ret = json.loads(working)
- return ret
- except ValueError:
- continue
+def find_json(s: str):
+ """Pass in a string and load JSON within it.
- # Fall back to old implementation for backward compatibility
- # excpecting json after the text
- for ind, _ in enumerate(lines):
- working = "\n".join(lines[ind:])
- try:
- ret = json.loads(working)
- except ValueError:
- continue
- if ret:
- return ret
+ The string may contain non-JSON text before and after the JSON document.
- if not ret:
- # Not json, raise an error
- raise ValueError
+ Raises ValueError if no valid JSON was found.
+ """
+ decoder = json.JSONDecoder()
+
+ # We look for the beginning of JSON objects / arrays and let raw_decode() handle
+ # extraneous data at the end.
+ for idx, char in enumerate(s):
+ if char == "{" or char == "[":
+ # JSONDecodeErrors are expected on stray '{'/'[' in the non-JSON part
+ with contextlib.suppress(json.JSONDecodeError):
+ data, _ = decoder.raw_decode(s[idx:])
+ return data
+
+ raise ValueError
def import_json():
diff --git a/tests/pytests/unit/utils/test_json.py b/tests/pytests/unit/utils/test_json.py
index 72b1023003..f7aed28b42 100644
--- a/tests/pytests/unit/utils/test_json.py
+++ b/tests/pytests/unit/utils/test_json.py
@@ -107,11 +107,6 @@ def test_find_json():
ret = salt.utils.json.find_json(garbage_around_json)
assert ret == expected_ret
- # Now pre-pend small json and re-test
- small_json_pre_json = f"{test_small_json}{test_sample_json}"
- ret = salt.utils.json.find_json(small_json_pre_json)
- assert ret == expected_ret
-
# Now post-pend small json and re-test
small_json_post_json = f"{test_sample_json}{test_small_json}"
ret = salt.utils.json.find_json(small_json_post_json)
diff --git a/tests/unit/utils/test_json.py b/tests/unit/utils/test_json.py
index 5ea409a705..f5dcc1f72d 100644
--- a/tests/unit/utils/test_json.py
+++ b/tests/unit/utils/test_json.py
@@ -49,6 +49,18 @@ class JSONTestCase(TestCase):
)
)
+ def test_find_json_unbalanced_brace_in_string(self):
+ test_sample_json = '{"title": "I like curly braces like this one:{"}'
+ expected_ret = {"title": "I like curly braces like this one:{"}
+ ret = salt.utils.json.find_json(test_sample_json)
+ self.assertDictEqual(ret, expected_ret)
+
+ def test_find_json_unbalanced_square_bracket_in_string(self):
+ test_sample_json = '{"title": "I like square brackets like this one:["}'
+ expected_ret = {"title": "I like square brackets like this one:["}
+ ret = salt.utils.json.find_json(test_sample_json)
+ self.assertDictEqual(ret, expected_ret)
+
def test_find_json(self):
test_sample_json = """
{
--
2.52.0

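As a quick way to see the raw_decode approach in isolation, here is an illustrative sketch of the same idea the patch above applies to salt.utils.json.find_json. The function name find_json_sketch and the sample input are invented for this example; the real implementation is the one shown in the diff.

import contextlib
import json


def find_json_sketch(s: str):
    """Standalone illustration of the technique used by the patch above:
    scan for the first '{' or '[' and let JSONDecoder.raw_decode() parse a
    document from that point, ignoring any trailing non-JSON text."""
    decoder = json.JSONDecoder()
    for idx, char in enumerate(s):
        if char in "{[":
            # A stray brace in the leading garbage simply fails to decode.
            with contextlib.suppress(json.JSONDecodeError):
                data, _ = decoder.raw_decode(s[idx:])
                return data
    raise ValueError("no JSON document found")


if __name__ == "__main__":
    noisy = 'WARNING: deprecation notice\n{"result": true, "changes": []}\ntrailing log line'
    print(find_json_sketch(noisy))  # {'result': True, 'changes': []}
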
@@ -0,0 +1,91 @@
From c4542e59844bce3a65726564fa364170c1fe7b8c Mon Sep 17 00:00:00 2001
From: Victor Zhestkov <vzhestkov@suse.com>
Date: Wed, 14 Jan 2026 14:12:44 +0100
Subject: [PATCH] Speedup wheel key.finger call (bsc#1240532) (#713)
* Reduce the number of os.path.basename calls with key.finger
* Simplify and speedup salt.key.Key.name_match
* Avoid unneeded printing while calling wheel from master
* Populate missing parts for clear_load
* Remove redundant events to be fired
---
salt/key.py | 20 ++++++++++----------
salt/master.py | 11 ++++++++---
2 files changed, 18 insertions(+), 13 deletions(-)
diff --git a/salt/key.py b/salt/key.py
index b15b80eca3..8cd248bb8c 100644
--- a/salt/key.py
+++ b/salt/key.py
@@ -491,16 +491,15 @@ class Key:
ret = {}
if "," in match and isinstance(match, str):
match = match.split(",")
+ if not isinstance(match, list):
+ match = [match]
for status, keys in matches.items():
+ if match == ["*"] and keys:
+ ret[status] = keys
+ continue
for key in salt.utils.data.sorted_ignorecase(keys):
- if isinstance(match, list):
- for match_item in match:
- if fnmatch.fnmatch(key, match_item):
- if status not in ret:
- ret[status] = []
- ret[status].append(key)
- else:
- if fnmatch.fnmatch(key, match):
+ for match_item in match:
+ if fnmatch.fnmatch(key, match_item):
if status not in ret:
ret[status] = []
ret[status].append(key)
@@ -543,12 +542,13 @@ class Key:
for dir_ in key_dirs:
if dir_ is None:
continue
- ret[os.path.basename(dir_)] = []
+ base_dir = os.path.basename(dir_)
+ ret[base_dir] = []
try:
for fn_ in salt.utils.data.sorted_ignorecase(os.listdir(dir_)):
if not fn_.startswith("."):
if os.path.isfile(os.path.join(dir_, fn_)):
- ret[os.path.basename(dir_)].append(
+ ret[base_dir].append(
salt.utils.stringutils.to_unicode(fn_)
)
except OSError:
diff --git a/salt/master.py b/salt/master.py
index 09ce7d36a7..b9f009a028 100644
--- a/salt/master.py
+++ b/salt/master.py
@@ -2093,12 +2093,17 @@ class ClearFuncs(TransportMethods):
"tag": tag,
"user": username,
}
-
- self.event.fire_event(data, tagify([jid, "new"], "wheel"))
+ clear_load.update(
+ {
+ "__jid__": jid,
+ "__tag__": tag,
+ "__user__": username,
+ "print_event": clear_load.get("print_event", False),
+ }
+ )
ret = self.wheel_.call_func(fun, full_return=True, **clear_load)
data["return"] = ret["return"]
data["success"] = ret["success"]
- self.event.fire_event(data, tagify([jid, "ret"], "wheel"))
return {"tag": tag, "data": data}
except Exception as exc: # pylint: disable=broad-except
log.error("Exception occurred while introspecting %s: %s", fun, exc)
--
2.52.0

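To make the name_match simplification in the patch above easier to follow outside of salt.key, here is a hedged standalone sketch: normalize the match argument to a list once, short-circuit on a bare '*', and run a single fnmatch loop. name_match_sketch and the sample key dictionary are invented for illustration and are not the salt.key.Key API.

import fnmatch


def name_match_sketch(match, matches):
    """Simplified stand-in for the patched salt.key.Key.name_match logic."""
    if isinstance(match, str) and "," in match:
        match = match.split(",")
    if not isinstance(match, list):
        match = [match]

    ret = {}
    for status, keys in matches.items():
        if match == ["*"] and keys:
            # Every key matches '*'; skip the per-key fnmatch calls entirely.
            ret[status] = list(keys)
            continue
        for key in sorted(keys, key=str.lower):
            if any(fnmatch.fnmatch(key, pattern) for pattern in match):
                ret.setdefault(status, []).append(key)
    return ret


if __name__ == "__main__":
    keys = {"minions": ["web01", "web02", "db01"], "minions_pre": ["new01"]}
    print(name_match_sketch("web*", keys))  # {'minions': ['web01', 'web02']}
    print(name_match_sketch("*", keys))     # all keys, no fnmatch calls
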
File diff suppressed because it is too large

@@ -1,3 +1,21 @@
-------------------------------------------------------------------
Wed Jan 14 14:25:41 UTC 2026 - Marek Czernek <marek.czernek@suse.com>
- Use internal deb classes instead of external aptsource lib
* Drop dependency on saltbundlepy-apt
- Speed up wheel key.finger call (bsc#1240532)
- Add security patches (bsc#1254903,bsc#1254905,bsc#1254904)
- Simplify and speed up utils.find_json function (bsc#1246130)
- Added:
* use-internal-salt.utils.pkg.deb-classes-instead-of-a.patch
* speedup-wheel-key.finger-call-bsc-1240532-713.patch
* fixes-for-security-issues-cve-2025-13836-cve-2025-67.patch
* simplify-utils.json.find_json-function.patch
- Modified:
* include-deb
-------------------------------------------------------------------
Thu Jan 8 08:39:12 UTC 2026 - Marek Czernek <marek.czernek@suse.com>

@@ -565,6 +565,17 @@ Patch187: fix-tls-and-x509-modules-for-older-cryptography-modu.patch
Patch188: backport-3006.17-security-fixes-739.patch
# PATCH-FIX_OPENSUSE: https://github.com/openSUSE/salt/pull/742
Patch189: extend-fails-to-warnings-until-2027-742.patch
# PATCH-FIX_UPSTREAM: https://github.com/saltstack/salt/pull/68253
Patch190: simplify-utils.json.find_json-function.patch
# PATCH-FIX_UPSTREAM: https://github.com/saltstack/salt/pull/68595
# PATCH-FIX_UPSTREAM: https://github.com/saltstack/salt/pull/68611
# PATCH-FIX_UPSTREAM: https://github.com/tornadoweb/tornado/pull/3553
# PATCH-FIX_UPSTREAM: https://github.com/tornadoweb/tornado/commit/771472cfdaeebc0d89a9cc46e249f8891a6b29cd
Patch191: fixes-for-security-issues-cve-2025-13836-cve-2025-67.patch
# PATCH-FIX_UPSTREAM: https://github.com/saltstack/salt/pull/68251
Patch192: speedup-wheel-key.finger-call-bsc-1240532-713.patch
# PATCH-FIX_UPSTREAM: https://github.com/saltstack/salt/pull/67956
Patch193: use-internal-salt.utils.pkg.deb-classes-instead-of-a.patch
### IMPORTANT: The line below is used as a snippet marker. Do not touch it.
@@ -632,10 +643,6 @@ BuildRequires: python
Requires(post): policycoreutils
%endif
%if 0%{?debian} || 0%{?raspbian} || 0%{?ubuntu}
BuildRequires: saltbundlepy-apt
%endif
# --- [venvjail - BuildRequires] END ---
# --- [venvjail - Requires] START ---
# --- [venvjail - Requires] END ---