Matej Cepl 2024-09-30 18:58:36 +00:00 committed by Git OBS Bridge
parent a4d601d4ed
commit b5cfe7992f
8 changed files with 4 additions and 353 deletions

View File

@@ -6,9 +6,9 @@ Subject: [PATCH 1/4] bpo-39017 Fix infinite loop in the tarfile module
Add a check for length = 0 in the _proc_pax function to avoid running into an infinite loop
---
Lib/tarfile.py | 2 ++
Lib/test/test_tarfile.py | 8 ++++++++
Lib/test/test_tarfile.py | 5 +++++
Misc/NEWS.d/next/Library/2020-07-12-22-16-58.bpo-39017.x3Cg-9.rst | 1 +
3 files changed, 11 insertions(+)
3 files changed, 8 insertions(+)
create mode 100644 Lib/test/recursion.tar
--- a/Lib/tarfile.py
@@ -24,17 +24,14 @@ Add a check for length = 0 in the _proc_pax function to avoid running into an in
keyword = keyword.decode("utf8")
--- a/Lib/test/test_tarfile.py
+++ b/Lib/test/test_tarfile.py
@@ -321,6 +321,14 @@ class CommonReadTest(ReadTest):
@@ -321,6 +321,11 @@ class CommonReadTest(ReadTest):
with self.assertRaisesRegexp(tarfile.ReadError, "unexpected end of data"):
tar.extractfile(t).read()
+ def test_length_zero_header(self):
+ # bpo-39017 (CVE-2019-20907): reading a zero-length header should fail
+ # with an exception
+ with self.assertRaisesRegex(tarfile.ReadError, "file could not be opened successfully"):
+ with tarfile.open(support.findfile('recursion.tar')) as tar:
+ pass
+
+ self.assertRaises(tarfile.ReadError, tarfile.open, test_support.findfile('recursion.tar'))
+
class MiscReadTest(CommonReadTest):
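
The hunks above adjust the bpo-39017 (CVE-2019-20907) patch. The underlying bug: _proc_pax advanced through the PAX header buffer with "pos += length", trusting the length field of each record, so a crafted record declaring length = 0 never advanced pos and the loop ran forever. The patch guards against that case. A minimal sketch of the guarded loop, for illustration only (parse_pax_records and the ValueError are stand-ins, not the actual Lib/tarfile.py code):

    import re

    # Each PAX record is "%d %s=%s\n" % (length, keyword, value); length counts
    # the whole record, including the length field itself and the newline.
    _record_re = re.compile(br"(\d+) ")

    def parse_pax_records(buf):
        headers = {}
        pos = 0
        while True:
            match = _record_re.match(buf, pos)
            if not match:
                break
            length = int(match.group(1))
            if length == 0:
                # Without this guard, "pos += length" never advances and the
                # loop spins forever on a crafted "0 ..." record (bpo-39017).
                raise ValueError("invalid pax header record")
            record = buf[pos:pos + length]
            keyword, _, value = record[match.end(1) - pos + 1:-1].partition(b"=")
            headers[keyword.decode("utf-8")] = value.decode("utf-8")
            pos += length
        return headers

With the check in place, parse_pax_records(b"11 foo=bar\n") returns {'foo': 'bar'}, while a record starting with b"0 " raises immediately instead of hanging.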

View File

@@ -1,313 +0,0 @@
From 540276999a26ee86a9da501bc8d556a51cf1808b Mon Sep 17 00:00:00 2001
From: Seth Michael Larson <seth@python.org>
Date: Wed, 4 Sep 2024 10:41:42 -0500
Subject: [PATCH] [CVE-2024-6232] Remove backtracking when parsing tarfile
headers
* Remove backtracking when parsing tarfile headers
* Rewrite PAX header parsing to be stricter
* Optimize parsing of GNU extended sparse headers v0.0
(cherry picked from commit 34ddb64d088dd7ccc321f6103d23153256caa5d4)
Co-authored-by: Seth Michael Larson <seth@python.org>
Co-authored-by: Kirill Podoprigora <kirill.bast9@mail.ru>
Co-authored-by: Gregory P. Smith <greg@krypto.org>
Fixes: bsc#1230227 (CVE-2024-6232)
Fixes: gh#python/cpython#121285
From-PR: gh#python/cpython!123642
Patch: CVE-2024-6232-ReDOS-backtrack-tarfile.patch
---
Lib/tarfile.py | 169 ++++++++--
Lib/test/test_tarfile.py | 44 ++
Misc/NEWS.d/next/Security/2024-07-02-13-39-20.gh-issue-121285.hrl-yI.rst | 2
3 files changed, 187 insertions(+), 28 deletions(-)
create mode 100644 Misc/NEWS.d/next/Security/2024-07-02-13-39-20.gh-issue-121285.hrl-yI.rst
--- a/Lib/tarfile.py
+++ b/Lib/tarfile.py
@@ -918,6 +918,9 @@ class ExFileObject(object):
yield line
#class ExFileObject
+# Header length is digits followed by a space.
+_header_length_prefix_re = re.compile(br"([0-9]{1,20}) ")
+
#------------------
# Exported Classes
#------------------
@@ -1390,32 +1393,94 @@ class TarInfo(object):
# Parse pax header information. A record looks like that:
# "%d %s=%s\n" % (length, keyword, value). length is the size
# of the complete record including the length field itself and
- # the newline. keyword and value are both UTF-8 encoded strings.
- regex = re.compile(r"(\d+) ([^=]+)=", re.U)
+ # the newline.
pos = 0
- while True:
- match = regex.match(buf, pos)
+ encoding = None
+ raw_headers = []
+ while len(buf) > pos and buf[pos] != 0x00:
+ match = _header_length_prefix_re.match(buf, pos)
if not match:
- break
+ raise InvalidHeaderError("invalid header")
+ try:
+ length = int(match.group(1))
+ except ValueError:
+ raise InvalidHeaderError("invalid header")
+ # Headers must be at least 5 bytes, shortest being '5 x=\n'.
+ # Value is allowed to be empty.
+ if length < 5:
+ raise InvalidHeaderError("invalid header")
+ if pos + length > len(buf):
+ raise InvalidHeaderError("invalid header")
- length, keyword = match.groups()
- length = int(length)
- if length == 0:
+ header_value_end_offset = match.start(1) + length - 1 # Last byte of the header
+ keyword_and_value = buf[match.end(1) + 1:header_value_end_offset]
+ raw_keyword, equals, raw_value = keyword_and_value.partition(b"=")
+
+ # Check the framing of the header. The last character must be '\n' (0x0A)
+ if not raw_keyword or equals != b"=" or buf[header_value_end_offset] != 0x0A:
raise InvalidHeaderError("invalid header")
- value = buf[match.end(2) + 1:match.start(1) + length - 1]
+ raw_headers.append((length, raw_keyword, raw_value))
- keyword = keyword.decode("utf8")
- value = value.decode("utf8")
+ # Check if the pax header contains a hdrcharset field. This tells us
+ # the encoding of the path, linkpath, uname and gname fields. Normally,
+ # these fields are UTF-8 encoded but since POSIX.1-2008 tar
+ # implementations are allowed to store them as raw binary strings if
+ # the translation to UTF-8 fails. For the time being, we don't care about
+ # anything other than "BINARY". The only other value that is currently
+ # allowed by the standard is "ISO-IR 10646 2000 UTF-8" in other words UTF-8.
+ # Note that we only follow the initial 'hdrcharset' setting to preserve
+ # the initial behavior of the 'tarfile' module.
+ if raw_keyword == b"hdrcharset" and encoding is None:
+ if raw_value == b"BINARY":
+ encoding = tarfile.encoding
+ else: # This branch ensures only the first 'hdrcharset' header is used.
+ encoding = "utf-8"
- pax_headers[keyword] = value
pos += length
+ # If no explicit hdrcharset is set, we use UTF-8 as a default.
+ if encoding is None:
+ encoding = "utf-8"
+
+ # After parsing the raw headers we can decode them to text.
+ for length, raw_keyword, raw_value in raw_headers:
+ # Normally, we could just use "utf-8" as the encoding and "strict"
+ # as the error handler, but we better not take the risk. For
+ # example, GNU tar <= 1.23 is known to store filenames it cannot
+ # translate to UTF-8 as raw strings (unfortunately without a
+ # hdrcharset=BINARY header).
+ # We first try the strict standard encoding, and if that fails we
+ # fall back on the user's encoding and error handler.
+ keyword = self._decode_pax_field(raw_keyword, "utf-8", "utf-8",
+ tarfile.errors)
+ if keyword in PAX_NAME_FIELDS:
+ value = self._decode_pax_field(raw_value, encoding, tarfile.encoding,
+ tarfile.errors)
+ else:
+ value = self._decode_pax_field(raw_value, "utf-8", "utf-8",
+ tarfile.errors)
+
+ pax_headers[keyword] = value
+
# Fetch the next header.
try:
next = self.fromtarfile(tarfile)
except HeaderError:
raise SubsequentHeaderError("missing or bad subsequent header")
+ # Process GNU sparse information.
+ if "GNU.sparse.map" in pax_headers:
+ # GNU extended sparse format version 0.1.
+ self._proc_gnusparse_01(next, pax_headers)
+
+ elif "GNU.sparse.size" in pax_headers:
+ # GNU extended sparse format version 0.0.
+ self._proc_gnusparse_00(next, raw_headers)
+
+ elif pax_headers.get("GNU.sparse.major") == "1" and pax_headers.get("GNU.sparse.minor") == "0":
+ # GNU extended sparse format version 1.0.
+ self._proc_gnusparse_10(next, pax_headers, tarfile)
+
if self.type in (XHDTYPE, SOLARIS_XHDTYPE):
# Patch the TarInfo object with the extended header info.
next._apply_pax_info(pax_headers, tarfile.encoding, tarfile.errors)
@@ -1432,29 +1497,79 @@ class TarInfo(object):
return next
- def _apply_pax_info(self, pax_headers, encoding, errors):
- """Replace fields with supplemental information from a previous
- pax extended or global header.
+ def _proc_gnusparse_00(self, next, raw_headers):
+ """Process a GNU tar extended sparse header, version 0.0.
"""
- for keyword, value in pax_headers.iteritems():
- if keyword not in PAX_FIELDS:
- continue
-
- if keyword == "path":
- value = value.rstrip("/")
+ offsets = []
+ numbytes = []
+ for _, keyword, value in raw_headers:
+ if keyword == b"GNU.sparse.offset":
+ try:
+ offsets.append(int(value.decode()))
+ except ValueError:
+ raise InvalidHeaderError("invalid header")
- if keyword in PAX_NUMBER_FIELDS:
+ elif keyword == b"GNU.sparse.numbytes":
try:
- value = PAX_NUMBER_FIELDS[keyword](value)
+ numbytes.append(int(value.decode()))
except ValueError:
- value = 0
- else:
- value = uts(value, encoding, errors)
+ raise InvalidHeaderError("invalid header")
+ next.sparse = list(zip(offsets, numbytes))
+
+ def _proc_gnusparse_01(self, next, pax_headers):
+ """Process a GNU tar extended sparse header, version 0.1.
+ """
+ sparse = [int(x) for x in pax_headers["GNU.sparse.map"].split(",")]
+ next.sparse = list(zip(sparse[::2], sparse[1::2]))
+
+ def _proc_gnusparse_10(self, next, pax_headers, tarfile):
+ """Process a GNU tar extended sparse header, version 1.0.
+ """
+ fields = None
+ sparse = []
+ buf = tarfile.fileobj.read(BLOCKSIZE)
+ fields, buf = buf.split(b"\n", 1)
+ fields = int(fields)
+ while len(sparse) < fields * 2:
+ if b"\n" not in buf:
+ buf += tarfile.fileobj.read(BLOCKSIZE)
+ number, buf = buf.split(b"\n", 1)
+ sparse.append(int(number))
+ next.offset_data = tarfile.fileobj.tell()
+ next.sparse = list(zip(sparse[::2], sparse[1::2]))
+
+ def _apply_pax_info(self, pax_headers, encoding, errors):
+ """Replace fields with supplemental information from a previous
+ pax extended or global header.
+ """
+ for keyword, value in pax_headers.iteritems():
+ if keyword == "GNU.sparse.name":
+ setattr(self, "path", value)
+ elif keyword == "GNU.sparse.size":
+ setattr(self, "size", int(value))
+ elif keyword == "GNU.sparse.realsize":
+ setattr(self, "size", int(value))
+ elif keyword in PAX_FIELDS:
+ if keyword in PAX_NUMBER_FIELDS:
+ try:
+ value = PAX_NUMBER_FIELDS[keyword](value)
+ except ValueError:
+ value = 0
+ if keyword == "path":
+ value = value.rstrip("/")
setattr(self, keyword, value)
self.pax_headers = pax_headers.copy()
+ def _decode_pax_field(self, value, encoding, fallback_encoding, fallback_errors):
+ """Decode a single field from a pax record.
+ """
+ try:
+ return value.decode(encoding, "strict")
+ except UnicodeDecodeError:
+ return value.decode(fallback_encoding, fallback_errors)
+
def _block(self, count):
"""Round up a byte count by BLOCKSIZE and return it,
e.g. _block(834) => 1024.
@@ -1481,7 +1596,7 @@ class TarInfo(object):
def isfifo(self):
return self.type == FIFOTYPE
def issparse(self):
- return self.type == GNUTYPE_SPARSE
+ return self.sparse is not None
def isdev(self):
return self.type in (CHRTYPE, BLKTYPE, FIFOTYPE)
# class TarInfo
--- a/Lib/test/test_tarfile.py
+++ b/Lib/test/test_tarfile.py
@@ -324,7 +324,7 @@ class CommonReadTest(ReadTest):
def test_length_zero_header(self):
# bpo-39017 (CVE-2019-20907): reading a zero-length header should fail
# with an exception
- with self.assertRaisesRegex(tarfile.ReadError, "file could not be opened successfully"):
+ with self.assertRaisesRegexp(tarfile.ReadError, "file could not be opened successfully"):
with tarfile.open(support.findfile('recursion.tar')) as tar:
pass
@@ -804,6 +804,48 @@ class WriteTestBase(unittest.TestCase):
self.assertFalse(fobj.closed)
self.assertEqual(data, fobj.getvalue())
+ def test_pax_header_bad_formats(self):
+ # The fields from the pax header have priority over the
+ # TarInfo.
+ pax_header_replacements = (
+ b" foo=bar\n",
+ b"0 \n",
+ b"1 \n",
+ b"2 \n",
+ b"3 =\n",
+ b"4 =a\n",
+ b"1000000 foo=bar\n",
+ b"0 foo=bar\n",
+ b"-12 foo=bar\n",
+ b"000000000000000000000000036 foo=bar\n",
+ )
+ pax_headers = {"foo": "bar"}
+
+ for replacement in pax_header_replacements:
+ with self.subTest(header=replacement):
+ tar = tarfile.open(tmpname, "w", format=tarfile.PAX_FORMAT,
+ encoding="iso8859-1")
+ try:
+ t = tarfile.TarInfo()
+ t.name = "pax" # non-ASCII
+ t.uid = 1
+ t.pax_headers = pax_headers
+ tar.addfile(t)
+ finally:
+ tar.close()
+
+ with open(tmpname, "rb") as f:
+ data = f.read()
+ self.assertIn(b"11 foo=bar\n", data)
+ data = data.replace(b"11 foo=bar\n", replacement)
+
+ with open(tmpname, "wb") as f:
+ f.truncate()
+ f.write(data)
+
+ with self.assertRaisesRegexp(tarfile.ReadError, r"file could not be opened successfully"):
+ tarfile.open(tmpname, encoding="iso8859-1")
+
class WriteTest(WriteTestBase):
--- /dev/null
+++ b/Misc/NEWS.d/next/Security/2024-07-02-13-39-20.gh-issue-121285.hrl-yI.rst
@@ -0,0 +1,2 @@
+Remove backtracking from tarfile header parsing for ``hdrcharset``, PAX, and
+GNU sparse headers.
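
For reference, the core of the patch removed above: the unbounded pattern r"(\d+) ([^=]+)=" is replaced by a length prefix capped at 20 digits (_header_length_prefix_re) plus explicit framing checks on the keyword, the "=" separator and the trailing newline, so a malformed record is rejected outright rather than rescanned with backtracking. A rough Python 3 re-implementation of those framing rules (the patch itself targets the 2.7 tarfile module; split_pax_records and the ValueError are illustrative stand-ins):

    import re

    # The length prefix is at most 20 digits followed by a space, so matching
    # never scans an unbounded run of input.
    _header_length_prefix_re = re.compile(br"([0-9]{1,20}) ")

    def split_pax_records(buf):
        """Yield (keyword, value) pairs, rejecting any malformed record."""
        pos = 0
        while pos < len(buf) and buf[pos] != 0x00:
            match = _header_length_prefix_re.match(buf, pos)
            if not match:
                raise ValueError("invalid header")
            length = int(match.group(1))
            # Shortest legal record is b"5 x=\n"; the declared length must
            # also stay inside the buffer.
            if length < 5 or pos + length > len(buf):
                raise ValueError("invalid header")
            end = match.start(1) + length - 1   # index of the record's last byte
            keyword, equals, value = buf[match.end(1) + 1:end].partition(b"=")
            if not keyword or equals != b"=" or buf[end] != 0x0A:
                raise ValueError("invalid header")
            yield keyword, value
            pos += length

    # A few of the malformed prefixes from test_pax_header_bad_formats,
    # all rejected before any value is decoded:
    for bad in (b" foo=bar\n", b"0 foo=bar\n", b"-12 foo=bar\n", b"1000000 foo=bar\n"):
        try:
            list(split_pax_records(bad))
        except ValueError:
            pass  # expected

Of the GNU sparse helpers added by the patch, version 0.1 is the simplest: GNU.sparse.map holds a comma-separated list of alternating offset/size values, which _proc_gnusparse_01 pairs up:

    # e.g. "0,512,1024,512" describes two data regions of 512 bytes each.
    sparse = [int(x) for x in "0,512,1024,512".split(",")]
    assert list(zip(sparse[::2], sparse[1::2])) == [(0, 512), (1024, 512)]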

View File

@@ -1,10 +1,3 @@
-------------------------------------------------------------------
Wed Sep 18 23:03:19 UTC 2024 - Matej Cepl <mcepl@suse.com>
- Add CVE-2024-6232-ReDOS-backtrack-tarfile.patch prevent
ReDos via excessive backtracking while parsing header values
(bsc#1230227, CVE-2024-6232).
-------------------------------------------------------------------
Mon Jul 15 12:19:43 UTC 2024 - Matej Cepl <mcepl@cepl.eu>

View File

@@ -168,9 +168,6 @@ Patch81: CVE-2023-52425-libexpat-2.6.0-remove-failing-tests.patch
# PATCH-FIX-UPSTREAM CVE-2024-0450-zipfile-avoid-quoted-overlap-zipbomb.patch bsc#1221854 mcepl@suse.com
# detecting the vulnerability of the "quoted-overlap" zipbomb (from gh#python/cpython!110016).
Patch82: CVE-2024-0450-zipfile-avoid-quoted-overlap-zipbomb.patch
# PATCH-FIX-UPSTREAM CVE-2024-6232-ReDOS-backtrack-tarfile.patch bsc#1230227 mcepl@suse.com
# preventing ReDos via excessive backtracking while parsing header values in tarfile
Patch83: CVE-2024-6232-ReDOS-backtrack-tarfile.patch
# COMMON-PATCH-END
%define python_version %(echo %{tarversion} | head -c 3)
BuildRequires: automake
@@ -328,7 +325,6 @@ other applications.
%patch -P 80 -p1
%patch -P 81 -p1
%patch -P 82 -p1
%patch -P 83 -p1
# For patch 66
cp -v %{SOURCE66} Lib/test/recursion.tar

View File

@@ -1,10 +1,3 @@
-------------------------------------------------------------------
Wed Sep 18 23:03:19 UTC 2024 - Matej Cepl <mcepl@suse.com>
- Add CVE-2024-6232-ReDOS-backtrack-tarfile.patch prevent
ReDos via excessive backtracking while parsing header values
(bsc#1230227, CVE-2024-6232).
-------------------------------------------------------------------
Mon Jul 15 12:19:43 UTC 2024 - Matej Cepl <mcepl@cepl.eu>

View File

@@ -164,9 +164,6 @@ Patch81: CVE-2023-52425-libexpat-2.6.0-remove-failing-tests.patch
# PATCH-FIX-UPSTREAM CVE-2024-0450-zipfile-avoid-quoted-overlap-zipbomb.patch bsc#1221854 mcepl@suse.com
# detecting the vulnerability of the "quoted-overlap" zipbomb (from gh#python/cpython!110016).
Patch82: CVE-2024-0450-zipfile-avoid-quoted-overlap-zipbomb.patch
# PATCH-FIX-UPSTREAM CVE-2024-6232-ReDOS-backtrack-tarfile.patch bsc#1230227 mcepl@suse.com
# preventing ReDos via excessive backtracking while parsing header values in tarfile
Patch83: CVE-2024-6232-ReDOS-backtrack-tarfile.patch
# COMMON-PATCH-END
Provides: pyth_doc = %{version}
Provides: pyth_ps = %{version}
@@ -258,7 +255,6 @@ Python, and Macintosh Module Reference in PDF format.
%patch -P 80 -p1
%patch -P 81 -p1
%patch -P 82 -p1
%patch -P 83 -p1
# For patch 66
cp -v %{SOURCE66} Lib/test/recursion.tar

View File

@@ -1,10 +1,3 @@
-------------------------------------------------------------------
Wed Sep 18 23:03:19 UTC 2024 - Matej Cepl <mcepl@suse.com>
- Add CVE-2024-6232-ReDOS-backtrack-tarfile.patch prevent
ReDos via excessive backtracking while parsing header values
(bsc#1230227, CVE-2024-6232).
-------------------------------------------------------------------
Mon Jul 15 12:19:43 UTC 2024 - Matej Cepl <mcepl@cepl.eu>

View File

@@ -164,9 +164,6 @@ Patch81: CVE-2023-52425-libexpat-2.6.0-remove-failing-tests.patch
# PATCH-FIX-UPSTREAM CVE-2024-0450-zipfile-avoid-quoted-overlap-zipbomb.patch bsc#1221854 mcepl@suse.com
# detecting the vulnerability of the "quoted-overlap" zipbomb (from gh#python/cpython!110016).
Patch82: CVE-2024-0450-zipfile-avoid-quoted-overlap-zipbomb.patch
# PATCH-FIX-UPSTREAM CVE-2024-6232-ReDOS-backtrack-tarfile.patch bsc#1230227 mcepl@suse.com
# preventing ReDos via excessive backtracking while parsing header values in tarfile
Patch83: CVE-2024-6232-ReDOS-backtrack-tarfile.patch
# COMMON-PATCH-END
BuildRequires: automake
BuildRequires: db-devel
@@ -378,7 +375,6 @@ that rely on earlier non-verification behavior.
%patch -P 80 -p1
%patch -P 81 -p1
%patch -P 82 -p1
%patch -P 83 -p1
# For patch 66
cp -v %{SOURCE66} Lib/test/recursion.tar